Tiago Gomes pushed to branch tiagogomes/issue-195 at BuildStream / buildstream
Commits:
- f77d60b0 by Tiago Gomes at 2018-08-01T13:41:06Z
- 2413998d by Tiago Gomes at 2018-08-01T13:41:11Z
- 81205446 by Tiago Gomes at 2018-08-01T13:41:11Z
- 323f0816 by Tiago Gomes at 2018-08-01T13:41:11Z
- 5a2acdf1 by Tiago Gomes at 2018-08-01T13:41:11Z
- 3128f7d6 by Tiago Gomes at 2018-08-01T13:41:11Z
- 648304b3 by Tiago Gomes at 2018-08-01T13:41:11Z
- 338dfc4b by Tiago Gomes at 2018-08-01T13:41:11Z
- f93268db by Tiago Gomes at 2018-08-01T13:41:11Z
- cff373c7 by Tiago Gomes at 2018-08-01T13:41:11Z
30 changed files:
- .pylintrc
- buildstream/_exceptions.py
- buildstream/_project.py
- buildstream/_yaml.py
- buildstream/plugin.py
- buildstream/plugins/sources/local.py
- buildstream/plugins/sources/ostree.py
- buildstream/plugins/sources/patch.py
- + doc/examples/junctions/autotools/elements/base.bst
- + doc/examples/junctions/autotools/elements/base/alpine.bst
- + doc/examples/junctions/autotools/elements/hello.bst
- + doc/examples/junctions/autotools/project.conf
- doc/examples/junctions/elements/hello-junction.bst
- doc/source/advanced-features/junction-elements.rst
- tests/artifactcache/expiry.py
- tests/examples/junctions.py
- tests/format/project.py
- + tests/format/project/element-path/project.conf
- + tests/format/project/local-plugin/project.conf
- tests/frontend/push.py
- tests/sources/local.py
- + tests/sources/local/invalid-relative-path/file.txt
- + tests/sources/local/invalid-relative-path/project.conf
- + tests/sources/local/invalid-relative-path/target.bst
- tests/sources/patch.py
- + tests/sources/patch/invalid-relative-path/file_1.patch
- + tests/sources/patch/invalid-relative-path/irregular.bst
- + tests/sources/patch/invalid-relative-path/project.conf
- tests/testutils/element_generators.py
- + tests/testutils/filetypegenerator.py
Changes:
--- a/.pylintrc
+++ b/.pylintrc
@@ -184,7 +184,7 @@ ignore-on-opaque-inference=yes
 # List of class names for which member attributes should not be checked (useful
 # for classes with dynamically set attributes). This supports the use of
 # qualified names.
-ignored-classes=optparse.Values,thread._local,_thread._local,contextlib.closing,gi.repository.GLib.GError
+ignored-classes=optparse.Values,thread._local,_thread._local,contextlib.closing,gi.repository.GLib.GError,pathlib.PurePath
 
 # List of module names for which member attributes should not be checked
 # (useful for modules/projects where namespaces are manipulated during runtime

--- a/buildstream/_exceptions.py
+++ b/buildstream/_exceptions.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2016,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -16,6 +16,7 @@
 #
 #  Authors:
 #        Tristan Van Berkom <tristan vanberkom codethink co uk>
+#        Tiago Gomes <tiago gomes codethink co uk>
 
 from enum import Enum
 
@@ -205,6 +206,13 @@ class LoadErrorReason(Enum):
     # Try to load a directory not a yaml file
     LOADING_DIRECTORY = 18
 
+    # A project path is not relative to the project directory
+    PROJ_PATH_INVALID = 19
+
+    # A project path points to a path of the wrong kind (e.g. a
+    # socket)
+    PROJ_PATH_INVALID_KIND = 20
+
 
 # LoadError
 #
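For illustration only, a minimal sketch of how a caller might distinguish the two new reasons when a load fails; the load call is hypothetical and not part of this commit, and it assumes, as BuildStream's error classes do, that the raised error keeps the reason it was constructed with:

    from buildstream._exceptions import LoadError, LoadErrorReason

    try:
        load_project(directory)   # hypothetical loading step
    except LoadError as e:
        if e.reason == LoadErrorReason.PROJ_PATH_INVALID:
            print("path is absolute or escapes the project directory")
        elif e.reason == LoadErrorReason.PROJ_PATH_INVALID_KIND:
            print("path points to a symlink, fifo, socket or device")
        else:
            raise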
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -16,6 +16,7 @@
 #
 #  Authors:
 #        Tristan Van Berkom <tristan vanberkom codethink co uk>
+#        Tiago Gomes <tiago gomes codethink co uk>
 
 import os
 import multiprocessing   # for cpu_count()
@@ -291,7 +292,8 @@ class Project():
 
         self.element_path = os.path.join(
             self.directory,
-            _yaml.node_get(config, str, 'element-path')
+            _yaml.node_get_project_path(config, 'element-path', self.directory,
+                                        check_is_dir=True)
         )
 
         # Load project options
@@ -500,8 +502,11 @@ class Project():
             if group in origin_dict:
                 del origin_dict[group]
             if origin_dict['origin'] == 'local':
+                path = _yaml.node_get_project_path(origin, 'path',
+                                                   self.directory,
+                                                   check_is_dir=True)
                 # paths are passed in relative to the project, but must be absolute
-                origin_dict['path'] = os.path.join(self.directory, origin_dict['path'])
+                origin_dict['path'] = os.path.join(self.directory, path)
             destination.append(origin_dict)
 
     # _ensure_project_dir()

--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2016,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -22,6 +22,7 @@ import collections
 import string
 from copy import deepcopy
 from contextlib import ExitStack
+from pathlib import Path
 
 from ruamel import yaml
 from ruamel.yaml.representer import SafeRepresenter, RoundTripRepresenter
@@ -392,6 +393,96 @@ def node_get(node, expected_type, key, indices=None, default_value=_get_sentinel
     return value
 
 
+# node_get_project_path()
+#
+# Fetches a project path from a dictionary node and validates it
+#
+# Only paths that exist in the filesystem and are relative to the
+# project directory are valid. In addition, paths can not refer to the
+# parent directory in the first path component, or point to symbolic
+# links, fifos, sockets and block/character devices.
+#
+# The `check_is_file` and `check_is_dir` parameters can be used to
+# perform additional validations on the path. Note that an exception
+# will always be raised if both parameters are set to ``True``.
+#
+# Args:
+#    node (dict): A dictionary loaded from YAML
+#    key (str): The key whose value contains a path to validate
+#    project_dir (str): The project directory
+#    check_is_file (bool): If ``True`` an error will also be raised
+#        if path does not point to a regular file.
+#        Defaults to ``False``
+#    check_is_dir (bool): If ``True`` an error will be also raised
+#        if path does not point to a directory.
+#        Defaults to ``False``
+# Returns:
+#    (str): The project path
+#
+# Raises:
+#    (LoadError): In case that the project path is not valid or does not
+#       exist
+#
+def node_get_project_path(node, key, project_dir, *,
+                          check_is_file=False, check_is_dir=False):
+    path_str = node_get(node, str, key)
+    path = Path(path_str)
+    project_dir_path = Path(project_dir)
+
+    provenance = node_get_provenance(node, key=key)
+
+    if (project_dir_path / path).is_symlink():
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
+                        "{}: Specified path '{}' must not point to "
+                        "symbolic links "
+                        .format(provenance, path_str))
+
+    if path.parts and path.parts[0] == '..':
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID,
+                        "{}: Specified path '{}' first component must "
+                        "not be '..'"
+                        .format(provenance, path_str))
+
+    try:
+        if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+            full_resolved_path = (project_dir_path / path).resolve()
+        else:
+            full_resolved_path = (project_dir_path / path).resolve(strict=True)
+    except FileNotFoundError:
+        raise LoadError(LoadErrorReason.MISSING_FILE,
+                        "{}: Specified path '{}' does not exist"
+                        .format(provenance, path_str))
+
+    is_inside = project_dir_path in full_resolved_path.parents or (
+        full_resolved_path == project_dir_path)
+
+    if path.is_absolute() or not is_inside:
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID,
+                        "{}: Specified path '{}' must be relative and "
+                        "inside project directory"
+                        .format(provenance, path_str))
+
+    if full_resolved_path.is_socket() or (
+            full_resolved_path.is_fifo() or
+            full_resolved_path.is_block_device()):
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
+                        "{}: Specified path '{}' points to an unsupported "
+                        "file kind"
+                        .format(provenance, path_str))
+
+    if check_is_file and not full_resolved_path.is_file():
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
+                        "{}: Specified path '{}' is not a regular file"
+                        .format(provenance, path_str))
+
+    if check_is_dir and not full_resolved_path.is_dir():
+        raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
+                        "{}: Specified path '{}' is not a directory"
+                        .format(provenance, path_str))
+
+    return path_str
+
+
 # node_items()
 #
 # A convenience generator for iterating over loaded key/value
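To make the intended calling convention concrete, a small usage sketch of the helper added above; the filesystem paths are invented for illustration and are not part of this commit:

    import os
    from buildstream import _yaml

    # Validate that 'element-path' in a loaded project.conf names a real
    # directory inside the project (paths are made up).
    config = _yaml.load('/path/to/project/project.conf')
    element_path = _yaml.node_get_project_path(config, 'element-path',
                                               '/path/to/project',
                                               check_is_dir=True)
    # The helper returns the string exactly as written in the YAML, so
    # callers still join it onto the project directory themselves, as
    # _project.py does above.
    full_path = os.path.join('/path/to/project', element_path)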
| 1 | 1 |
#
|
| 2 |
-# Copyright (C) 2017 Codethink Limited
|
|
| 2 |
+# Copyright (C) 2017,2018 Codethink Limited
|
|
| 3 | 3 |
#
|
| 4 | 4 |
# This program is free software; you can redistribute it and/or
|
| 5 | 5 |
# modify it under the terms of the GNU Lesser General Public
|
| ... | ... | @@ -335,6 +335,52 @@ class Plugin(): |
| 335 | 335 |
"""
|
| 336 | 336 |
return _yaml.node_get(node, expected_type, member_name, default_value=default)
|
| 337 | 337 |
|
| 338 |
+ def node_get_project_path(self, node, key, *,
|
|
| 339 |
+ check_is_file=False, check_is_dir=False):
|
|
| 340 |
+ """Fetches a project path from a dictionary node and validates it
|
|
| 341 |
+ |
|
| 342 |
+ Only paths that exist in the filesystem and are relative to the
|
|
| 343 |
+ project directory are valid. In addition, paths can not refer to
|
|
| 344 |
+ the parent directory in the first path component, or point to
|
|
| 345 |
+ symbolic links, fifos, sockets and block/character devices.
|
|
| 346 |
+ |
|
| 347 |
+ The `check_is_file` and `check_is_dir` parameters can be used to
|
|
| 348 |
+ perform additional validations on the path. Note that an
|
|
| 349 |
+ exception will always be raised if both parameters are set to
|
|
| 350 |
+ ``True``.
|
|
| 351 |
+ |
|
| 352 |
+ Args:
|
|
| 353 |
+ node (dict): A dictionary loaded from YAML
|
|
| 354 |
+ key (str): The key whose value contains a path to validate
|
|
| 355 |
+ check_is_file (bool): If ``True`` an error will also be raised
|
|
| 356 |
+ if path does not point to a regular file.
|
|
| 357 |
+ Defaults to ``False``
|
|
| 358 |
+ check_is_dir (bool): If ``True`` an error will also be raised
|
|
| 359 |
+ if path does not point to a directory.
|
|
| 360 |
+ Defaults to ``False``
|
|
| 361 |
+ |
|
| 362 |
+ Returns:
|
|
| 363 |
+ (str): The project path
|
|
| 364 |
+ |
|
| 365 |
+ Raises:
|
|
| 366 |
+ :class:`.LoadError`: In the case that the project path is not
|
|
| 367 |
+ valid or does not exist
|
|
| 368 |
+ |
|
| 369 |
+ *Since: 1.2*
|
|
| 370 |
+ |
|
| 371 |
+ **Example:**
|
|
| 372 |
+ |
|
| 373 |
+ .. code:: python
|
|
| 374 |
+ |
|
| 375 |
+ path = self.node_get_project_path(node, 'path')
|
|
| 376 |
+ |
|
| 377 |
+ """
|
|
| 378 |
+ |
|
| 379 |
+ return _yaml.node_get_project_path(node, key,
|
|
| 380 |
+ self.__project.directory,
|
|
| 381 |
+ check_is_file=check_is_file,
|
|
| 382 |
+ check_is_dir=check_is_dir)
|
|
| 383 |
+ |
|
| 338 | 384 |
def node_validate(self, node, valid_keys):
|
| 339 | 385 |
"""This should be used in :func:`~buildstream.plugin.Plugin.configure`
|
| 340 | 386 |
implementations to assert that users have only entered
|
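As a rough illustration of what the new Plugin.node_get_project_path() API gives plugin authors, a sketch of a hypothetical source plugin's configure(); the 'Frob' plugin is invented, but the calls mirror the local and patch plugins changed below:

    import os
    from buildstream import Source

    class FrobSource(Source):
        # Hypothetical plugin: path validation is delegated to the core,
        # which raises a LoadError with provenance if the path is missing,
        # absolute, outside the project, or not a regular file.
        def configure(self, node):
            self.node_validate(node, ['path'] + Source.COMMON_CONFIG_KEYS)
            self.path = self.node_get_project_path(node, 'path',
                                                   check_is_file=True)
            self.fullpath = os.path.join(self.get_project_directory(),
                                         self.path)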
--- a/buildstream/plugins/sources/local.py
+++ b/buildstream/plugins/sources/local.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2016,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -16,6 +16,7 @@
 #
 #  Authors:
 #        Tristan Van Berkom <tristan vanberkom codethink co uk>
+#        Tiago Gomes <tiago gomes codethink co uk>
 
 """
 local - stage local files and directories
@@ -36,7 +37,7 @@ local - stage local files and directories
 """
 
 import os
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, Consistency
 from buildstream import utils
 
 
@@ -51,14 +52,11 @@ class LocalSource(Source):
 
     def configure(self, node):
        self.node_validate(node, ['path'] + Source.COMMON_CONFIG_KEYS)
-
-        self.path = self.node_get_member(node, str, 'path')
+        self.path = self.node_get_project_path(node, 'path')
         self.fullpath = os.path.join(self.get_project_directory(), self.path)
 
     def preflight(self):
-        # Check if the configured file or directory really exists
-        if not os.path.exists(self.fullpath):
-            raise SourceError("Specified path '{}' does not exist".format(self.path))
+        pass
 
     def get_unique_key(self):
         if self.__unique_key is None:

--- a/buildstream/plugins/sources/ostree.py
+++ b/buildstream/plugins/sources/ostree.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2016,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -16,6 +16,7 @@
 #
 #  Authors:
 #        Andrew Leeming <andrew leeming codethink co uk>
+#        Tiago Gomes <tiago gomes codethink co uk>
 
 """
 ostree - stage files from an OSTree repository
@@ -73,9 +74,10 @@ class OSTreeSource(Source):
                                        utils.url_directory_name(self.url))
 
         # (optional) Not all repos are signed. But if they are, get the gpg key
-        self.gpg_key = self.node_get_member(node, str, 'gpg-key', None)
         self.gpg_key_path = None
-        if self.gpg_key is not None:
+        if self.node_get_member(node, str, 'gpg-key', None):
+            self.gpg_key = self.node_get_project_path(node, 'gpg-key',
+                                                      check_is_file=True)
             self.gpg_key_path = os.path.join(self.get_project_directory(), self.gpg_key)
 
         # Our OSTree repo handle

--- a/buildstream/plugins/sources/patch.py
+++ b/buildstream/plugins/sources/patch.py
@@ -1,5 +1,6 @@
 #
 # Copyright Bloomberg Finance LP
+# Copyright (C) 2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -16,6 +17,7 @@
 #
 #  Authors:
 #        Chandan Singh <csingh43 bloomberg net>
+#        Tiago Gomes <tiago gomes codethink co uk>
 
 """
 patch - apply locally stored patches
@@ -52,19 +54,12 @@ class PatchSource(Source):
     # pylint: disable=attribute-defined-outside-init
 
     def configure(self, node):
-        self.path = self.node_get_member(node, str, "path")
+        self.path = self.node_get_project_path(node, 'path',
+                                               check_is_file=True)
         self.strip_level = self.node_get_member(node, int, "strip-level", 1)
         self.fullpath = os.path.join(self.get_project_directory(), self.path)
 
     def preflight(self):
-        # Check if the configured file really exists
-        if not os.path.exists(self.fullpath):
-            raise SourceError("Specified path '{}' does not exist".format(self.path),
-                              reason="patch-no-exist")
-        elif not os.path.isfile(self.fullpath):
-            raise SourceError("Specified path '{}' must be a file".format(self.path),
-                              reason="patch-not-a-file")
-
         # Check if patch is installed, get the binary at the same time
         self.host_patch = utils.get_host_tool("patch")
 

--- /dev/null
+++ b/doc/examples/junctions/autotools/elements/base.bst
@@ -0,0 +1,5 @@
+kind: stack
+description: Base stack
+
+depends:
+- base/alpine.bst

--- /dev/null
+++ b/doc/examples/junctions/autotools/elements/base/alpine.bst
@@ -0,0 +1,13 @@
+kind: import
+description: |
+
+  Alpine Linux base runtime
+
+sources:
+- kind: tar
+
+  # This is a post doctored, trimmed down system image
+  # of the Alpine linux distribution.
+  #
+  url: alpine:integration-tests-base.v1.x86_64.tar.xz
+  ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639

--- /dev/null
+++ b/doc/examples/junctions/autotools/elements/hello.bst
@@ -0,0 +1,21 @@
+kind: autotools
+description: |
+
+  Hello world example from automake
+
+variables:
+
+  # The hello world example lives in the doc/amhello folder.
+  #
+  # Set the %{command-subdir} variable to that location
+  # and just have the autotools element run it's commands there.
+  #
+  command-subdir: doc/amhello
+
+sources:
+- kind: tar
+  url: gnu:automake-1.16.tar.gz
+  ref: 80da43bb5665596ee389e6d8b64b4f122ea4b92a685b1dbd813cd1f0e0c2d83f
+
+depends:
+- base.bst

--- /dev/null
+++ b/doc/examples/junctions/autotools/project.conf
@@ -0,0 +1,13 @@
+# Unique project name
+name: autotools
+
+# Required BuildStream format version
+format-version: 9
+
+# Subdirectory where elements are stored
+element-path: elements
+
+# Define some aliases for the tarballs we download
+aliases:
+  alpine: https://gnome7.codethink.co.uk/tarballs/
+  gnu: https://ftp.gnu.org/gnu/automake/

--- a/doc/examples/junctions/elements/hello-junction.bst
+++ b/doc/examples/junctions/elements/hello-junction.bst
@@ -1,8 +1,4 @@
 kind: junction
-
-# Specify the source of the BuildStream project
-# We are going to use the autotools examples distributed with BuildStream in the
-# doc/examples/autotools directory
 sources:
 - kind: local
-  path: ../autotools
+  path: autotools

--- a/doc/source/advanced-features/junction-elements.rst
+++ b/doc/source/advanced-features/junction-elements.rst
@@ -21,8 +21,8 @@ Below is a simple example of bst file for a junction element:
 .. literalinclude:: ../../examples/junctions/elements/hello-junction.bst
    :language: yaml
 
-This element imports the autotools example project found in the BuildStream
-doc/examples/autotools subdirectory.
+This element imports the autotools example subproject found in the
+BuildStream doc/examples/junctions/autotools subdirectory.
 
 .. note::
 
| ... | ... | @@ -5,7 +5,7 @@ import pytest |
| 5 | 5 |
from buildstream import _yaml
|
| 6 | 6 |
from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
| 7 | 7 |
|
| 8 |
-from tests.testutils import cli
|
|
| 8 |
+from tests.testutils import cli, create_element_size
|
|
| 9 | 9 |
|
| 10 | 10 |
|
| 11 | 11 |
DATA_DIR = os.path.join(
|
| ... | ... | @@ -14,32 +14,12 @@ DATA_DIR = os.path.join( |
| 14 | 14 |
)
|
| 15 | 15 |
|
| 16 | 16 |
|
| 17 |
-def create_element(name, path, dependencies, size):
|
|
| 18 |
- os.makedirs(path, exist_ok=True)
|
|
| 19 |
- |
|
| 20 |
- # Create a file to be included in this element's artifact
|
|
| 21 |
- with open(os.path.join(path, name + '_data'), 'wb+') as f:
|
|
| 22 |
- f.write(os.urandom(size))
|
|
| 23 |
- |
|
| 24 |
- element = {
|
|
| 25 |
- 'kind': 'import',
|
|
| 26 |
- 'sources': [
|
|
| 27 |
- {
|
|
| 28 |
- 'kind': 'local',
|
|
| 29 |
- 'path': os.path.join(path, name + '_data')
|
|
| 30 |
- }
|
|
| 31 |
- ],
|
|
| 32 |
- 'depends': dependencies
|
|
| 33 |
- }
|
|
| 34 |
- _yaml.dump(element, os.path.join(path, name))
|
|
| 35 |
- |
|
| 36 |
- |
|
| 37 | 17 |
# Ensure that the cache successfully removes an old artifact if we do
|
| 38 | 18 |
# not have enough space left.
|
| 39 | 19 |
@pytest.mark.datafiles(DATA_DIR)
|
| 40 | 20 |
def test_artifact_expires(cli, datafiles, tmpdir):
|
| 41 | 21 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 42 |
- element_path = os.path.join(project, 'elements')
|
|
| 22 |
+ element_path = 'elements'
|
|
| 43 | 23 |
cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
|
| 44 | 24 |
checkout = os.path.join(project, 'checkout')
|
| 45 | 25 |
|
| ... | ... | @@ -52,7 +32,7 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 52 | 32 |
# Create an element that uses almost the entire cache (an empty
|
| 53 | 33 |
# ostree cache starts at about ~10KiB, so we need a bit of a
|
| 54 | 34 |
# buffer)
|
| 55 |
- create_element('target.bst', element_path, [], 6000000)
|
|
| 35 |
+ create_element_size('target.bst', project, element_path, [], 6000000)
|
|
| 56 | 36 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
| 57 | 37 |
res.assert_success()
|
| 58 | 38 |
|
| ... | ... | @@ -61,7 +41,7 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 61 | 41 |
# Our cache should now be almost full. Let's create another
|
| 62 | 42 |
# artifact and see if we can cause buildstream to delete the old
|
| 63 | 43 |
# one.
|
| 64 |
- create_element('target2.bst', element_path, [], 6000000)
|
|
| 44 |
+ create_element_size('target2.bst', project, element_path, [], 6000000)
|
|
| 65 | 45 |
res = cli.run(project=project, args=['build', 'target2.bst'])
|
| 66 | 46 |
res.assert_success()
|
| 67 | 47 |
|
| ... | ... | @@ -82,7 +62,7 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 82 | 62 |
@pytest.mark.datafiles(DATA_DIR)
|
| 83 | 63 |
def test_artifact_too_large(cli, datafiles, tmpdir, size):
|
| 84 | 64 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 85 |
- element_path = os.path.join(project, 'elements')
|
|
| 65 |
+ element_path = 'elements'
|
|
| 86 | 66 |
|
| 87 | 67 |
cli.configure({
|
| 88 | 68 |
'cache': {
|
| ... | ... | @@ -91,7 +71,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir, size): |
| 91 | 71 |
})
|
| 92 | 72 |
|
| 93 | 73 |
# Create an element whose artifact is too large
|
| 94 |
- create_element('target.bst', element_path, [], size)
|
|
| 74 |
+ create_element_size('target.bst', project, element_path, [], size)
|
|
| 95 | 75 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
| 96 | 76 |
res.assert_main_error(ErrorDomain.STREAM, None)
|
| 97 | 77 |
|
| ... | ... | @@ -99,7 +79,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir, size): |
| 99 | 79 |
@pytest.mark.datafiles(DATA_DIR)
|
| 100 | 80 |
def test_expiry_order(cli, datafiles, tmpdir):
|
| 101 | 81 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 102 |
- element_path = os.path.join(project, 'elements')
|
|
| 82 |
+ element_path = 'elements'
|
|
| 103 | 83 |
cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
|
| 104 | 84 |
checkout = os.path.join(project, 'workspace')
|
| 105 | 85 |
|
| ... | ... | @@ -110,21 +90,21 @@ def test_expiry_order(cli, datafiles, tmpdir): |
| 110 | 90 |
})
|
| 111 | 91 |
|
| 112 | 92 |
# Create an artifact
|
| 113 |
- create_element('dep.bst', element_path, [], 2000000)
|
|
| 93 |
+ create_element_size('dep.bst', project, element_path, [], 2000000)
|
|
| 114 | 94 |
res = cli.run(project=project, args=['build', 'dep.bst'])
|
| 115 | 95 |
res.assert_success()
|
| 116 | 96 |
|
| 117 | 97 |
# Create another artifact
|
| 118 |
- create_element('unrelated.bst', element_path, [], 2000000)
|
|
| 98 |
+ create_element_size('unrelated.bst', project, element_path, [], 2000000)
|
|
| 119 | 99 |
res = cli.run(project=project, args=['build', 'unrelated.bst'])
|
| 120 | 100 |
res.assert_success()
|
| 121 | 101 |
|
| 122 | 102 |
# And build something else
|
| 123 |
- create_element('target.bst', element_path, [], 2000000)
|
|
| 103 |
+ create_element_size('target.bst', project, element_path, [], 2000000)
|
|
| 124 | 104 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
| 125 | 105 |
res.assert_success()
|
| 126 | 106 |
|
| 127 |
- create_element('target2.bst', element_path, [], 2000000)
|
|
| 107 |
+ create_element_size('target2.bst', project, element_path, [], 2000000)
|
|
| 128 | 108 |
res = cli.run(project=project, args=['build', 'target2.bst'])
|
| 129 | 109 |
res.assert_success()
|
| 130 | 110 |
|
| ... | ... | @@ -133,7 +113,7 @@ def test_expiry_order(cli, datafiles, tmpdir): |
| 133 | 113 |
res.assert_success()
|
| 134 | 114 |
|
| 135 | 115 |
# Finally, build something that will cause the cache to overflow
|
| 136 |
- create_element('expire.bst', element_path, [], 2000000)
|
|
| 116 |
+ create_element_size('expire.bst', project, element_path, [], 2000000)
|
|
| 137 | 117 |
res = cli.run(project=project, args=['build', 'expire.bst'])
|
| 138 | 118 |
res.assert_success()
|
| 139 | 119 |
|
| ... | ... | @@ -153,7 +133,7 @@ def test_expiry_order(cli, datafiles, tmpdir): |
| 153 | 133 |
@pytest.mark.datafiles(DATA_DIR)
|
| 154 | 134 |
def test_keep_dependencies(cli, datafiles, tmpdir):
|
| 155 | 135 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 156 |
- element_path = os.path.join(project, 'elements')
|
|
| 136 |
+ element_path = 'elements'
|
|
| 157 | 137 |
cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
|
| 158 | 138 |
|
| 159 | 139 |
cli.configure({
|
| ... | ... | @@ -163,12 +143,12 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
| 163 | 143 |
})
|
| 164 | 144 |
|
| 165 | 145 |
# Create a pretty big dependency
|
| 166 |
- create_element('dependency.bst', element_path, [], 5000000)
|
|
| 146 |
+ create_element_size('dependency.bst', project, element_path, [], 5000000)
|
|
| 167 | 147 |
res = cli.run(project=project, args=['build', 'dependency.bst'])
|
| 168 | 148 |
res.assert_success()
|
| 169 | 149 |
|
| 170 | 150 |
# Now create some other unrelated artifact
|
| 171 |
- create_element('unrelated.bst', element_path, [], 4000000)
|
|
| 151 |
+ create_element_size('unrelated.bst', project, element_path, [], 4000000)
|
|
| 172 | 152 |
res = cli.run(project=project, args=['build', 'unrelated.bst'])
|
| 173 | 153 |
res.assert_success()
|
| 174 | 154 |
|
| ... | ... | @@ -184,7 +164,8 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
| 184 | 164 |
# duplicating artifacts (bad!) we need to make this equal in size
|
| 185 | 165 |
# or smaller than half the size of its dependencies.
|
| 186 | 166 |
#
|
| 187 |
- create_element('target.bst', element_path, ['dependency.bst'], 2000000)
|
|
| 167 |
+ create_element_size('target.bst', project,
|
|
| 168 |
+ element_path, ['dependency.bst'], 2000000)
|
|
| 188 | 169 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
| 189 | 170 |
res.assert_success()
|
| 190 | 171 |
|
| ... | ... | @@ -197,7 +178,7 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
| 197 | 178 |
@pytest.mark.datafiles(DATA_DIR)
|
| 198 | 179 |
def test_never_delete_dependencies(cli, datafiles, tmpdir):
|
| 199 | 180 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 200 |
- element_path = os.path.join(project, 'elements')
|
|
| 181 |
+ element_path = 'elements'
|
|
| 201 | 182 |
|
| 202 | 183 |
cli.configure({
|
| 203 | 184 |
'cache': {
|
| ... | ... | @@ -206,10 +187,14 @@ def test_never_delete_dependencies(cli, datafiles, tmpdir): |
| 206 | 187 |
})
|
| 207 | 188 |
|
| 208 | 189 |
# Create a build tree
|
| 209 |
- create_element('dependency.bst', element_path, [], 8000000)
|
|
| 210 |
- create_element('related.bst', element_path, ['dependency.bst'], 8000000)
|
|
| 211 |
- create_element('target.bst', element_path, ['related.bst'], 8000000)
|
|
| 212 |
- create_element('target2.bst', element_path, ['target.bst'], 8000000)
|
|
| 190 |
+ create_element_size('dependency.bst', project,
|
|
| 191 |
+ element_path, [], 8000000)
|
|
| 192 |
+ create_element_size('related.bst', project,
|
|
| 193 |
+ element_path, ['dependency.bst'], 8000000)
|
|
| 194 |
+ create_element_size('target.bst', project,
|
|
| 195 |
+ element_path, ['related.bst'], 8000000)
|
|
| 196 |
+ create_element_size('target2.bst', project,
|
|
| 197 |
+ element_path, ['target.bst'], 8000000)
|
|
| 213 | 198 |
|
| 214 | 199 |
# We try to build this pipeline, but it's too big for the
|
| 215 | 200 |
# cache. Since all elements are required, the build should fail.
|
| ... | ... | @@ -249,7 +234,7 @@ def test_never_delete_dependencies(cli, datafiles, tmpdir): |
| 249 | 234 |
@pytest.mark.datafiles(DATA_DIR)
|
| 250 | 235 |
def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
|
| 251 | 236 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 252 |
- element_path = os.path.join(project, 'elements')
|
|
| 237 |
+ os.makedirs(os.path.join(project, 'elements'))
|
|
| 253 | 238 |
|
| 254 | 239 |
cli.configure({
|
| 255 | 240 |
'cache': {
|
| ... | ... | @@ -11,42 +11,12 @@ DATA_DIR = os.path.join( |
| 11 | 11 |
os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'junctions'
|
| 12 | 12 |
)
|
| 13 | 13 |
|
| 14 |
-JUNCTION_IMPORT_PATH = os.path.join(
|
|
| 15 |
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'autotools'
|
|
| 16 |
-)
|
|
| 17 |
- |
|
| 18 |
- |
|
| 19 |
-def ammend_juntion_path_paths(tmpdir):
|
|
| 20 |
- # The junction element in the examples/junctions project uses a local source type.
|
|
| 21 |
- # It's "path:" must specify a relative path from the project's root directory.
|
|
| 22 |
- # For the hello-junction element to function during these tests, the copy of the junctions
|
|
| 23 |
- # project made in the buildstream/tmp/directory, "path:" must be ammended to be the relative
|
|
| 24 |
- # path to the autotools example from the temporary test directory.
|
|
| 25 |
- junction_element = os.path.join(tmpdir, "elements", "hello-junction.bst")
|
|
| 26 |
- junction_element_bst = ""
|
|
| 27 |
- junction_relative_path = os.path.relpath(JUNCTION_IMPORT_PATH, tmpdir)
|
|
| 28 |
- with open(junction_element, 'r') as f:
|
|
| 29 |
- junction_element_bst = f.read()
|
|
| 30 |
- ammended_element_bst = junction_element_bst.replace("../autotools", junction_relative_path)
|
|
| 31 |
- with open(junction_element, 'w') as f:
|
|
| 32 |
- f.write(ammended_element_bst)
|
|
| 33 |
- |
|
| 34 |
- |
|
| 35 |
-# Check that the autotools project is where the junctions example expects and
|
|
| 36 |
-# contains the hello.bst element.
|
|
| 37 |
-@pytest.mark.datafiles(DATA_DIR)
|
|
| 38 |
-def test_autotools_example_is_present(datafiles):
|
|
| 39 |
- autotools_path = JUNCTION_IMPORT_PATH
|
|
| 40 |
- assert os.path.exists(autotools_path)
|
|
| 41 |
- assert os.path.exists(os.path.join(autotools_path, "elements", "hello.bst"))
|
|
| 42 |
- |
|
| 43 | 14 |
|
| 44 | 15 |
# Test that the project builds successfully
|
| 45 | 16 |
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
|
| 46 | 17 |
@pytest.mark.datafiles(DATA_DIR)
|
| 47 | 18 |
def test_build(cli, tmpdir, datafiles):
|
| 48 | 19 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 49 |
- ammend_juntion_path_paths(str(tmpdir))
|
|
| 50 | 20 |
|
| 51 | 21 |
result = cli.run(project=project, args=['build', 'callHello.bst'])
|
| 52 | 22 |
result.assert_success()
|
| ... | ... | @@ -57,7 +27,6 @@ def test_build(cli, tmpdir, datafiles): |
| 57 | 27 |
@pytest.mark.datafiles(DATA_DIR)
|
| 58 | 28 |
def test_shell_call_hello(cli, tmpdir, datafiles):
|
| 59 | 29 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 60 |
- ammend_juntion_path_paths(str(tmpdir))
|
|
| 61 | 30 |
|
| 62 | 31 |
result = cli.run(project=project, args=['build', 'callHello.bst'])
|
| 63 | 32 |
result.assert_success()
|
| ... | ... | @@ -73,7 +42,6 @@ def test_shell_call_hello(cli, tmpdir, datafiles): |
| 73 | 42 |
def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
|
| 74 | 43 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 75 | 44 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
|
| 76 |
- ammend_juntion_path_paths(str(tmpdir))
|
|
| 77 | 45 |
|
| 78 | 46 |
result = cli.run(project=project,
|
| 79 | 47 |
args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
|
| ... | ... | @@ -2,7 +2,7 @@ import os |
| 2 | 2 |
import pytest
|
| 3 | 3 |
from buildstream import _yaml
|
| 4 | 4 |
from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
| 5 |
-from tests.testutils.runcli import cli
|
|
| 5 |
+from tests.testutils import cli, filetypegenerator
|
|
| 6 | 6 |
|
| 7 | 7 |
|
| 8 | 8 |
# Project directory
|
| ... | ... | @@ -90,6 +90,48 @@ def test_project_unsupported(cli, datafiles): |
| 90 | 90 |
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.UNSUPPORTED_PROJECT)
|
| 91 | 91 |
|
| 92 | 92 |
|
| 93 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
|
|
| 94 |
+def test_missing_element_path_directory(cli, datafiles):
|
|
| 95 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 96 |
+ result = cli.run(project=project, args=['workspace', 'list'])
|
|
| 97 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 98 |
+ LoadErrorReason.MISSING_FILE)
|
|
| 99 |
+ |
|
| 100 |
+ |
|
| 101 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
|
|
| 102 |
+def test_element_path_not_a_directory(cli, datafiles):
|
|
| 103 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 104 |
+ path = os.path.join(project, 'elements')
|
|
| 105 |
+ for file_type in filetypegenerator.generate_file_types(path):
|
|
| 106 |
+ result = cli.run(project=project, args=['workspace', 'list'])
|
|
| 107 |
+ if not os.path.isdir(path):
|
|
| 108 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 109 |
+ LoadErrorReason.PROJ_PATH_INVALID_KIND)
|
|
| 110 |
+ else:
|
|
| 111 |
+ result.assert_success()
|
|
| 112 |
+ |
|
| 113 |
+ |
|
| 114 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'local-plugin'))
|
|
| 115 |
+def test_missing_local_plugin_directory(cli, datafiles):
|
|
| 116 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 117 |
+ result = cli.run(project=project, args=['workspace', 'list'])
|
|
| 118 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 119 |
+ LoadErrorReason.MISSING_FILE)
|
|
| 120 |
+ |
|
| 121 |
+ |
|
| 122 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'local-plugin'))
|
|
| 123 |
+def test_local_plugin_not_directory(cli, datafiles):
|
|
| 124 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 125 |
+ path = os.path.join(project, 'plugins')
|
|
| 126 |
+ for file_type in filetypegenerator.generate_file_types(path):
|
|
| 127 |
+ result = cli.run(project=project, args=['workspace', 'list'])
|
|
| 128 |
+ if not os.path.isdir(path):
|
|
| 129 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 130 |
+ LoadErrorReason.PROJ_PATH_INVALID_KIND)
|
|
| 131 |
+ else:
|
|
| 132 |
+ result.assert_success()
|
|
| 133 |
+ |
|
| 134 |
+ |
|
| 93 | 135 |
@pytest.mark.datafiles(DATA_DIR)
|
| 94 | 136 |
def test_project_plugin_load_allowed(cli, datafiles):
|
| 95 | 137 |
project = os.path.join(datafiles.dirname, datafiles.basename, 'plugin-allowed')
|
--- /dev/null
+++ b/tests/format/project/element-path/project.conf
@@ -0,0 +1,2 @@
+name: foo
+element-path: elements

--- /dev/null
+++ b/tests/format/project/local-plugin/project.conf
@@ -0,0 +1,6 @@
+name: foo
+plugins:
+- origin: local
+  path: plugins
+  sources:
+    mysource: 0
| ... | ... | @@ -202,7 +202,7 @@ def test_push_after_pull(cli, tmpdir, datafiles): |
| 202 | 202 |
@pytest.mark.datafiles(DATA_DIR)
|
| 203 | 203 |
def test_artifact_expires(cli, datafiles, tmpdir):
|
| 204 | 204 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 205 |
- element_path = os.path.join(project, 'elements')
|
|
| 205 |
+ element_path = 'elements'
|
|
| 206 | 206 |
|
| 207 | 207 |
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
|
| 208 | 208 |
# Mock a file system with 12 MB free disk space
|
| ... | ... | @@ -215,12 +215,12 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 215 | 215 |
})
|
| 216 | 216 |
|
| 217 | 217 |
# Create and build an element of 5 MB
|
| 218 |
- create_element_size('element1.bst', element_path, [], int(5e6)) # [] => no deps
|
|
| 218 |
+ create_element_size('element1.bst', project, element_path, [], int(5e6))
|
|
| 219 | 219 |
result = cli.run(project=project, args=['build', 'element1.bst'])
|
| 220 | 220 |
result.assert_success()
|
| 221 | 221 |
|
| 222 | 222 |
# Create and build an element of 5 MB
|
| 223 |
- create_element_size('element2.bst', element_path, [], int(5e6)) # [] => no deps
|
|
| 223 |
+ create_element_size('element2.bst', project, element_path, [], int(5e6))
|
|
| 224 | 224 |
result = cli.run(project=project, args=['build', 'element2.bst'])
|
| 225 | 225 |
result.assert_success()
|
| 226 | 226 |
|
| ... | ... | @@ -231,7 +231,7 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 231 | 231 |
assert_shared(cli, share, project, 'element2.bst')
|
| 232 | 232 |
|
| 233 | 233 |
# Create and build another element of 5 MB (This will exceed the free disk space available)
|
| 234 |
- create_element_size('element3.bst', element_path, [], int(5e6))
|
|
| 234 |
+ create_element_size('element3.bst', project, element_path, [], int(5e6))
|
|
| 235 | 235 |
result = cli.run(project=project, args=['build', 'element3.bst'])
|
| 236 | 236 |
result.assert_success()
|
| 237 | 237 |
|
| ... | ... | @@ -250,7 +250,7 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
| 250 | 250 |
@pytest.mark.datafiles(DATA_DIR)
|
| 251 | 251 |
def test_artifact_too_large(cli, datafiles, tmpdir):
|
| 252 | 252 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 253 |
- element_path = os.path.join(project, 'elements')
|
|
| 253 |
+ element_path = 'elements'
|
|
| 254 | 254 |
|
| 255 | 255 |
# Create an artifact share (remote cache) in tmpdir/artifactshare
|
| 256 | 256 |
# Mock a file system with 5 MB total space
|
| ... | ... | @@ -263,12 +263,12 @@ def test_artifact_too_large(cli, datafiles, tmpdir): |
| 263 | 263 |
})
|
| 264 | 264 |
|
| 265 | 265 |
# Create and push a 3MB element
|
| 266 |
- create_element_size('small_element.bst', element_path, [], int(3e6))
|
|
| 266 |
+ create_element_size('small_element.bst', project, element_path, [], int(3e6))
|
|
| 267 | 267 |
result = cli.run(project=project, args=['build', 'small_element.bst'])
|
| 268 | 268 |
result.assert_success()
|
| 269 | 269 |
|
| 270 | 270 |
# Create and try to push a 6MB element.
|
| 271 |
- create_element_size('large_element.bst', element_path, [], int(6e6))
|
|
| 271 |
+ create_element_size('large_element.bst', project, element_path, [], int(6e6))
|
|
| 272 | 272 |
result = cli.run(project=project, args=['build', 'large_element.bst'])
|
| 273 | 273 |
result.assert_success()
|
| 274 | 274 |
|
| ... | ... | @@ -285,7 +285,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir): |
| 285 | 285 |
@pytest.mark.datafiles(DATA_DIR)
|
| 286 | 286 |
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
|
| 287 | 287 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 288 |
- element_path = os.path.join(project, 'elements')
|
|
| 288 |
+ element_path = 'elements'
|
|
| 289 | 289 |
|
| 290 | 290 |
# Create an artifact share (remote cache) in tmpdir/artifactshare
|
| 291 | 291 |
# Mock a file system with 12 MB free disk space
|
| ... | ... | @@ -298,11 +298,11 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir): |
| 298 | 298 |
})
|
| 299 | 299 |
|
| 300 | 300 |
# Create and build 2 elements, each of 5 MB.
|
| 301 |
- create_element_size('element1.bst', element_path, [], int(5e6))
|
|
| 301 |
+ create_element_size('element1.bst', project, element_path, [], int(5e6))
|
|
| 302 | 302 |
result = cli.run(project=project, args=['build', 'element1.bst'])
|
| 303 | 303 |
result.assert_success()
|
| 304 | 304 |
|
| 305 |
- create_element_size('element2.bst', element_path, [], int(5e6))
|
|
| 305 |
+ create_element_size('element2.bst', project, element_path, [], int(5e6))
|
|
| 306 | 306 |
result = cli.run(project=project, args=['build', 'element2.bst'])
|
| 307 | 307 |
result.assert_success()
|
| 308 | 308 |
|
| ... | ... | @@ -327,7 +327,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir): |
| 327 | 327 |
assert cli.get_element_state(project, 'element1.bst') == 'cached'
|
| 328 | 328 |
|
| 329 | 329 |
# Create and build the element3 (of 5 MB)
|
| 330 |
- create_element_size('element3.bst', element_path, [], int(5e6))
|
|
| 330 |
+ create_element_size('element3.bst', project, element_path, [], int(5e6))
|
|
| 331 | 331 |
result = cli.run(project=project, args=['build', 'element3.bst'])
|
| 332 | 332 |
result.assert_success()
|
| 333 | 333 |
|
| 1 | 1 |
import os
|
| 2 | 2 |
import pytest
|
| 3 | 3 |
|
| 4 |
-from buildstream._exceptions import ErrorDomain
|
|
| 5 |
-from tests.testutils import cli
|
|
| 4 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
| 5 |
+from tests.testutils import cli, filetypegenerator
|
|
| 6 | 6 |
|
| 7 | 7 |
DATA_DIR = os.path.join(
|
| 8 | 8 |
os.path.dirname(os.path.realpath(__file__)),
|
| ... | ... | @@ -11,17 +11,62 @@ DATA_DIR = os.path.join( |
| 11 | 11 |
|
| 12 | 12 |
|
| 13 | 13 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
| 14 |
-def test_missing_file(cli, tmpdir, datafiles):
|
|
| 14 |
+def test_missing_path(cli, tmpdir, datafiles):
|
|
| 15 | 15 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 16 | 16 |
|
| 17 | 17 |
# Removing the local file causes preflight to fail
|
| 18 |
- localfile = os.path.join(datafiles.dirname, datafiles.basename, 'file.txt')
|
|
| 18 |
+ localfile = os.path.join(project, 'file.txt')
|
|
| 19 | 19 |
os.remove(localfile)
|
| 20 | 20 |
|
| 21 | 21 |
result = cli.run(project=project, args=[
|
| 22 | 22 |
'show', 'target.bst'
|
| 23 | 23 |
])
|
| 24 |
- result.assert_main_error(ErrorDomain.SOURCE, None)
|
|
| 24 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
|
|
| 25 |
+ |
|
| 26 |
+ |
|
| 27 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
|
| 28 |
+def test_non_regular_file_or_directory(cli, tmpdir, datafiles):
|
|
| 29 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 30 |
+ localfile = os.path.join(project, 'file.txt')
|
|
| 31 |
+ |
|
| 32 |
+ for file_type in filetypegenerator.generate_file_types(localfile):
|
|
| 33 |
+ result = cli.run(project=project, args=[
|
|
| 34 |
+ 'show', 'target.bst'
|
|
| 35 |
+ ])
|
|
| 36 |
+ if os.path.isdir(localfile) and not os.path.islink(localfile):
|
|
| 37 |
+ result.assert_success()
|
|
| 38 |
+ elif os.path.isfile(localfile) and not os.path.islink(localfile):
|
|
| 39 |
+ result.assert_success()
|
|
| 40 |
+ else:
|
|
| 41 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 42 |
+ LoadErrorReason.PROJ_PATH_INVALID_KIND)
|
|
| 43 |
+ |
|
| 44 |
+ |
|
| 45 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
|
| 46 |
+def test_invalid_absolute_path(cli, tmpdir, datafiles):
|
|
| 47 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 48 |
+ |
|
| 49 |
+ with open(os.path.join(project, "target.bst"), 'r') as f:
|
|
| 50 |
+ old_yaml = f.read()
|
|
| 51 |
+ |
|
| 52 |
+ new_yaml = old_yaml.replace("file.txt", os.path.join(project, "file.txt"))
|
|
| 53 |
+ assert old_yaml != new_yaml
|
|
| 54 |
+ |
|
| 55 |
+ with open(os.path.join(project, "target.bst"), 'w') as f:
|
|
| 56 |
+ f.write(new_yaml)
|
|
| 57 |
+ |
|
| 58 |
+ result = cli.run(project=project, args=['show', 'target.bst'])
|
|
| 59 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 60 |
+ LoadErrorReason.PROJ_PATH_INVALID)
|
|
| 61 |
+ |
|
| 62 |
+ |
|
| 63 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'invalid-relative-path'))
|
|
| 64 |
+def test_invalid_relative_path(cli, tmpdir, datafiles):
|
|
| 65 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 66 |
+ |
|
| 67 |
+ result = cli.run(project=project, args=['show', 'target.bst'])
|
|
| 68 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 69 |
+ LoadErrorReason.PROJ_PATH_INVALID)
|
|
| 25 | 70 |
|
| 26 | 71 |
|
| 27 | 72 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
--- /dev/null
+++ b/tests/sources/local/invalid-relative-path/file.txt
@@ -0,0 +1 @@
+This is a text file

--- /dev/null
+++ b/tests/sources/local/invalid-relative-path/project.conf
@@ -0,0 +1,2 @@
+# Basic project
+name: foo

--- /dev/null
+++ b/tests/sources/local/invalid-relative-path/target.bst
@@ -0,0 +1,5 @@
+kind: import
+description: This is the pony
+sources:
+- kind: local
+  path: ../invalid-relative-path/file.txt
| 1 | 1 |
import os
|
| 2 | 2 |
import pytest
|
| 3 | 3 |
|
| 4 |
-from buildstream._exceptions import ErrorDomain
|
|
| 5 |
-from tests.testutils import cli
|
|
| 4 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
| 5 |
+from tests.testutils import cli, filetypegenerator
|
|
| 6 | 6 |
|
| 7 | 7 |
DATA_DIR = os.path.join(
|
| 8 | 8 |
os.path.dirname(os.path.realpath(__file__)),
|
| ... | ... | @@ -15,27 +15,56 @@ def test_missing_patch(cli, tmpdir, datafiles): |
| 15 | 15 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 16 | 16 |
|
| 17 | 17 |
# Removing the local file causes preflight to fail
|
| 18 |
- localfile = os.path.join(datafiles.dirname, datafiles.basename, 'file_1.patch')
|
|
| 18 |
+ localfile = os.path.join(project, 'file_1.patch')
|
|
| 19 | 19 |
os.remove(localfile)
|
| 20 | 20 |
|
| 21 | 21 |
result = cli.run(project=project, args=[
|
| 22 | 22 |
'show', 'target.bst'
|
| 23 | 23 |
])
|
| 24 |
- result.assert_main_error(ErrorDomain.SOURCE, 'patch-no-exist')
|
|
| 24 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
|
|
| 25 | 25 |
|
| 26 | 26 |
|
| 27 | 27 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
| 28 | 28 |
def test_non_regular_file_patch(cli, tmpdir, datafiles):
|
| 29 | 29 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 30 | 30 |
|
| 31 |
- # Add a fifo, that's not a regular file, should cause explosions
|
|
| 32 |
- patch_path = os.path.join(datafiles.dirname, datafiles.basename, 'irregular_file.patch')
|
|
| 33 |
- os.mkfifo(patch_path)
|
|
| 31 |
+ patch_path = os.path.join(project, 'irregular_file.patch')
|
|
| 32 |
+ for file_type in filetypegenerator.generate_file_types(patch_path):
|
|
| 33 |
+ result = cli.run(project=project, args=[
|
|
| 34 |
+ 'show', 'irregular.bst'
|
|
| 35 |
+ ])
|
|
| 36 |
+ if os.path.isfile(patch_path) and not os.path.islink(patch_path):
|
|
| 37 |
+ result.assert_success()
|
|
| 38 |
+ else:
|
|
| 39 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 40 |
+ LoadErrorReason.PROJ_PATH_INVALID_KIND)
|
|
| 34 | 41 |
|
| 35 |
- result = cli.run(project=project, args=[
|
|
| 36 |
- 'show', 'irregular.bst'
|
|
| 37 |
- ])
|
|
| 38 |
- result.assert_main_error(ErrorDomain.SOURCE, "patch-not-a-file")
|
|
| 42 |
+ |
|
| 43 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
|
| 44 |
+def test_invalid_absolute_path(cli, tmpdir, datafiles):
|
|
| 45 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 46 |
+ |
|
| 47 |
+ with open(os.path.join(project, "target.bst"), 'r') as f:
|
|
| 48 |
+ old_yaml = f.read()
|
|
| 49 |
+ new_yaml = old_yaml.replace("file_1.patch",
|
|
| 50 |
+ os.path.join(project, "file_1.patch"))
|
|
| 51 |
+ assert old_yaml != new_yaml
|
|
| 52 |
+ |
|
| 53 |
+ with open(os.path.join(project, "target.bst"), 'w') as f:
|
|
| 54 |
+ f.write(new_yaml)
|
|
| 55 |
+ |
|
| 56 |
+ result = cli.run(project=project, args=['show', 'target.bst'])
|
|
| 57 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 58 |
+ LoadErrorReason.PROJ_PATH_INVALID)
|
|
| 59 |
+ |
|
| 60 |
+ |
|
| 61 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'invalid-relative-path'))
|
|
| 62 |
+def test_invalid_relative_path(cli, tmpdir, datafiles):
|
|
| 63 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 64 |
+ |
|
| 65 |
+ result = cli.run(project=project, args=['show', 'irregular.bst'])
|
|
| 66 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
| 67 |
+ LoadErrorReason.PROJ_PATH_INVALID)
|
|
| 39 | 68 |
|
| 40 | 69 |
|
| 41 | 70 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
|
--- /dev/null
+++ b/tests/sources/patch/invalid-relative-path/file_1.patch
@@ -0,0 +1,7 @@
+diff --git a/file.txt b/file.txt
+index a496efe..341ef26 100644
+--- a/file.txt
++++ b/file.txt
+@@ -1 +1 @@
+-This is a text file
++This is text file with superpowers

--- /dev/null
+++ b/tests/sources/patch/invalid-relative-path/irregular.bst
@@ -0,0 +1,5 @@
+kind: import
+description: This is the pony
+sources:
+- kind: patch
+  path: ../invalid-relative-path/irregular_file.patch

--- /dev/null
+++ b/tests/sources/patch/invalid-relative-path/project.conf
@@ -0,0 +1,2 @@
+# Basic project
+name: foo

--- a/tests/testutils/element_generators.py
+++ b/tests/testutils/element_generators.py
@@ -18,11 +18,12 @@ from buildstream import _yaml
 # Returns:
 #     Nothing (creates a .bst file of specified size)
 #
-def create_element_size(name, path, dependencies, size):
-    os.makedirs(path, exist_ok=True)
+def create_element_size(name, project_dir, elements_path, dependencies, size):
+    full_elements_path = os.path.join(project_dir, elements_path)
+    os.makedirs(full_elements_path, exist_ok=True)
 
     # Create a file to be included in this element's artifact
-    with open(os.path.join(path, name + '_data'), 'wb+') as f:
+    with open(os.path.join(project_dir, name + '_data'), 'wb+') as f:
         f.write(os.urandom(size))
 
     # Simplest case: We want this file (of specified size) to just
@@ -32,9 +33,9 @@ def create_element_size(name, path, dependencies, size):
         'sources': [
             {
                 'kind': 'local',
-                'path': os.path.join(path, name + '_data')
+                'path': name + '_data'
             }
         ],
         'depends': dependencies
     }
-    _yaml.dump(element, os.path.join(path, name))
+    _yaml.dump(element, os.path.join(project_dir, elements_path, name))

--- /dev/null
+++ b/tests/testutils/filetypegenerator.py
@@ -0,0 +1,62 @@
+#
+#  Copyright (C) 2018 Codethink Limited
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2 of the License, or (at your option) any later version.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+#  Authors:
+#        Tiago Gomes <tiago gomes codethink co uk>
+
+import os
+import socket
+
+
+# generate_file_types()
+#
+# Generator that creates a regular file, directory, symbolic link, fifo
+# and socket at the specified path.
+#
+# Args:
+#  path: (str) path where to create each different type of file
+#
+def generate_file_types(path):
+    def clean():
+        if os.path.exists(path):
+            if os.path.isdir(path):
+                os.rmdir(path)
+            else:
+                os.remove(path)
+
+    clean()
+
+    with open(path, 'w') as f:
+        pass
+    yield
+    clean()
+
+    os.makedirs(path)
+    yield
+    clean()
+
+    os.symlink("project.conf", path)
+    yield
+    clean()
+
+    os.mkfifo(path)
+    yield
+    clean()
+
+    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+    s.bind(path)
+    yield
+    clean()
