Tiago Gomes pushed to branch tiagogomes/issue-195 at BuildStream / buildstream
Commits:
- 695c0cb0 by Phil Dawson at 2018-07-30T16:07:36Z
- fcb87b0a by Phil Dawson at 2018-07-30T16:53:37Z
- 6a9a6789 by Tiago Gomes at 2018-07-31T12:23:54Z
- 77bd72aa by Tiago Gomes at 2018-07-31T12:23:57Z
- f7e8f506 by Tiago Gomes at 2018-07-31T12:25:02Z
- 9013fbeb by Tiago Gomes at 2018-07-31T12:25:02Z
- 997f64d3 by Tiago Gomes at 2018-07-31T12:25:02Z
- 42aa02b6 by Tiago Gomes at 2018-07-31T12:25:02Z
18 changed files:
- .pylintrc
- buildstream/_project.py
- buildstream/_yaml.py
- buildstream/plugin.py
- buildstream/plugins/sources/local.py
- buildstream/plugins/sources/ostree.py
- buildstream/plugins/sources/patch.py
- doc/examples/junctions/elements/hello-junction.bst
- + doc/examples/junctions/first-project/elements/hello.bst
- + doc/examples/junctions/first-project/hello.world
- + doc/examples/junctions/first-project/project.conf
- doc/source/advanced-features/junction-elements.rst
- tests/artifactcache/expiry.py
- tests/examples/junctions.py
- tests/frontend/push.py
- tests/sources/local.py
- tests/sources/patch.py
- tests/testutils/element_generators.py
Changes:
.pylintrc:

@@ -184,7 +184,7 @@ ignore-on-opaque-inference=yes
 # List of class names for which member attributes should not be checked (useful
 # for classes with dynamically set attributes). This supports the use of
 # qualified names.
-ignored-classes=optparse.Values,thread._local,_thread._local,contextlib.closing,gi.repository.GLib.GError
+ignored-classes=optparse.Values,thread._local,_thread._local,contextlib.closing,gi.repository.GLib.GError,pathlib.PurePath
 
 
 # List of module names for which member attributes should not be checked
 # (useful for modules/projects where namespaces are manipulated during runtime
buildstream/_project.py:

@@ -291,7 +291,7 @@ class Project():
 
         self.element_path = os.path.join(
             self.directory,
-            _yaml.node_get(config, str, 'element-path')
+            _yaml.node_get_project_path(config, 'element-path', self.directory)
         )
 
         # Load project options
@@ -500,8 +500,11 @@ class Project():
                if group in origin_dict:
                    del origin_dict[group]
            if origin_dict['origin'] == 'local':
+               path = _yaml.node_get_project_path(origin,
+                                                  'path',
+                                                  self.directory)
                # paths are passed in relative to the project, but must be absolute
-               origin_dict['path'] = os.path.join(self.directory, origin_dict['path'])
+               origin_dict['path'] = os.path.join(self.directory, path)
            destination.append(origin_dict)
 
    # _ensure_project_dir()
buildstream/_yaml.py:

@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2016,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -22,6 +22,7 @@ import collections
 import string
 from copy import deepcopy
 from contextlib import ExitStack
+from pathlib import Path
 
 from ruamel import yaml
 from ruamel.yaml.representer import SafeRepresenter, RoundTripRepresenter
@@ -392,6 +393,75 @@ def node_get(node, expected_type, key, indices=None, default_value=_get_sentinel
     return value
 
 
+# node_get_project_path()
+#
+# Fetches a project path from a dictionary node and validates it
+#
+# Only paths that exist in the filesystem and are relative to the
+# project directory are valid. In addition, paths can't refer to the
+# parent directory in the first path component, or point to symbolic
+# links, fifos, sockets and block/character devices.
+#
+# Args:
+#    node (dict): A dictionary loaded from YAML
+#    key (str): The key whose value contains a path to validate
+#    project_dir (str): The project directory
+#    is_file (bool): If ``False`` path can either point to a directory
+#                    or regular file. If ``True`` path must point to a
+#                    regular file only. Defaults to ``False``
+#
+# Returns:
+#    (str): The project path
+#
+# Raises:
+#    (LoadError): In case that the project path is not valid or does not
+#                 exist
+#
+def node_get_project_path(node, key, project_dir, is_file=False):
+    path_str = node_get(node, str, key)
+    path = Path(path_str)
+    project_dir_path = Path(project_dir)
+
+    provenance = node_get_provenance(node, key=key)
+
+    if path.parts and path.parts[0] == '..':
+        raise LoadError(LoadErrorReason.INVALID_DATA,
+                        "{}: Specified path '{}' first component must "
+                        "not be '..'"
+                        .format(provenance, path_str))
+
+    full_resolved_path = (project_dir_path / path).resolve()
+    is_inside = project_dir_path in full_resolved_path.parents or (
+        full_resolved_path == project_dir_path)
+
+    if path.is_absolute() or not is_inside:
+        raise LoadError(LoadErrorReason.INVALID_DATA,
+                        "{}: Specified path '{}' must be relative and "
+                        "inside project directory"
+                        .format(provenance, path_str))
+
+    if not full_resolved_path.exists():
+        raise LoadError(LoadErrorReason.MISSING_FILE,
+                        "{}: Specified path '{}' does not exist"
+                        .format(provenance, path_str))
+
+    if full_resolved_path.is_symlink() or (
+            full_resolved_path.is_socket() or
+            full_resolved_path.is_fifo() or
+            full_resolved_path.is_block_device()):
+        raise LoadError(LoadErrorReason.INVALID_DATA,
+                        "{}: Specified path '{}' points to an unsupported "
+                        "file type"
+                        .format(provenance, path_str))
+
+    if is_file and not full_resolved_path.is_file():
+        raise LoadError(LoadErrorReason.INVALID_DATA,
+                        "{}: Specified path '{}' is not a regular file"
+                        .format(provenance, path_str))
+
+    return path_str
+
+
 # node_items()
 #
 # A convenience generator for iterating over loaded key/value
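For orientation, here is a minimal sketch of how the new _yaml.node_get_project_path() helper is meant to be called. The wrapper function, the 'config' node and the 'project_dir' value are hypothetical stand-ins for what the buildstream/_project.py hunk further up passes in; only node_get_project_path() and the two LoadErrorReason values come from this branch.

    from buildstream import _yaml
    from buildstream._exceptions import LoadError, LoadErrorReason

    def load_element_path(config, project_dir):
        # 'config' is a dictionary node produced by BuildStream's YAML loader.
        # The helper returns the raw 'element-path' string once it has checked
        # that the path is relative, stays inside project_dir, exists, and is
        # not a symlink, fifo, socket or block device.
        try:
            return _yaml.node_get_project_path(config, 'element-path', project_dir)
        except LoadError as err:
            # A missing path is reported as MISSING_FILE; the other violations
            # are reported as INVALID_DATA (assuming LoadError keeps its reason
            # on the 'reason' attribute).
            assert err.reason in (LoadErrorReason.MISSING_FILE,
                                  LoadErrorReason.INVALID_DATA)
            raise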
buildstream/plugin.py:

@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2017 Codethink Limited
+# Copyright (C) 2017,2018 Codethink Limited
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -335,6 +335,43 @@ class Plugin():
         """
         return _yaml.node_get(node, expected_type, member_name, default_value=default)
 
+    def node_get_project_path(self, node, key, is_file=False):
+        """Fetches a project path from a dictionary node and validates it
+
+        Only paths that exist in the filesystem and are relative to the
+        project directory are valid. In addition, paths can't refer to
+        the parent directory in the first path component, or point to
+        symbolic links, fifos, sockets and block/character devices.
+
+        Args:
+           node (dict): A dictionary loaded from YAML
+           key (str): The key whose value contains a path to validate
+           is_file (bool): If ``False`` path can either point to a
+                           directory or regular file. If ``True`` path
+                           must point to a regular file only. Defaults
+                           to ``False``
+
+        Returns:
+           (str): The project path
+
+        Raises:
+           :class:`.LoadError`: In the case that the project path is not
+                                valid or does not exist
+
+        *Since: 1.2*
+
+        **Example:**
+
+        .. code:: python
+
+          path = self.node_get_project_path(node, 'path')
+
+        """
+
+        return _yaml.node_get_project_path(node, key,
+                                           self.__project.directory,
+                                           is_file)
+
     def node_validate(self, node, valid_keys):
         """This should be used in :func:`~buildstream.plugin.Plugin.configure`
         implementations to assert that users have only entered
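The three source-plugin hunks that follow (local, ostree, patch) all switch to this new Plugin API. As a standalone, hedged sketch of the pattern (the plugin class below is hypothetical and not part of this branch):

    import os

    from buildstream import Source

    class ExampleSource(Source):
        # Illustrative only: a minimal configure() in the style this branch
        # applies to the local, ostree and patch source plugins.
        def configure(self, node):
            self.node_validate(node, ['path'] + Source.COMMON_CONFIG_KEYS)
            # The path is validated at load time; is_file=True additionally
            # requires a regular file, as the patch plugin does.
            self.path = self.node_get_project_path(node, 'path', is_file=True)
            self.fullpath = os.path.join(self.get_project_directory(), self.path)

        def preflight(self):
            # Existence checks now happen in configure(), so nothing is left to do here.
            pass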
buildstream/plugins/sources/local.py:

@@ -36,7 +36,7 @@ local - stage local files and directories
 """
 
 import os
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, Consistency
 from buildstream import utils
 
 
@@ -51,14 +51,11 @@ class LocalSource(Source):
 
     def configure(self, node):
         self.node_validate(node, ['path'] + Source.COMMON_CONFIG_KEYS)
-
-        self.path = self.node_get_member(node, str, 'path')
+        self.path = self.node_get_project_path(node, 'path')
         self.fullpath = os.path.join(self.get_project_directory(), self.path)
 
     def preflight(self):
-        # Check if the configured file or directory really exists
-        if not os.path.exists(self.fullpath):
-            raise SourceError("Specified path '{}' does not exist".format(self.path))
+        pass
 
     def get_unique_key(self):
         if self.__unique_key is None:
buildstream/plugins/sources/ostree.py:

@@ -73,9 +73,9 @@ class OSTreeSource(Source):
                                            utils.url_directory_name(self.url))
 
         # (optional) Not all repos are signed. But if they are, get the gpg key
-        self.gpg_key = self.node_get_member(node, str, 'gpg-key', None)
         self.gpg_key_path = None
-        if self.gpg_key is not None:
+        if self.node_get_member(node, str, 'gpg-key', None):
+            self.gpg_key = self.node_get_project_path(node, 'gpg-key', True)
             self.gpg_key_path = os.path.join(self.get_project_directory(), self.gpg_key)
 
         # Our OSTree repo handle
buildstream/plugins/sources/patch.py:

@@ -52,19 +52,11 @@ class PatchSource(Source):
     # pylint: disable=attribute-defined-outside-init
 
     def configure(self, node):
-        self.path = self.node_get_member(node, str, "path")
+        self.path = self.node_get_project_path(node, 'path', True)
         self.strip_level = self.node_get_member(node, int, "strip-level", 1)
        self.fullpath = os.path.join(self.get_project_directory(), self.path)
 
     def preflight(self):
-        # Check if the configured file really exists
-        if not os.path.exists(self.fullpath):
-            raise SourceError("Specified path '{}' does not exist".format(self.path),
-                              reason="patch-no-exist")
-        elif not os.path.isfile(self.fullpath):
-            raise SourceError("Specified path '{}' must be a file".format(self.path),
-                              reason="patch-not-a-file")
-
         # Check if patch is installed, get the binary at the same time
         self.host_patch = utils.get_host_tool("patch")
 

doc/examples/junctions/elements/hello-junction.bst:

@@ -1,8 +1,4 @@
 kind: junction
-
-# Specify the source of the BuildStream project
-# We are going to use the autotools examples distributed with BuildStream in the
-# doc/examples/autotools directory
 sources:
 - kind: local
-  path: ../autotools
+  path: first-project

doc/examples/junctions/first-project/elements/hello.bst (new file):

+kind: import
+
+# Use a local source to stage our file
+sources:
+- kind: local
+  path: hello.world
+
+# Configure the import element
+config:
+
+  # Place the content staged by sources at the
+  # root of the output artifact
+  target: /

doc/examples/junctions/first-project/project.conf (new file):

+# Unique project name
+name: first-project
+
+# Required BuildStream format version
+format-version: 9
+
+# Subdirectory where elements are stored
+element-path: elements
doc/source/advanced-features/junction-elements.rst:

@@ -34,6 +34,7 @@ The below bst file describes an element which depends on the hello.bst element
 from the autotools example:
 
 .. literalinclude:: ../../examples/junctions/elements/callHello.bst
+   :language: yaml
 
 This element consists of a script which calls hello.bst's hello command.
 
tests/artifactcache/expiry.py:

@@ -5,7 +5,7 @@ import pytest
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
-from tests.testutils import cli
+from tests.testutils import cli, create_element_size
 
 
 DATA_DIR = os.path.join(
@@ -14,32 +14,12 @@ DATA_DIR = os.path.join(
 )
 
 
-def create_element(name, path, dependencies, size):
-    os.makedirs(path, exist_ok=True)
-
-    # Create a file to be included in this element's artifact
-    with open(os.path.join(path, name + '_data'), 'wb+') as f:
-        f.write(os.urandom(size))
-
-    element = {
-        'kind': 'import',
-        'sources': [
-            {
-                'kind': 'local',
-                'path': os.path.join(path, name + '_data')
-            }
-        ],
-        'depends': dependencies
-    }
-    _yaml.dump(element, os.path.join(path, name))
-
-
 # Ensure that the cache successfully removes an old artifact if we do
 # not have enough space left.
 @pytest.mark.datafiles(DATA_DIR)
 def test_artifact_expires(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
     cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
     checkout = os.path.join(project, 'checkout')
 
@@ -52,7 +32,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     # Create an element that uses almost the entire cache (an empty
     # ostree cache starts at about ~10KiB, so we need a bit of a
     # buffer)
-    create_element('target.bst', element_path, [], 6000000)
+    create_element_size('target.bst', project, element_path, [], 6000000)
     res = cli.run(project=project, args=['build', 'target.bst'])
     res.assert_success()
 
@@ -61,7 +41,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     # Our cache should now be almost full. Let's create another
     # artifact and see if we can cause buildstream to delete the old
     # one.
-    create_element('target2.bst', element_path, [], 6000000)
+    create_element_size('target2.bst', project, element_path, [], 6000000)
     res = cli.run(project=project, args=['build', 'target2.bst'])
     res.assert_success()
 
@@ -82,7 +62,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_artifact_too_large(cli, datafiles, tmpdir, size):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
 
     cli.configure({
         'cache': {
@@ -91,7 +71,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir, size):
     })
 
     # Create an element whose artifact is too large
-    create_element('target.bst', element_path, [], size)
+    create_element_size('target.bst', project, element_path, [], size)
     res = cli.run(project=project, args=['build', 'target.bst'])
     res.assert_main_error(ErrorDomain.STREAM, None)
 
@@ -99,7 +79,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir, size):
 @pytest.mark.datafiles(DATA_DIR)
 def test_expiry_order(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
     cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
     checkout = os.path.join(project, 'workspace')
 
@@ -110,21 +90,21 @@ def test_expiry_order(cli, datafiles, tmpdir):
     })
 
     # Create an artifact
-    create_element('dep.bst', element_path, [], 2000000)
+    create_element_size('dep.bst', project, element_path, [], 2000000)
     res = cli.run(project=project, args=['build', 'dep.bst'])
     res.assert_success()
 
     # Create another artifact
-    create_element('unrelated.bst', element_path, [], 2000000)
+    create_element_size('unrelated.bst', project, element_path, [], 2000000)
     res = cli.run(project=project, args=['build', 'unrelated.bst'])
     res.assert_success()
 
     # And build something else
-    create_element('target.bst', element_path, [], 2000000)
+    create_element_size('target.bst', project, element_path, [], 2000000)
     res = cli.run(project=project, args=['build', 'target.bst'])
     res.assert_success()
 
-    create_element('target2.bst', element_path, [], 2000000)
+    create_element_size('target2.bst', project, element_path, [], 2000000)
     res = cli.run(project=project, args=['build', 'target2.bst'])
     res.assert_success()
 
@@ -133,7 +113,7 @@ def test_expiry_order(cli, datafiles, tmpdir):
     res.assert_success()
 
     # Finally, build something that will cause the cache to overflow
-    create_element('expire.bst', element_path, [], 2000000)
+    create_element_size('expire.bst', project, element_path, [], 2000000)
     res = cli.run(project=project, args=['build', 'expire.bst'])
     res.assert_success()
 
@@ -153,7 +133,7 @@ def test_expiry_order(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_keep_dependencies(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
     cache_location = os.path.join(project, 'cache', 'artifacts', 'ostree')
 
     cli.configure({
@@ -163,12 +143,12 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
     })
 
     # Create a pretty big dependency
-    create_element('dependency.bst', element_path, [], 5000000)
+    create_element_size('dependency.bst', project, element_path, [], 5000000)
     res = cli.run(project=project, args=['build', 'dependency.bst'])
     res.assert_success()
 
     # Now create some other unrelated artifact
-    create_element('unrelated.bst', element_path, [], 4000000)
+    create_element_size('unrelated.bst', project, element_path, [], 4000000)
     res = cli.run(project=project, args=['build', 'unrelated.bst'])
     res.assert_success()
 
@@ -184,7 +164,8 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
     # duplicating artifacts (bad!) we need to make this equal in size
     # or smaller than half the size of its dependencies.
     #
-    create_element('target.bst', element_path, ['dependency.bst'], 2000000)
+    create_element_size('target.bst', project,
+                        element_path, ['dependency.bst'], 2000000)
     res = cli.run(project=project, args=['build', 'target.bst'])
     res.assert_success()
 
@@ -197,7 +178,7 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_never_delete_dependencies(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
 
     cli.configure({
         'cache': {
@@ -206,10 +187,14 @@ def test_never_delete_dependencies(cli, datafiles, tmpdir):
     })
 
     # Create a build tree
-    create_element('dependency.bst', element_path, [], 8000000)
-    create_element('related.bst', element_path, ['dependency.bst'], 8000000)
-    create_element('target.bst', element_path, ['related.bst'], 8000000)
-    create_element('target2.bst', element_path, ['target.bst'], 8000000)
+    create_element_size('dependency.bst', project,
+                        element_path, [], 8000000)
+    create_element_size('related.bst', project,
+                        element_path, ['dependency.bst'], 8000000)
+    create_element_size('target.bst', project,
+                        element_path, ['related.bst'], 8000000)
+    create_element_size('target2.bst', project,
+                        element_path, ['target.bst'], 8000000)
 
     # We try to build this pipeline, but it's too big for the
     # cache. Since all elements are required, the build should fail.
@@ -249,7 +234,7 @@ def test_never_delete_dependencies(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    os.makedirs(os.path.join(project, 'elements'))
 
     cli.configure({
         'cache': {
tests/examples/junctions.py:

@@ -11,42 +11,12 @@ DATA_DIR = os.path.join(
     os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'junctions'
 )
 
-JUNCTION_IMPORT_PATH = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'autotools'
-)
-
-
-def ammend_juntion_path_paths(tmpdir):
-    # The junction element in the examples/junctions project uses a local source type.
-    # It's "path:" must specify a relative path from the project's root directory.
-    # For the hello-junction element to function during these tests, the copy of the junctions
-    # project made in the buildstream/tmp/directory, "path:" must be ammended to be the relative
-    # path to the autotools example from the temporary test directory.
-    junction_element = os.path.join(tmpdir, "elements", "hello-junction.bst")
-    junction_element_bst = ""
-    junction_relative_path = os.path.relpath(JUNCTION_IMPORT_PATH, tmpdir)
-    with open(junction_element, 'r') as f:
-        junction_element_bst = f.read()
-    ammended_element_bst = junction_element_bst.replace("../autotools", junction_relative_path)
-    with open(junction_element, 'w') as f:
-        f.write(ammended_element_bst)
-
-
-# Check that the autotools project is where the junctions example expects and
-# contains the hello.bst element.
-@pytest.mark.datafiles(DATA_DIR)
-def test_autotools_example_is_present(datafiles):
-    autotools_path = JUNCTION_IMPORT_PATH
-    assert os.path.exists(autotools_path)
-    assert os.path.exists(os.path.join(autotools_path, "elements", "hello.bst"))
-
 
 # Test that the project builds successfully
 @pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
 @pytest.mark.datafiles(DATA_DIR)
 def test_build(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    ammend_juntion_path_paths(str(tmpdir))
 
     result = cli.run(project=project, args=['build', 'callHello.bst'])
     result.assert_success()
@@ -57,7 +27,6 @@ def test_build(cli, tmpdir, datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_shell_call_hello(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    ammend_juntion_path_paths(str(tmpdir))
 
     result = cli.run(project=project, args=['build', 'callHello.bst'])
     result.assert_success()
@@ -73,7 +42,6 @@ def test_shell_call_hello(cli, tmpdir, datafiles):
 def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
-    ammend_juntion_path_paths(str(tmpdir))
 
     result = cli.run(project=project,
                      args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
tests/frontend/push.py:

@@ -202,7 +202,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_artifact_expires(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
 
     # Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
     # Mock a file system with 12 MB free disk space
@@ -215,12 +215,12 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     })
 
     # Create and build an element of 5 MB
-    create_element_size('element1.bst', element_path, [], int(5e6))  # [] => no deps
+    create_element_size('element1.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element1.bst'])
     result.assert_success()
 
     # Create and build an element of 5 MB
-    create_element_size('element2.bst', element_path, [], int(5e6))  # [] => no deps
+    create_element_size('element2.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element2.bst'])
     result.assert_success()
 
@@ -231,7 +231,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     assert_shared(cli, share, project, 'element2.bst')
 
     # Create and build another element of 5 MB (This will exceed the free disk space available)
-    create_element_size('element3.bst', element_path, [], int(5e6))
+    create_element_size('element3.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element3.bst'])
     result.assert_success()
 
@@ -250,7 +250,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_artifact_too_large(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
 
     # Create an artifact share (remote cache) in tmpdir/artifactshare
     # Mock a file system with 5 MB total space
@@ -263,12 +263,12 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
     })
 
     # Create and push a 3MB element
-    create_element_size('small_element.bst', element_path, [], int(3e6))
+    create_element_size('small_element.bst', project, element_path, [], int(3e6))
     result = cli.run(project=project, args=['build', 'small_element.bst'])
     result.assert_success()
 
     # Create and try to push a 6MB element.
-    create_element_size('large_element.bst', element_path, [], int(6e6))
+    create_element_size('large_element.bst', project, element_path, [], int(6e6))
     result = cli.run(project=project, args=['build', 'large_element.bst'])
     result.assert_success()
 
@@ -285,7 +285,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
 @pytest.mark.datafiles(DATA_DIR)
 def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    element_path = os.path.join(project, 'elements')
+    element_path = 'elements'
 
     # Create an artifact share (remote cache) in tmpdir/artifactshare
     # Mock a file system with 12 MB free disk space
@@ -298,11 +298,11 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
     })
 
     # Create and build 2 elements, each of 5 MB.
-    create_element_size('element1.bst', element_path, [], int(5e6))
+    create_element_size('element1.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element1.bst'])
     result.assert_success()
 
-    create_element_size('element2.bst', element_path, [], int(5e6))
+    create_element_size('element2.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element2.bst'])
     result.assert_success()
 
@@ -327,7 +327,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
     assert cli.get_element_state(project, 'element1.bst') == 'cached'
 
     # Create and build the element3 (of 5 MB)
-    create_element_size('element3.bst', element_path, [], int(5e6))
+    create_element_size('element3.bst', project, element_path, [], int(5e6))
     result = cli.run(project=project, args=['build', 'element3.bst'])
     result.assert_success()
 

tests/sources/local.py:

@@ -1,7 +1,7 @@
 import os
 import pytest
 
-from buildstream._exceptions import ErrorDomain
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
 from tests.testutils import cli
 
 DATA_DIR = os.path.join(
@@ -21,7 +21,7 @@ def test_missing_file(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'show', 'target.bst'
     ])
-    result.assert_main_error(ErrorDomain.SOURCE, None)
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))

tests/sources/patch.py:

@@ -1,7 +1,7 @@
 import os
 import pytest
 
-from buildstream._exceptions import ErrorDomain
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
 from tests.testutils import cli
 
 DATA_DIR = os.path.join(
@@ -21,7 +21,7 @@ def test_missing_patch(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'show', 'target.bst'
     ])
-    result.assert_main_error(ErrorDomain.SOURCE, 'patch-no-exist')
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
@@ -35,7 +35,7 @@ def test_non_regular_file_patch(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'show', 'irregular.bst'
     ])
-    result.assert_main_error(ErrorDomain.SOURCE, "patch-not-a-file")
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
tests/testutils/element_generators.py:

@@ -18,11 +18,12 @@ from buildstream import _yaml
 # Returns:
 #   Nothing (creates a .bst file of specified size)
 #
-def create_element_size(name, path, dependencies, size):
-    os.makedirs(path, exist_ok=True)
+def create_element_size(name, project_dir, elements_path, dependencies, size):
+    full_elements_path = os.path.join(project_dir, elements_path)
+    os.makedirs(full_elements_path, exist_ok=True)
 
     # Create a file to be included in this element's artifact
-    with open(os.path.join(path, name + '_data'), 'wb+') as f:
+    with open(os.path.join(project_dir, name + '_data'), 'wb+') as f:
         f.write(os.urandom(size))
 
     # Simplest case: We want this file (of specified size) to just
@@ -32,9 +33,9 @@ def create_element_size(name, path, dependencies, size):
         'sources': [
             {
                 'kind': 'local',
-                'path': os.path.join(path, name + '_data')
+                'path': name + '_data'
             }
         ],
         'depends': dependencies
     }
-    _yaml.dump(element, os.path.join(path, name))
+    _yaml.dump(element, os.path.join(project_dir, elements_path, name))
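Usage note (illustrative, not part of the diff): with the new signature the element path is given relative to the project directory, and the generated 'local' source path is likewise project-relative, matching what the updated local plugin now requires. For example, in a test where 'project' comes from the datafiles fixture:

    # Writes <project>/target.bst_data plus <project>/elements/target.bst,
    # whose local source refers to 'target.bst_data' relative to the project root.
    create_element_size('target.bst', project, 'elements', [], 6000000)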