Angelos Evripiotis pushed to branch aevri/contributing_gitlab at BuildStream / buildstream
Commits:
- 8e64ccef by James Ennis at 2018-11-07T11:06:11Z
- 09ab676d by James Ennis at 2018-11-07T11:06:11Z
- f514124f by James Ennis at 2018-11-07T11:06:11Z
- 029ba17d by richardmaw-codethink at 2018-11-07T11:32:20Z
- 261e2cd3 by Jim MacArthur at 2018-11-07T12:02:43Z
- 8931e42c by Jim MacArthur at 2018-11-07T12:26:06Z
- 6ccfab0b by Valentin David at 2018-11-07T16:28:47Z
- f24e20e9 by richardmaw-codethink at 2018-11-07T16:59:50Z
- e28f668a by Valentin David at 2018-11-08T09:34:11Z
- 83d15350 by Valentin David at 2018-11-08T10:01:39Z
- 9f0e12f1 by Benjamin Schubert at 2018-11-08T10:21:12Z
- cf2e0059 by Benjamin Schubert at 2018-11-08T10:21:12Z
- c51ba01b by Benjamin Schubert at 2018-11-08T10:21:12Z
- f116b9b7 by Benjamin Schubert at 2018-11-08T11:07:12Z
- cc2e6ae5 by Valentin David at 2018-11-08T12:41:36Z
- e578a89f by Valentin David at 2018-11-08T13:11:10Z
- c0a8bb66 by Angelos Evripiotis at 2018-11-08T15:49:16Z
- f7231e90 by Angelos Evripiotis at 2018-11-08T15:49:16Z
- f7643440 by Angelos Evripiotis at 2018-11-08T15:49:16Z
27 changed files:
- CONTRIBUTING.rst
- buildstream/_platform/linux.py
- buildstream/_site.py
- buildstream/data/projectconfig.yaml
- buildstream/element.py
- buildstream/sandbox/_sandboxdummy.py
- buildstream/scriptelement.py
- conftest.py
- doc/source/format_project.rst
- doc/source/using_config.rst
- doc/source/using_configuring_artifact_server.rst
- tests/cachekey/project/elements/build1.expected
- tests/cachekey/project/elements/build2.expected
- tests/cachekey/project/target.expected
- + tests/integration/project/elements/script/corruption-2.bst
- + tests/integration/project/elements/script/corruption-image.bst
- + tests/integration/project/elements/script/corruption-integration.bst
- + tests/integration/project/elements/script/corruption.bst
- + tests/integration/project/elements/script/marked-tmpdir.bst
- + tests/integration/project/elements/script/no-tmpdir.bst
- + tests/integration/project/elements/script/tmpdir.bst
- + tests/integration/project/files/canary
- tests/integration/script.py
- + tests/sandboxes/missing-dependencies/elements/base.bst
- + tests/sandboxes/missing-dependencies/files/base/bin/sh
- + tests/sandboxes/missing-dependencies/project.conf
- + tests/sandboxes/missing_dependencies.py
Changes:
| ... | ... | @@ -97,7 +97,13 @@ a new merge request. You can also `create a merge request for an existing branch |
| 97 | 97 |
You may open merge requests for the branches you create before you are ready
|
| 98 | 98 |
to have them reviewed and considered for inclusion if you like. Until your merge
|
| 99 | 99 |
request is ready for review, the merge request title must be prefixed with the
|
| 100 |
-``WIP:`` identifier.
|
|
| 100 |
+``WIP:`` identifier. GitLab `treats this specially
|
|
| 101 |
+<https://docs.gitlab.com/ee/user/project/merge_requests/work_in_progress_merge_requests.html>`_,
|
|
| 102 |
+which helps reviewers.
|
|
| 103 |
+ |
|
| 104 |
+Consider marking a merge request as WIP again if you are taking a while to
|
|
| 105 |
+address a review point. This signals that the next action is on you, and it
|
|
| 106 |
+won't appear in a reviewer's search for non-WIP merge requests to review.
|
|
| 101 | 107 |
|
| 102 | 108 |
|
| 103 | 109 |
Organized commits
|
| ... | ... | @@ -122,6 +128,12 @@ If a commit in your branch modifies behavior such that a test must also |
| 122 | 128 |
be changed to match the new behavior, then the tests should be updated
|
| 123 | 129 |
with the same commit, so that every commit passes its own tests.
|
| 124 | 130 |
|
| 131 |
+These principles apply whenever a branch is non-WIP. So for example, don't push
|
|
| 132 |
+'fixup!' commits when addressing review comments; instead, amend the commits
|
|
| 133 |
+directly before pushing. GitLab has `good support
|
|
| 134 |
+<https://docs.gitlab.com/ee/user/project/merge_requests/versions.html>`_ for
|
|
| 135 |
+diffing between pushes, so 'fixup!' commits are not necessary for reviewers.
|
|
| 136 |
+ |
|
| 125 | 137 |
|
| 126 | 138 |
Commit messages
|
| 127 | 139 |
~~~~~~~~~~~~~~~
|
| ... | ... | @@ -144,6 +156,16 @@ number must be referenced in the commit message. |
| 144 | 156 |
|
| 145 | 157 |
Fixes #123
|
| 146 | 158 |
|
| 159 |
+Note that the 'why' of a change is as important as the 'what'.
|
|
| 160 |
+ |
|
| 161 |
+When reviewing this, folks can suggest better alternatives when they know the
|
|
| 162 |
+'why'. Perhaps there are other ways to avoid an error when things are not
|
|
| 163 |
+frobnicated.
|
|
| 164 |
+ |
|
| 165 |
+When folks modify this code, there may be uncertainty around whether the foos
|
|
| 166 |
+should always be frobnicated. The comments, the commit message, and issue #123
|
|
| 167 |
+should shed some light on that.
|
|
| 168 |
+ |
|
| 147 | 169 |
In the case that you have a commit which necessarily modifies multiple
|
| 148 | 170 |
components, then the summary line should still mention generally what
|
| 149 | 171 |
changed (if possible), followed by a colon and a brief summary.
|
| ... | ... | @@ -18,9 +18,9 @@ |
| 18 | 18 |
# Tristan Maat <tristan maat codethink co uk>
|
| 19 | 19 |
|
| 20 | 20 |
import os
|
| 21 |
+import shutil
|
|
| 21 | 22 |
import subprocess
|
| 22 | 23 |
|
| 23 |
-from .. import _site
|
|
| 24 | 24 |
from .. import utils
|
| 25 | 25 |
from ..sandbox import SandboxDummy
|
| 26 | 26 |
|
| ... | ... | @@ -37,12 +37,19 @@ class Linux(Platform): |
| 37 | 37 |
self._gid = os.getegid()
|
| 38 | 38 |
|
| 39 | 39 |
self._have_fuse = os.path.exists("/dev/fuse")
|
| 40 |
- self._bwrap_exists = _site.check_bwrap_version(0, 0, 0)
|
|
| 41 |
- self._have_good_bwrap = _site.check_bwrap_version(0, 1, 2)
|
|
| 42 | 40 |
|
| 43 |
- self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
|
|
| 41 |
+ bwrap_version = self._get_bwrap_version()
|
|
| 44 | 42 |
|
| 45 |
- self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
|
|
| 43 |
+ if bwrap_version is None:
|
|
| 44 |
+ self._bwrap_exists = False
|
|
| 45 |
+ self._have_good_bwrap = False
|
|
| 46 |
+ self._die_with_parent_available = False
|
|
| 47 |
+ else:
|
|
| 48 |
+ self._bwrap_exists = True
|
|
| 49 |
+ self._have_good_bwrap = (0, 1, 2) <= bwrap_version
|
|
| 50 |
+ self._die_with_parent_available = (0, 1, 8) <= bwrap_version
|
|
| 51 |
+ |
|
| 52 |
+ self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
|
|
| 46 | 53 |
|
| 47 | 54 |
if self._local_sandbox_available:
|
| 48 | 55 |
self._user_ns_available = self._check_user_ns_available()
|
| ... | ... | @@ -112,3 +119,21 @@ class Linux(Platform): |
| 112 | 119 |
output = ''
|
| 113 | 120 |
|
| 114 | 121 |
return output == 'root'
|
| 122 |
+ |
|
| 123 |
+ def _get_bwrap_version(self):
|
|
| 124 |
+ # Get the current bwrap version
|
|
| 125 |
+ #
|
|
| 126 |
+ # returns None if no bwrap was found
|
|
| 127 |
+ # otherwise returns a tuple of 3 int: major, minor, patch
|
|
| 128 |
+ bwrap_path = shutil.which('bwrap')
|
|
| 129 |
+ |
|
| 130 |
+ if not bwrap_path:
|
|
| 131 |
+ return None
|
|
| 132 |
+ |
|
| 133 |
+ cmd = [bwrap_path, "--version"]
|
|
| 134 |
+ try:
|
|
| 135 |
+ version = str(subprocess.check_output(cmd).split()[1], "utf-8")
|
|
| 136 |
+ except subprocess.CalledProcessError:
|
|
| 137 |
+ return None
|
|
| 138 |
+ |
|
| 139 |
+ return tuple(int(x) for x in version.split("."))
|
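
For reference, here is a standalone sketch (not part of the diff above) of the
version handling that the new ``_get_bwrap_version()`` relies on: the
``bwrap --version`` output is parsed into an integer tuple, and Python's
lexicographic tuple comparison then provides the feature checks. The sample
output string below is only an assumed example of what bwrap prints.

.. code:: python

   # Assumed example of `bwrap --version` output; the real value comes from
   # subprocess.check_output([bwrap_path, "--version"]).
   sample_output = b"bubblewrap 0.3.1"

   # Same parsing as _get_bwrap_version(): take the second whitespace-separated
   # token, split on dots, convert each component to an int.
   version = tuple(int(x) for x in sample_output.split()[1].decode("utf-8").split("."))

   print(version)               # (0, 3, 1)
   print((0, 1, 2) <= version)  # True -> self._have_good_bwrap
   print((0, 1, 8) <= version)  # True -> self._die_with_parent_available
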
| ... | ... | @@ -18,8 +18,6 @@ |
| 18 | 18 |
# Tristan Van Berkom <tristan vanberkom codethink co uk>
|
| 19 | 19 |
|
| 20 | 20 |
import os
|
| 21 |
-import shutil
|
|
| 22 |
-import subprocess
|
|
| 23 | 21 |
|
| 24 | 22 |
#
|
| 25 | 23 |
# Private module declaring some info about where the buildstream
|
| ... | ... | @@ -46,44 +44,3 @@ build_all_template = os.path.join(root, 'data', 'build-all.sh.in') |
| 46 | 44 |
|
| 47 | 45 |
# Module building script template
|
| 48 | 46 |
build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
|
| 49 |
- |
|
| 50 |
-# Cached bwrap version
|
|
| 51 |
-_bwrap_major = None
|
|
| 52 |
-_bwrap_minor = None
|
|
| 53 |
-_bwrap_patch = None
|
|
| 54 |
- |
|
| 55 |
- |
|
| 56 |
-# check_bwrap_version()
|
|
| 57 |
-#
|
|
| 58 |
-# Checks the version of installed bwrap against the requested version
|
|
| 59 |
-#
|
|
| 60 |
-# Args:
|
|
| 61 |
-# major (int): The required major version
|
|
| 62 |
-# minor (int): The required minor version
|
|
| 63 |
-# patch (int): The required patch level
|
|
| 64 |
-#
|
|
| 65 |
-# Returns:
|
|
| 66 |
-# (bool): Whether installed bwrap meets the requirements
|
|
| 67 |
-#
|
|
| 68 |
-def check_bwrap_version(major, minor, patch):
|
|
| 69 |
- # pylint: disable=global-statement
|
|
| 70 |
- |
|
| 71 |
- global _bwrap_major
|
|
| 72 |
- global _bwrap_minor
|
|
| 73 |
- global _bwrap_patch
|
|
| 74 |
- |
|
| 75 |
- # Parse bwrap version and save into cache, if not already cached
|
|
| 76 |
- if _bwrap_major is None:
|
|
| 77 |
- bwrap_path = shutil.which('bwrap')
|
|
| 78 |
- if not bwrap_path:
|
|
| 79 |
- return False
|
|
| 80 |
- cmd = [bwrap_path, "--version"]
|
|
| 81 |
- try:
|
|
| 82 |
- version = str(subprocess.check_output(cmd).split()[1], "utf-8")
|
|
| 83 |
- except subprocess.CalledProcessError:
|
|
| 84 |
- # Failure trying to run bubblewrap
|
|
| 85 |
- return False
|
|
| 86 |
- _bwrap_major, _bwrap_minor, _bwrap_patch = map(int, version.split("."))
|
|
| 87 |
- |
|
| 88 |
- # Check whether the installed version meets the requirements
|
|
| 89 |
- return (_bwrap_major, _bwrap_minor, _bwrap_patch) >= (major, minor, patch)
|
| ... | ... | @@ -62,6 +62,11 @@ variables: |
| 62 | 62 |
-o -name '*.cmxs' -o -name '*.node' ')' \
|
| 63 | 63 |
-exec sh -ec \
|
| 64 | 64 |
'read -n4 hdr <"$1" # check for elf header
|
| 65 |
+ case "$1" in
|
|
| 66 |
+ %{install-root}%{debugdir}/*)
|
|
| 67 |
+ exit 0
|
|
| 68 |
+ ;;
|
|
| 69 |
+ esac
|
|
| 65 | 70 |
if [ "$hdr" != "$(printf \\x7fELF)" ]; then
|
| 66 | 71 |
exit 0
|
| 67 | 72 |
fi
|
| ... | ... | @@ -1410,16 +1410,9 @@ class Element(Plugin): |
| 1410 | 1410 |
|
| 1411 | 1411 |
finally:
|
| 1412 | 1412 |
# Staging may produce directories with less than 'rwx' permissions
|
| 1413 |
- # for the owner, which will break tempfile, so we need to use chmod
|
|
| 1414 |
- # occasionally.
|
|
| 1415 |
- def make_dir_writable(fn, path, excinfo):
|
|
| 1416 |
- os.chmod(os.path.dirname(path), 0o777)
|
|
| 1417 |
- if os.path.isdir(path):
|
|
| 1418 |
- os.rmdir(path)
|
|
| 1419 |
- else:
|
|
| 1420 |
- os.remove(path)
|
|
| 1421 |
- shutil.rmtree(temp_staging_directory, onerror=make_dir_writable)
|
|
| 1422 |
- |
|
| 1413 |
+ # for the owner, which breaks tempfile. _force_rmtree will deal
|
|
| 1414 |
+ # with these.
|
|
| 1415 |
+ utils._force_rmtree(temp_staging_directory)
|
|
| 1423 | 1416 |
# Ensure deterministic mtime of sources at build time
|
| 1424 | 1417 |
vdirectory.set_deterministic_mtime()
|
| 1425 | 1418 |
# Ensure deterministic owners of sources at build time
|
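
The hunk above swaps the local ``make_dir_writable`` error handler for
``utils._force_rmtree``. As a rough illustration only, a "force rmtree" helper
of this kind typically looks like the sketch below; the real
``utils._force_rmtree`` in BuildStream may differ in detail.

.. code:: python

   import os
   import shutil
   import stat

   def force_rmtree(rootpath):
       # Staged trees can contain directories without 'rwx' for the owner,
       # which breaks a plain shutil.rmtree(); make them traversable first.
       os.chmod(rootpath, stat.S_IRWXU)
       for root, dirs, _files in os.walk(rootpath):
           for name in dirs:
               os.chmod(os.path.join(root, name), stat.S_IRWXU)
       shutil.rmtree(rootpath)
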
| ... | ... | @@ -42,4 +42,5 @@ class SandboxDummy(Sandbox): |
| 42 | 42 |
"'{}'".format(command[0]),
|
| 43 | 43 |
reason='missing-command')
|
| 44 | 44 |
|
| 45 |
- raise SandboxError("This platform does not support local builds: {}".format(self._reason))
|
|
| 45 |
+ raise SandboxError("This platform does not support local builds: {}".format(self._reason),
|
|
| 46 |
+ reason="unavailable-local-sandbox")
|
| ... | ... | @@ -201,16 +201,20 @@ class ScriptElement(Element): |
| 201 | 201 |
# Setup environment
|
| 202 | 202 |
sandbox.set_environment(self.get_environment())
|
| 203 | 203 |
|
| 204 |
+ # Tell the sandbox to mount the install root
|
|
| 205 |
+ directories = {self.__install_root: False}
|
|
| 206 |
+ |
|
| 204 | 207 |
# Mark the artifact directories in the layout
|
| 205 | 208 |
for item in self.__layout:
|
| 206 |
- if item['destination'] != '/':
|
|
| 207 |
- if item['element']:
|
|
| 208 |
- sandbox.mark_directory(item['destination'], artifact=True)
|
|
| 209 |
- else:
|
|
| 210 |
- sandbox.mark_directory(item['destination'])
|
|
| 211 |
- |
|
| 212 |
- # Tell the sandbox to mount the install root
|
|
| 213 |
- sandbox.mark_directory(self.__install_root)
|
|
| 209 |
+ destination = item['destination']
|
|
| 210 |
+ was_artifact = directories.get(destination, False)
|
|
| 211 |
+ directories[destination] = item['element'] or was_artifact
|
|
| 212 |
+ |
|
| 213 |
+ for directory, artifact in directories.items():
|
|
| 214 |
+ # Root does not need to be marked as it is always mounted
|
|
| 215 |
+ # with artifact (unless explicitly marked non-artifact)
|
|
| 216 |
+ if directory != '/':
|
|
| 217 |
+ sandbox.mark_directory(directory, artifact=artifact)
|
|
| 214 | 218 |
|
| 215 | 219 |
def stage(self, sandbox):
|
| 216 | 220 |
|
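
The reworked loop above first collects every layout destination into a dict,
marking a directory as an artifact directory if any entry mounting there comes
from an element, and only then calls ``mark_directory()`` once per directory.
A small illustration, using hypothetical layout entries:

.. code:: python

   # Hypothetical layout entries, for illustration only.
   layout = [
       {'element': 'base.bst', 'destination': '/'},
       {'element': 'input.bst', 'destination': '/buildstream/install'},
       {'element': None, 'destination': '/buildstream/install'},
   ]
   install_root = '/buildstream/install'

   directories = {install_root: False}
   for item in layout:
       destination = item['destination']
       was_artifact = directories.get(destination, False)
       directories[destination] = bool(item['element']) or was_artifact

   # Duplicate destinations collapse into one entry, and an element anywhere
   # in the list wins: {'/buildstream/install': True, '/': True}.
   # In the element itself '/' is then skipped, since the sandbox root is
   # always mounted.
   print(directories)
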
| ... | ... | @@ -23,6 +23,8 @@ import shutil |
| 23 | 23 |
|
| 24 | 24 |
import pytest
|
| 25 | 25 |
|
| 26 |
+from buildstream._platform.platform import Platform
|
|
| 27 |
+ |
|
| 26 | 28 |
|
| 27 | 29 |
def pytest_addoption(parser):
|
| 28 | 30 |
parser.addoption('--integration', action='store_true', default=False,
|
| ... | ... | @@ -52,3 +54,8 @@ def integration_cache(request): |
| 52 | 54 |
shutil.rmtree(os.path.join(cache_dir, 'artifacts'))
|
| 53 | 55 |
except FileNotFoundError:
|
| 54 | 56 |
pass
|
| 57 |
+ |
|
| 58 |
+ |
|
| 59 |
+@pytest.fixture(autouse=True)
|
|
| 60 |
+def clean_platform_cache():
|
|
| 61 |
+ Platform._instance = None
|
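
The new autouse fixture clears the cached Platform instance between tests. The
sketch below shows the kind of process-wide singleton caching this guards
against; it is an assumption about Platform's shape for illustration only, not
the real implementation.

.. code:: python

   # Simplified stand-in for buildstream._platform.platform.Platform,
   # assuming it caches a single detected instance per process.
   class Platform:
       _instance = None

       @classmethod
       def get_platform(cls):
           if cls._instance is None:
               cls._instance = cls()   # detection happens once per process
           return cls._instance

   # Without the fixture, a Platform detected by an earlier test (e.g. with
   # bwrap on PATH) would leak into tests that deliberately remove host
   # tools; resetting the cache forces re-detection in the next test:
   Platform._instance = None
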
| ... | ... | @@ -190,19 +190,34 @@ for more detail. |
| 190 | 190 |
Artifact server
|
| 191 | 191 |
~~~~~~~~~~~~~~~
|
| 192 | 192 |
If you have setup an :ref:`artifact server <artifacts>` for your
|
| 193 |
-project then it is convenient to configure this in your ``project.conf``
|
|
| 193 |
+project then it is convenient to configure the following in your ``project.conf``
|
|
| 194 | 194 |
so that users need not have any additional configuration to communicate
|
| 195 | 195 |
with an artifact share.
|
| 196 | 196 |
|
| 197 | 197 |
.. code:: yaml
|
| 198 | 198 |
|
| 199 |
+ #
|
|
| 200 |
+ # Artifacts
|
|
| 201 |
+ #
|
|
| 199 | 202 |
artifacts:
|
| 203 |
+ # A remote cache from which to download prebuilt artifacts
|
|
| 204 |
+ - url: https://foo.com/artifacts:11001
|
|
| 205 |
+ server.cert: server.crt
|
|
| 206 |
+ # A remote cache from which to upload/download built/prebuilt artifacts
|
|
| 207 |
+ - url: https://foo.com/artifacts:11002
|
|
| 208 |
+ server-cert: server.crt
|
|
| 209 |
+ client-cert: client.crt
|
|
| 210 |
+ client-key: client.key
|
|
| 200 | 211 |
|
| 201 |
- # A url from which to download prebuilt artifacts
|
|
| 202 |
- url: https://foo.com/artifacts
|
|
| 212 |
+.. note::
|
|
| 213 |
+ |
|
| 214 |
+ You can also specify a list of different caches here; earlier entries in the
|
|
| 215 |
+ list will have higher priority than later ones.
|
|
| 216 |
+ |
|
| 217 |
+The use of ports is required to distinguish between pull-only access and
|
|
| 218 |
+push/pull access. For information regarding the server/client certificates
|
|
| 219 |
+and keys, please see: :ref:`Key pair for the server <server_authentication>`.
|
|
| 203 | 220 |
|
| 204 |
-You can also specify a list of caches here; earlier entries in the list
|
|
| 205 |
-will have higher priority than later ones.
|
|
| 206 | 221 |
|
| 207 | 222 |
Remote execution
|
| 208 | 223 |
~~~~~~~~~~~~~~~~
|
| ... | ... | @@ -32,38 +32,75 @@ the supported configurations on a project wide basis are listed here. |
| 32 | 32 |
|
| 33 | 33 |
Artifact server
|
| 34 | 34 |
~~~~~~~~~~~~~~~
|
| 35 |
-The project you build will often specify a :ref:`remote artifact cache
|
|
| 36 |
-<artifacts>` already, but you may want to specify extra caches. There are two
|
|
| 37 |
-ways to do this. You can add one or more global caches:
|
|
| 35 |
+Although projects often specify a :ref:`remote artifact cache <artifacts>` in
|
|
| 36 |
+their ``project.conf``, you may also want to specify extra caches.
|
|
| 38 | 37 |
|
| 39 |
-**Example**
|
|
| 38 |
+Assuming that your host/server is reachable on the internet as ``artifacts.com``
|
|
| 39 |
+(for example), there are two ways to declare remote caches in your user
|
|
| 40 |
+configuration:
|
|
| 41 |
+ |
|
| 42 |
+1. Adding global caches:
|
|
| 40 | 43 |
|
| 41 | 44 |
.. code:: yaml
|
| 42 | 45 |
|
| 46 |
+ #
|
|
| 47 |
+ # Artifacts
|
|
| 48 |
+ #
|
|
| 43 | 49 |
artifacts:
|
| 44 |
- url: https://artifacts.com/artifacts
|
|
| 50 |
+ # Add a cache to pull from
|
|
| 51 |
+ - url: https://artifacts.com/artifacts:11001
|
|
| 52 |
+ server-cert: server.crt
|
|
| 53 |
+ # Add a cache to push/pull to/from
|
|
| 54 |
+ - url: https://artifacts.com/artifacts:11002
|
|
| 55 |
+ server-cert: server.crt
|
|
| 56 |
+ client-cert: client.crt
|
|
| 57 |
+ client-key: client.key
|
|
| 58 |
+ push: true
|
|
| 59 |
+ # Add another cache to pull from
|
|
| 60 |
+ - url: https://anothercache.com/artifacts:8080
|
|
| 61 |
+ server-cert: another_server.crt
|
|
| 62 |
+ |
|
| 63 |
+.. note::
|
|
| 45 | 64 |
|
| 46 |
-Caches listed there will be considered lower priority than those specified
|
|
| 47 |
-by the project configuration.
|
|
| 65 |
+ Caches declared here will be used by **all** BuildStream projects on the user's
|
|
| 66 |
+ machine and are considered a lower priority than those specified in the project
|
|
| 67 |
+ configuration.
|
|
| 48 | 68 |
|
| 49 |
-You can also add project-specific caches:
|
|
| 50 | 69 |
|
| 51 |
-**Example**
|
|
| 70 |
+2. Specifying caches for a specific project within the user configuration:
|
|
| 52 | 71 |
|
| 53 | 72 |
.. code:: yaml
|
| 54 | 73 |
|
| 55 | 74 |
projects:
|
| 56 | 75 |
project-name:
|
| 57 | 76 |
artifacts:
|
| 58 |
- - url: https://artifacts.com/artifacts1
|
|
| 59 |
- - url: ssh://user artifacts com/artifacts2
|
|
| 77 |
+ # Add a cache to pull from
|
|
| 78 |
+ - url: https://artifacts.com/artifacts:11001
|
|
| 79 |
+ server-cert: server.crt
|
|
| 80 |
+ # Add a cache to push/pull to/from
|
|
| 81 |
+ - url: https://artifacts.com/artifacts:11002
|
|
| 82 |
+ server-cert: server.crt
|
|
| 83 |
+ client-cert: client.crt
|
|
| 84 |
+ client-key: client.key
|
|
| 60 | 85 |
push: true
|
| 86 |
+ # Add another cache to pull from
|
|
| 87 |
+ - url: https://ourprojectcache.com/artifacts:8080
|
|
| 88 |
+ server-cert: project_server.crt
|
|
| 89 |
+ |
|
| 90 |
+ |
|
| 91 |
+.. note::
|
|
| 92 |
+ |
|
| 93 |
+ Caches listed here will be considered a higher priority than those specified
|
|
| 94 |
+ by the project. Furthermore, for a given list of URLs, earlier entries will
|
|
| 95 |
+ have higher priority.
|
|
| 96 |
+ |
|
| 97 |
+ |
|
| 98 |
+Notice that the use of different ports for the same server distinguishes between
|
|
| 99 |
+pull only access and push/pull access. For information regarding this and the
|
|
| 100 |
+server/client certificates and keys, please see:
|
|
| 101 |
+:ref:`Key pair for the server <server_authentication>`.
|
|
| 61 | 102 |
|
| 62 |
-Caches listed here will be considered higher priority than those specified
|
|
| 63 |
-by the project.
|
|
| 64 | 103 |
|
| 65 |
-If you give a list of URLs, earlier entries in the list will have higher
|
|
| 66 |
-priority than later ones.
|
|
| 67 | 104 |
|
| 68 | 105 |
Strict build plan
|
| 69 | 106 |
~~~~~~~~~~~~~~~~~
|
| ... | ... | @@ -98,6 +98,8 @@ Command reference |
| 98 | 98 |
:prog: bst-artifact-server
|
| 99 | 99 |
|
| 100 | 100 |
|
| 101 |
+.. _server_authentication:
|
|
| 102 |
+ |
|
| 101 | 103 |
Key pair for the server
|
| 102 | 104 |
~~~~~~~~~~~~~~~~~~~~~~~
|
| 103 | 105 |
|
| ... | ... | @@ -237,52 +239,12 @@ We can then check if the services are successfully running with: |
| 237 | 239 |
For more information on systemd services see:
|
| 238 | 240 |
`Creating Systemd Service Files <https://www.devdungeon.com/content/creating-systemd-service-files>`_.
|
| 239 | 241 |
|
| 240 |
-User configuration
|
|
| 241 |
-~~~~~~~~~~~~~~~~~~
|
|
| 242 |
-The user configuration for artifacts is documented with the rest
|
|
| 243 |
-of the :ref:`user configuration documentation <user_config>`.
|
|
| 244 |
- |
|
| 245 |
-Note that for self-signed certificates, the public key fields are mandatory.
|
|
| 246 |
- |
|
| 247 |
-Assuming you have the same setup used in this document, and that your
|
|
| 248 |
-host is reachable on the internet as ``artifacts.com`` (for example),
|
|
| 249 |
-then a user can use the following user configuration:
|
|
| 250 |
- |
|
| 251 |
-Pull-only:
|
|
| 252 |
- |
|
| 253 |
-.. code:: yaml
|
|
| 254 |
- |
|
| 255 |
- #
|
|
| 256 |
- # Artifacts
|
|
| 257 |
- #
|
|
| 258 |
- artifacts:
|
|
| 259 |
- |
|
| 260 |
- url: https://artifacts.com:11001
|
|
| 261 |
- |
|
| 262 |
- # Optional server certificate if not trusted by system root certificates
|
|
| 263 |
- server-cert: server.crt
|
|
| 242 |
+Declaring remote artifact caches
|
|
| 243 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
| 244 |
+Remote artifact caches can be declared within either:
|
|
| 264 | 245 |
|
| 265 |
-Pull and push:
|
|
| 266 |
- |
|
| 267 |
-.. code:: yaml
|
|
| 268 |
- |
|
| 269 |
- #
|
|
| 270 |
- # Artifacts
|
|
| 271 |
- #
|
|
| 272 |
- artifacts:
|
|
| 273 |
- |
|
| 274 |
- url: https://artifacts.com:11002
|
|
| 275 |
- |
|
| 276 |
- # Optional server certificate if not trusted by system root certificates
|
|
| 277 |
- server-cert: server.crt
|
|
| 278 |
- |
|
| 279 |
- # Optional client key pair for authentication
|
|
| 280 |
- client-key: client.key
|
|
| 281 |
- client-cert: client.crt
|
|
| 282 |
- |
|
| 283 |
- push: true
|
|
| 284 |
- |
|
| 285 |
-.. note::
|
|
| 246 |
+1. The :ref:`project configuration <project_essentials_artifacts>`, or
|
|
| 247 |
+2. The :ref:`user configuration <config_artifacts>`.
|
|
| 286 | 248 |
|
| 287 |
- Equivalent statements can be delcared in a project's configuration file
|
|
| 288 |
- (the ``project.conf``).
|
|
| 249 |
+Please follow the above links to see examples showing how we declare remote
|
|
| 250 |
+caches in both the project configuration and the user configuration, respectively.
|
| 1 |
-dd5e29baefb84f68eb4abac3a1befc332077ec4c97bb2572e57f3ca98ba46707
|
|
| \ No newline at end of file | ||
| 1 |
+ce0ddf7126d45d14f5ec1a525337c39ec8ddbbe4b0ec2ef51bae777619ed39bb
|
|
| \ No newline at end of file |
| 1 |
-99d80454cce44645597c885800edf0bf254d1c3606d869f2ccdd5043ec7685cb
|
|
| \ No newline at end of file | ||
| 1 |
+5e2a48dbeae43f6bab84071dbd02345a3aa32a473c189645ab26f3d5d6cfe547
|
|
| \ No newline at end of file |
| 1 |
-29a1252ec30dd6ae73c772381f0eb417e3874c75710d08be819f5715dcaa942b
|
|
| \ No newline at end of file | ||
| 1 |
+125d9e7dcf4f49e5f80d85b7f144b43ed43186064afc2e596e57f26cce679cf5
|
|
| \ No newline at end of file |
| 1 |
+kind: script
|
|
| 2 |
+ |
|
| 3 |
+depends:
|
|
| 4 |
+- filename: base.bst
|
|
| 5 |
+ type: build
|
|
| 6 |
+- filename: script/corruption-image.bst
|
|
| 7 |
+ type: build
|
|
| 8 |
+ |
|
| 9 |
+config:
|
|
| 10 |
+ commands:
|
|
| 11 |
+ - echo smashed >>/canary
|
| 1 |
+kind: import
|
|
| 2 |
+sources:
|
|
| 3 |
+- kind: local
|
|
| 4 |
+ path: files/canary
|
| 1 |
+kind: stack
|
|
| 2 |
+ |
|
| 3 |
+public:
|
|
| 4 |
+ bst:
|
|
| 5 |
+ integration-commands:
|
|
| 6 |
+ - echo smashed >>/canary
|
|
| 7 |
+ |
| 1 |
+kind: script
|
|
| 2 |
+ |
|
| 3 |
+depends:
|
|
| 4 |
+- filename: base.bst
|
|
| 5 |
+ type: build
|
|
| 6 |
+- filename: script/corruption-image.bst
|
|
| 7 |
+ type: build
|
|
| 8 |
+- filename: script/corruption-integration.bst
|
|
| 9 |
+ type: build
|
|
| 10 |
+ |
|
| 11 |
+variables:
|
|
| 12 |
+ install-root: "/"
|
|
| 13 |
+ |
|
| 14 |
+config:
|
|
| 15 |
+ layout:
|
|
| 16 |
+ - element: base.bst
|
|
| 17 |
+ destination: "/"
|
|
| 18 |
+ - element: script/corruption-image.bst
|
|
| 19 |
+ destination: "/"
|
|
| 20 |
+ - element: script/corruption-integration.bst
|
|
| 21 |
+ destination: "/"
|
| 1 |
+kind: compose
|
|
| 2 |
+ |
|
| 3 |
+depends:
|
|
| 4 |
+- filename: base.bst
|
|
| 5 |
+ type: build
|
|
| 6 |
+ |
|
| 7 |
+public:
|
|
| 8 |
+ bst:
|
|
| 9 |
+ split-rules:
|
|
| 10 |
+ remove:
|
|
| 11 |
+ - "/tmp/**"
|
|
| 12 |
+ - "/tmp"
|
| 1 |
+kind: filter
|
|
| 2 |
+ |
|
| 3 |
+depends:
|
|
| 4 |
+- filename: script/marked-tmpdir.bst
|
|
| 5 |
+ type: build
|
|
| 6 |
+ |
|
| 7 |
+config:
|
|
| 8 |
+ exclude:
|
|
| 9 |
+ - remove
|
|
| 10 |
+ include-orphans: True
|
|
| 11 |
+ |
|
| 12 |
+ |
| 1 |
+kind: script
|
|
| 2 |
+ |
|
| 3 |
+depends:
|
|
| 4 |
+- filename: script/no-tmpdir.bst
|
|
| 5 |
+ type: build
|
|
| 6 |
+ |
|
| 7 |
+config:
|
|
| 8 |
+ commands:
|
|
| 9 |
+ - |
|
|
| 10 |
+ mkdir -p /tmp/blah
|
| 1 |
+alive
|
| ... | ... | @@ -155,3 +155,70 @@ def test_script_layout(cli, tmpdir, datafiles): |
| 155 | 155 |
text = f.read()
|
| 156 | 156 |
|
| 157 | 157 |
assert text == "Hi\n"
|
| 158 |
+ |
|
| 159 |
+ |
|
| 160 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 161 |
+def test_regression_cache_corruption(cli, tmpdir, datafiles):
|
|
| 162 |
+ project = str(datafiles)
|
|
| 163 |
+ checkout_original = os.path.join(cli.directory, 'checkout-original')
|
|
| 164 |
+ checkout_after = os.path.join(cli.directory, 'checkout-after')
|
|
| 165 |
+ element_name = 'script/corruption.bst'
|
|
| 166 |
+ canary_element_name = 'script/corruption-image.bst'
|
|
| 167 |
+ |
|
| 168 |
+ res = cli.run(project=project, args=['build', canary_element_name])
|
|
| 169 |
+ assert res.exit_code == 0
|
|
| 170 |
+ |
|
| 171 |
+ res = cli.run(project=project, args=['checkout', canary_element_name,
|
|
| 172 |
+ checkout_original])
|
|
| 173 |
+ assert res.exit_code == 0
|
|
| 174 |
+ |
|
| 175 |
+ with open(os.path.join(checkout_original, 'canary')) as f:
|
|
| 176 |
+ assert f.read() == 'alive\n'
|
|
| 177 |
+ |
|
| 178 |
+ res = cli.run(project=project, args=['build', element_name])
|
|
| 179 |
+ assert res.exit_code == 0
|
|
| 180 |
+ |
|
| 181 |
+ res = cli.run(project=project, args=['checkout', canary_element_name,
|
|
| 182 |
+ checkout_after])
|
|
| 183 |
+ assert res.exit_code == 0
|
|
| 184 |
+ |
|
| 185 |
+ with open(os.path.join(checkout_after, 'canary')) as f:
|
|
| 186 |
+ assert f.read() == 'alive\n'
|
|
| 187 |
+ |
|
| 188 |
+ |
|
| 189 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 190 |
+def test_regression_tmpdir(cli, tmpdir, datafiles):
|
|
| 191 |
+ project = str(datafiles)
|
|
| 192 |
+ element_name = 'script/tmpdir.bst'
|
|
| 193 |
+ |
|
| 194 |
+ res = cli.run(project=project, args=['build', element_name])
|
|
| 195 |
+ assert res.exit_code == 0
|
|
| 196 |
+ |
|
| 197 |
+ |
|
| 198 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 199 |
+def test_regression_cache_corruption_2(cli, tmpdir, datafiles):
|
|
| 200 |
+ project = str(datafiles)
|
|
| 201 |
+ checkout_original = os.path.join(cli.directory, 'checkout-original')
|
|
| 202 |
+ checkout_after = os.path.join(cli.directory, 'checkout-after')
|
|
| 203 |
+ element_name = 'script/corruption-2.bst'
|
|
| 204 |
+ canary_element_name = 'script/corruption-image.bst'
|
|
| 205 |
+ |
|
| 206 |
+ res = cli.run(project=project, args=['build', canary_element_name])
|
|
| 207 |
+ assert res.exit_code == 0
|
|
| 208 |
+ |
|
| 209 |
+ res = cli.run(project=project, args=['checkout', canary_element_name,
|
|
| 210 |
+ checkout_original])
|
|
| 211 |
+ assert res.exit_code == 0
|
|
| 212 |
+ |
|
| 213 |
+ with open(os.path.join(checkout_original, 'canary')) as f:
|
|
| 214 |
+ assert f.read() == 'alive\n'
|
|
| 215 |
+ |
|
| 216 |
+ res = cli.run(project=project, args=['build', element_name])
|
|
| 217 |
+ assert res.exit_code == 0
|
|
| 218 |
+ |
|
| 219 |
+ res = cli.run(project=project, args=['checkout', canary_element_name,
|
|
| 220 |
+ checkout_after])
|
|
| 221 |
+ assert res.exit_code == 0
|
|
| 222 |
+ |
|
| 223 |
+ with open(os.path.join(checkout_after, 'canary')) as f:
|
|
| 224 |
+ assert f.read() == 'alive\n'
|
| 1 |
+kind: import
|
|
| 2 |
+sources:
|
|
| 3 |
+- kind: local
|
|
| 4 |
+ path: files/base/
|
| 1 |
+# This is the original bash
|
| 1 |
+# Project config for missing dependencies test
|
|
| 2 |
+name: test
|
|
| 3 |
+ |
|
| 4 |
+element-path: elements
|
| 1 |
+import os
|
|
| 2 |
+import pytest
|
|
| 3 |
+from tests.testutils import cli
|
|
| 4 |
+from tests.testutils.site import IS_LINUX
|
|
| 5 |
+ |
|
| 6 |
+from buildstream import _yaml
|
|
| 7 |
+from buildstream._exceptions import ErrorDomain
|
|
| 8 |
+ |
|
| 9 |
+ |
|
| 10 |
+# Project directory
|
|
| 11 |
+DATA_DIR = os.path.join(
|
|
| 12 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 13 |
+ "missing-dependencies",
|
|
| 14 |
+)
|
|
| 15 |
+ |
|
| 16 |
+ |
|
| 17 |
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on Linux')
|
|
| 18 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 19 |
+def test_missing_brwap_has_nice_error_message(cli, datafiles):
|
|
| 20 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 21 |
+ element_path = os.path.join(project, 'elements', 'element.bst')
|
|
| 22 |
+ |
|
| 23 |
+ # Write out our test target
|
|
| 24 |
+ element = {
|
|
| 25 |
+ 'kind': 'script',
|
|
| 26 |
+ 'depends': [
|
|
| 27 |
+ {
|
|
| 28 |
+ 'filename': 'base.bst',
|
|
| 29 |
+ 'type': 'build',
|
|
| 30 |
+ },
|
|
| 31 |
+ ],
|
|
| 32 |
+ 'config': {
|
|
| 33 |
+ 'commands': [
|
|
| 34 |
+ 'false',
|
|
| 35 |
+ ],
|
|
| 36 |
+ },
|
|
| 37 |
+ }
|
|
| 38 |
+ _yaml.dump(element, element_path)
|
|
| 39 |
+ |
|
| 40 |
+ # Build without access to host tools, this should fail with a nice error
|
|
| 41 |
+ result = cli.run(
|
|
| 42 |
+ project=project, args=['build', 'element.bst'], env={'PATH': ''})
|
|
| 43 |
+ result.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
|
|
| 44 |
+ assert "not found" in result.stderr
|
|
| 45 |
+ |
|
| 46 |
+ |
|
| 47 |
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on Linux')
|
|
| 48 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 49 |
+def test_old_brwap_has_nice_error_message(cli, datafiles, tmp_path):
|
|
| 50 |
+ bwrap = tmp_path.joinpath('bin/bwrap')
|
|
| 51 |
+ bwrap.parent.mkdir()
|
|
| 52 |
+ with bwrap.open('w') as fp:
|
|
| 53 |
+ fp.write('''
|
|
| 54 |
+ #!/bin/sh
|
|
| 55 |
+ echo bubblewrap 0.0.1
|
|
| 56 |
+ '''.strip())
|
|
| 57 |
+ |
|
| 58 |
+ bwrap.chmod(0o755)
|
|
| 59 |
+ |
|
| 60 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 61 |
+ element_path = os.path.join(project, 'elements', 'element3.bst')
|
|
| 62 |
+ |
|
| 63 |
+ # Write out our test target
|
|
| 64 |
+ element = {
|
|
| 65 |
+ 'kind': 'script',
|
|
| 66 |
+ 'depends': [
|
|
| 67 |
+ {
|
|
| 68 |
+ 'filename': 'base.bst',
|
|
| 69 |
+ 'type': 'build',
|
|
| 70 |
+ },
|
|
| 71 |
+ ],
|
|
| 72 |
+ 'config': {
|
|
| 73 |
+ 'commands': [
|
|
| 74 |
+ 'false',
|
|
| 75 |
+ ],
|
|
| 76 |
+ },
|
|
| 77 |
+ }
|
|
| 78 |
+ _yaml.dump(element, element_path)
|
|
| 79 |
+ |
|
| 80 |
+ # Build without access to host tools, this should fail with a nice error
|
|
| 81 |
+ result = cli.run(
|
|
| 82 |
+ project=project,
|
|
| 83 |
+ args=['--debug', '--verbose', 'build', 'element3.bst'],
|
|
| 84 |
+ env={'PATH': str(tmp_path.joinpath('bin'))})
|
|
| 85 |
+ result.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
|
|
| 86 |
+ assert "too old" in result.stderr
|
