Valentin David pushed to branch valentindavid/remote_execution_configuration at BuildStream / buildstream
Commits:
- bb9df2a1 by Chandan Singh at 2019-01-02T11:59:35Z
- 43eaf6ee by Chandan Singh at 2019-01-02T13:42:10Z
- 3b5c8a28 by Tristan Maat at 2019-01-02T15:57:13Z
- bd60e8a9 by Javier Jardón at 2019-01-02T16:59:41Z
- 32c47d1c by Tristan Maat at 2019-01-02T17:32:09Z
- 1cbd9a73 by Chandan Singh at 2019-01-02T19:39:58Z
- 6b0cb5f3 by Chandan Singh at 2019-01-02T19:41:21Z
- 9d2d1d4f by Chandan Singh at 2019-01-03T03:31:52Z
- 8ae04283 by Tristan Van Berkom at 2019-01-03T03:31:52Z
- e7e2a5c1 by Chandan Singh at 2019-01-03T03:31:52Z
- afa0a369 by Chandan Singh at 2019-01-03T03:31:52Z
- 3fae3446 by Chandan Singh at 2019-01-03T03:44:17Z
- 1f7bc655 by Chandan Singh at 2019-01-03T03:44:17Z
- 669b55b0 by Tristan Van Berkom at 2019-01-03T15:47:27Z
- ca547f19 by Chandan Singh at 2019-01-03T19:34:49Z
- 01c3761c by Chandan Singh at 2019-01-03T20:03:32Z
- 7850e4e2 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 3c9d592d by Tristan Van Berkom at 2019-01-03T20:55:00Z
- feba48e7 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 9caa0ea0 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 91c8b634 by Tristan Van Berkom at 2019-01-03T21:04:49Z
- 892bf99e by Tristan Van Berkom at 2019-01-03T21:31:04Z
- 7c681c42 by Tristan Van Berkom at 2019-01-03T21:32:40Z
- d6689ed2 by Tristan Van Berkom at 2019-01-03T22:09:02Z
- e15278d7 by Thomas Coldrick at 2019-01-03T22:14:01Z
- a85da591 by Tristan Van Berkom at 2019-01-03T22:36:16Z
- cf0e7d26 by Jürg Billeter at 2019-01-04T07:07:59Z
- 0751fc8f by Jürg Billeter at 2019-01-04T11:27:58Z
- d0a30f89 by Valentin David at 2019-01-04T14:15:48Z
- 79ef60be by Valentin David at 2019-01-04T14:15:48Z
- 65cb17a1 by Valentin David at 2019-01-04T14:15:48Z

29 changed files:
- .gitignore
- .gitlab-ci.yml
- CONTRIBUTING.rst
- MANIFEST.in
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/complete.py
- buildstream/_ostree.py
- buildstream/_project.py
- buildstream/_signals.py
- buildstream/_yaml.py
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- doc/Makefile
- doc/source/format_project.rst
- doc/source/using_config.rst
- setup.cfg
- setup.py
- tests/frontend/buildcheckout.py
- − tests/frontend/project/elements/invalid-chars|<>-in-name.bst
- tests/testutils/http_server.py
- tests/testutils/site.py
- dev-requirements.txt → tools/dev-requirements.in
- + tools/dev-requirements.txt
- + tools/plugin-requirements.in
- + tools/plugin-requirements.txt
- + tools/requirements.in
- + tools/requirements.txt
- + tox.ini
Changes:
--- a/.gitignore
+++ b/.gitignore
@@ -17,6 +17,7 @@ tmp
 .cache
 .pytest_cache/
 *.bst/
+.tox
 
 # Pycache, in case buildstream is ran directly from within the source
 # tree

--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,4 @@
-image: buildstream/testsuite-debian:9-master-123-7ce6581b
+image: buildstream/testsuite-debian:9-5da27168-32c47d1c
 
 cache:
   key: "$CI_JOB_NAME-"
@@ -13,7 +13,7 @@ stages:
 variables:
   PYTEST_ADDOPTS: "--color=yes"
   INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
-  TEST_COMMAND: 'python3 setup.py test --index-url invalid://uri --addopts --integration'
+  TEST_COMMAND: "tox -- --color=yes --integration"
 
 #####################################################
 #                  Prepare stage                    #
@@ -68,10 +68,6 @@ source_dist:
   - mount
   - df -h
 
-  # Unpack
-  - cd dist && ./unpack.sh
-  - cd buildstream
-
   script:
   - useradd -Um buildstream
   - chown -R buildstream:buildstream .
@@ -83,7 +79,7 @@ source_dist:
   after_script:
   # Collect our reports
   - mkdir -p ${COVERAGE_DIR}
-  - cp dist/buildstream/.coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
+  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
   except:
   - schedules
   artifacts:
@@ -91,23 +87,23 @@ source_dist:
     - ${COVERAGE_DIR}
 
 tests-debian-9:
-  image: buildstream/testsuite-debian:9-master-123-7ce6581b
+  image: buildstream/testsuite-debian:9-5da27168-32c47d1c
   <<: *tests
 
 tests-fedora-27:
-  image: buildstream/testsuite-fedora:27-master-123-7ce6581b
+  image: buildstream/testsuite-fedora:27-5da27168-32c47d1c
   <<: *tests
 
 tests-fedora-28:
-  image: buildstream/testsuite-fedora:28-master-123-7ce6581b
+  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
   <<: *tests
 
 tests-ubuntu-18.04:
-  image: buildstream/testsuite-ubuntu:18.04-master-123-7ce6581b
+  image: buildstream/testsuite-ubuntu:18.04-5da27168-32c47d1c
   <<: *tests
 
 overnight-fedora-28-aarch64:
-  image: buildstream/testsuite-fedora:aarch64-28-master-123-7ce6581b
+  image: buildstream/testsuite-fedora:aarch64-28-06bab030-32a101f6
   tags:
   - aarch64
   <<: *tests
@@ -120,7 +116,7 @@ overnight-fedora-28-aarch64:
 tests-unix:
   # Use fedora here, to a) run a test on fedora and b) ensure that we
   # can get rid of ostree - this is not possible with debian-8
-  image: buildstream/testsuite-fedora:27-master-123-7ce6581b
+  image: buildstream/testsuite-fedora:27-5da27168-32c47d1c
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"
@@ -140,7 +136,7 @@ tests-unix:
 
 tests-fedora-missing-deps:
   # Ensure that tests behave nicely while missing bwrap and ostree
-  image: buildstream/testsuite-fedora:28-master-123-7ce6581b
+  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
   <<: *tests
 
   script:
@@ -155,23 +151,28 @@ tests-fedora-missing-deps:
 
   - ${TEST_COMMAND}
 
+# Lint separately from testing
+lint:
+  stage: test
+
+  before_script:
+  # Diagnostics
+  - python3 --version
+
+  script:
+  - tox -e lint
+  except:
+  - schedules
 
 # Automatically build documentation for every commit, we want to know
 # if building documentation fails even if we're not deploying it.
-# Note: We still do not enforce a consistent installation of python3-sphinx,
-# as it will significantly grow the backing image.
 docs:
   stage: test
+  variables:
+    BST_FORCE_SESSION_REBUILD: 1
   script:
-  - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
-  # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
-  - pip3 install sphinx==1.7.9
-  - pip3 install sphinx-click
-  - pip3 install sphinx_rtd_theme
-  - cd dist && ./unpack.sh && cd buildstream
-  - make BST_FORCE_SESSION_REBUILD=1 -C doc
-  - cd ../..
-  - mv dist/buildstream/doc/build/html public
+  - env BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources" tox -e docs
+  - mv doc/build/html public
   except:
   - schedules
   artifacts:
@@ -275,6 +276,7 @@ coverage:
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
     - cd dist && ./unpack.sh && cd buildstream
+    - pip3 install -r tools/requirements.txt -r tools/dev-requirements.txt
     - pip3 install --no-index .
    - mkdir report
     - cd report

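Note: the CI jobs above now delegate test execution to tox via TEST_COMMAND. Assuming tox is installed, the very same invocation can be reproduced locally from a source checkout::

    tox -- --color=yes --integration
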
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1222,27 +1222,13 @@ For further information about using the reStructuredText with sphinx, please see
 
 Building Docs
 ~~~~~~~~~~~~~
-The documentation build is not integrated into the ``setup.py`` and is
-difficult (or impossible) to do so, so there is a little bit of setup
-you need to take care of first.
-
-Before you can build the BuildStream documentation yourself, you need
-to first install ``sphinx`` along with some additional plugins and dependencies,
-using pip or some other mechanism::
-
-  # Install sphinx
-  pip3 install --user sphinx
-
-  # Install some sphinx extensions
-  pip3 install --user sphinx-click
-  pip3 install --user sphinx_rtd_theme
-
-  # Additional optional dependencies required
-  pip3 install --user arpy
+Before you can build the docs, you will need to ensure that you have installed
+the required :ref:`build dependencies <contributing_build_deps>` as mentioned
+in the testing section above.
 
 To build the documentation, just run the following::
 
-  make -C doc
+  tox -e docs
 
 This will give you a ``doc/build/html`` directory with the html docs which
 you can view in your browser locally to test.
@@ -1260,9 +1246,10 @@ will make the docs build reuse already downloaded sources::
 
   export BST_SOURCE_CACHE=~/.cache/buildstream/sources
 
-To force rebuild session html while building the doc, simply build the docs like this::
+To force rebuild session html while building the doc, simply run ``tox`` with the
+``BST_FORCE_SESSION_REBUILD`` environment variable set, like so::
 
-  make BST_FORCE_SESSION_REBUILD=1 -C doc
+  env BST_FORCE_SESSION_REBUILD=1 tox -e docs
 
 
 Man pages
@@ -1468,57 +1455,123 @@ regenerate them locally in order to build the docs.
 
 Testing
 -------
-BuildStream uses pytest for regression tests and testing out
-the behavior of newly added components.
+BuildStream uses `tox <https://tox.readthedocs.org/>`_ as a frontend to run the
+tests which are implemented using `pytest <https://pytest.org/>`_. We use
+pytest for regression tests and testing out the behavior of newly added
+components.
 
 The elaborate documentation for pytest can be found here: http://doc.pytest.org/en/latest/contents.html
 
 Don't get lost in the docs if you don't need to, follow existing examples instead.
 
 
+.. _contributing_build_deps:
+
+Installing build dependencies
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Some of BuildStream's dependencies have non-python build dependencies. When
+running tests with ``tox``, you will first need to install these dependencies.
+Exact steps to install these will depend on your operating system. Commands
+for installing them for some common distributions are listed below.
+
+For Fedora-based systems::
+
+  dnf install gcc pkg-config python3-devel cairo-gobject-devel glib2-devel gobject-introspection-devel
+
+
+For Debian-based systems::
+
+  apt install gcc pkg-config python3-dev libcairo2-dev libgirepository1.0-dev
+
+
 Running tests
 ~~~~~~~~~~~~~
-To run the tests, just type::
+To run the tests, simply navigate to the toplevel directory of your BuildStream
+checkout and run::
+
+  tox
+
+By default, the test suite will be run against every supported python version
+found on your host. If you have multiple python versions installed, you may
+want to run tests against only one version and you can do that using the ``-e``
+option when running tox::
 
-  ./setup.py test
+  tox -e py37
 
-At the toplevel.
+Linting is performed separately from testing. In order to run the linting step which
+consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
 
-When debugging a test, it can be desirable to see the stdout
-and stderr generated by a test, to do this use the ``--addopts``
-function to feed arguments to pytest as such::
+  tox -e lint
 
-  ./setup.py test --addopts -s
+.. tip::
+
+   The project specific pylint and pycodestyle configurations are stored in the
+   toplevel buildstream directory in the ``.pylintrc`` and ``setup.cfg`` files
+   respectively. These configurations can be interesting to use with IDEs and
+   other developer tooling.
+
+The output of all failing tests will always be printed in the summary, but
+if you want to observe the stdout and stderr generated by a passing test,
+you can pass the ``-s`` option to pytest as such::
+
+  tox -- -s
+
+.. tip::
+
+   The ``-s`` option is `a pytest option <https://docs.pytest.org/latest/usage.html>`_.
+
+   Any options specified before the ``--`` separator are consumed by ``tox``,
+   and any options after the ``--`` separator will be passed along to pytest.
 
 You can always abort on the first failure by running::
 
-  ./setup.py test --addopts -x
+  tox -- -x
 
 If you want to run a specific test or a group of tests, you
 can specify a prefix to match. E.g. if you want to run all of
 the frontend tests you can do::
 
-  ./setup.py test --addopts 'tests/frontend/'
+  tox -- tests/frontend/
 
 Specific tests can be chosen by using the :: delimiter after the test module.
 If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::
 
-  ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
+  tox -- tests/frontend/buildtrack.py::test_build_track
 
 We also have a set of slow integration tests that are disabled by
 default - you will notice most of them marked with SKIP in the pytest
 output. To run them, you can use::
 
-  ./setup.py test --addopts '--integration'
+  tox -- --integration
 
-By default, buildstream also runs pylint on all files. Should you want
-to run just pylint (these checks are a lot faster), you can do so
-with::
+In case BuildStream's dependencies were updated since you last ran the
+tests, you might see some errors like
+``pytest: error: unrecognized arguments: --codestyle``. If this happens, you
+will need to force ``tox`` to recreate the test environment(s). To do so, you
+can run ``tox`` with the ``-r`` or ``--recreate`` option.
+
+.. note::
+
+   By default, we do not allow use of site packages in our ``tox``
+   configuration to enable running the tests in an isolated environment.
+   If you need to enable use of site packages for whatever reason, you can
+   do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
+   not need to install any of the build dependencies mentioned above if you
+   use this approach.
+
+.. note::
+
+   While using ``tox`` is practical for developers running tests in
+   more predictable execution environments, it is still possible to
+   execute the test suite against a specific installation environment
+   using pytest directly::
+
+     ./setup.py test
 
-  ./setup.py test --addopts '-m pylint'
+   Specific options can be passed to ``pytest`` using the ``--addopts``
+   option::
 
-Alternatively, any IDE plugin that uses pytest should automatically
-detect the ``.pylintrc`` in the project's root directory.
+     ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
 
 
 Adding tests

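As a worked example of the option-splitting rule documented above (options before ``--`` are consumed by tox, options after it go to pytest), one plausible invocation runs a single interpreter, aborts on the first failure, and restricts the run to the frontend tests; ``py36`` is just one environment name from the envlist::

    tox -e py36 -- -x tests/frontend/
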
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -24,6 +24,7 @@ recursive-include doc/sessions *.run
 # Tests
 recursive-include tests *
 include conftest.py
+include tox.ini
 include .coveragerc
 include .pylintrc
 
@@ -31,7 +32,12 @@ include .pylintrc
 recursive-include buildstream/_protos *.proto
 
 # Requirements files
-include dev-requirements.txt
+include tools/requirements.in
+include tools/requirements.txt
+include tools/dev-requirements.in
+include tools/dev-requirements.txt
+include tools/plugin-requirements.in
+include tools/plugin-requirements.txt
 
 # Versioneer
 include versioneer.py

--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -34,6 +34,7 @@ from ._artifactcache import ArtifactCache
 from ._artifactcache.cascache import CASCache
 from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
 from .plugin import _plugin_lookup
+from .sandbox import SandboxRemote
 
 
 # Context()
@@ -72,6 +73,9 @@ class Context():
         # The locations from which to push and pull prebuilt artifacts
         self.artifact_cache_specs = None
 
+        # The global remote execution configuration
+        self.remote_execution_specs = None
+
         # The directory to store build logs
         self.logdir = None
 
@@ -187,7 +191,7 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'prompt', 'workspacedir',
+            'cache', 'prompt', 'workspacedir', 'remote-execution'
         ])
 
         for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
@@ -212,6 +216,8 @@ class Context():
         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
 
+        self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
+
         # Load pull build trees configuration
         self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
 
@@ -271,7 +277,8 @@ class Context():
         # Shallow validation of overrides, parts of buildstream which rely
         # on the overrides are expected to validate elsewhere.
         for _, overrides in _yaml.node_items(self._project_overrides):
-            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
+            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
+                                            'remote-execution'])
 
         profile_end(Topics.LOAD_CONTEXT, 'load')

--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -719,6 +719,11 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions):
         deps = PipelineSelection.ALL
 
     with app.initialized(session_name="Fetch"):
+        if not elements:
+            guessed_target = app.context.guess_element()
+            if guessed_target:
+                elements = (guessed_target,)
+
         app.stream.fetch(elements,
                          selection=deps,
                          except_targets=except_,
@@ -755,6 +760,11 @@ def source_track(app, elements, deps, except_, cross_junctions):
        all: All dependencies of all specified elements
     """
     with app.initialized(session_name="Track"):
+        if not elements:
+            guessed_target = app.context.guess_element()
+            if guessed_target:
+                elements = (guessed_target,)
+
         # Substitute 'none' for 'redirect' so that element redirections
         # will be done
         if deps == 'none':

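A minimal sketch of the defaulting pattern added to both commands above; ``guess_element`` here stands in for ``app.context.guess_element()``, which is assumed to return an element name or None:

    # Hypothetical helper, not BuildStream API: use the guessed element
    # only when the user named no targets on the command line.
    def resolve_targets(elements, guess_element):
        if not elements:
            guessed = guess_element()
            if guessed:
                return (guessed,)
        return elements

    # No explicit targets, so the guessed one is used:
    assert resolve_targets((), lambda: 'hello.bst') == ('hello.bst',)
    # Explicit targets always win:
    assert resolve_targets(('app.bst',), lambda: 'hello.bst') == ('app.bst',)
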
--- a/buildstream/_frontend/complete.py
+++ b/buildstream/_frontend/complete.py
@@ -31,7 +31,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
-import collections
+import collections.abc
 import copy
 import os
 
@@ -218,7 +218,7 @@ def is_incomplete_argument(current_params, cmd_param):
         return True
     if cmd_param.nargs == -1:
         return True
-    if isinstance(current_param_values, collections.Iterable) \
+    if isinstance(current_param_values, collections.abc.Iterable) \
             and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
         return True
     return False

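For context on this migration (not part of the diff itself): the abstract base classes under the bare ``collections`` module have been deprecated aliases since Python 3.3 and are removed in Python 3.10, so ``collections.abc`` is the future-proof home for such ``isinstance`` checks. A minimal runnable illustration:

    import collections.abc

    # isinstance checks against the ABC work for any mapping type,
    # including dict subclasses and third-party mappings.
    def is_mapping(value):
        return isinstance(value, collections.abc.Mapping)

    assert is_mapping({'key': 'value'})
    assert not is_mapping(['not', 'a', 'mapping'])
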
--- a/buildstream/_ostree.py
+++ b/buildstream/_ostree.py
@@ -34,7 +34,7 @@ from ._exceptions import BstError, ErrorDomain
 
 # pylint: disable=wrong-import-position,wrong-import-order
 gi.require_version('OSTree', '1.0')
-from gi.repository import GLib, Gio, OSTree  # nopep8
+from gi.repository import GLib, Gio, OSTree  # noqa
 
 
 # For users of this file, they must expect (except) it.

--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -507,7 +507,16 @@ class Project():
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
 
         # Load remote-execution configuration for this project
-        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
+        project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
+        override_specs = SandboxRemote.specs_from_config_node(
+            self._context.get_overrides(self.name), self.directory)
+
+        if override_specs is not None:
+            self.remote_execution_specs = override_specs
+        elif project_specs is not None:
+            self.remote_execution_specs = project_specs
+        else:
+            self.remote_execution_specs = self._context.remote_execution_specs
 
         # Load sandbox environment variables
         self.base_environment = _yaml.node_get(config, Mapping, 'environment')

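The resolution order implemented above, restated as a standalone sketch (the function and argument names are illustrative, not BuildStream API): per-project user overrides win, then the project's own ``project.conf``, then the global user configuration:

    # Sketch of the remote-execution spec fallback chain.
    def resolve_specs(override_specs, project_specs, context_specs):
        if override_specs is not None:
            return override_specs   # user override for this project
        if project_specs is not None:
            return project_specs    # project.conf setting
        return context_specs        # global user configuration (may be None)
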
--- a/buildstream/_signals.py
+++ b/buildstream/_signals.py
@@ -38,7 +38,7 @@ def terminator_handler(signal_, frame):
         terminator_ = terminator_stack.pop()
         try:
             terminator_()
-        except:  # pylint: disable=bare-except
+        except:  # noqa pylint: disable=bare-except
             # Ensure we print something if there's an exception raised when
             # processing the handlers. Note that the default exception
             # handler won't be called because we os._exit next, so we must

--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -287,7 +287,7 @@ def node_decorate_dict(filename, target, source, toplevel):
             provenance.members[key] = member
 
         target_value = target.get(key)
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_decorate_dict(filename, target_value, value, toplevel)
         elif isinstance(value, list):
             member.elements = node_decorate_list(filename, target_value, value, toplevel)
@@ -302,7 +302,7 @@ def node_decorate_list(filename, target, source, toplevel):
         target_item = target[idx]
         element = ElementProvenance(filename, source, idx, toplevel)
 
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             node_decorate_dict(filename, target_item, item, toplevel)
         elif isinstance(item, list):
             element.elements = node_decorate_list(filename, target_item, item, toplevel)
@@ -578,7 +578,7 @@ def is_ruamel_str(value):
 #
 def is_composite_list(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
         has_directives = False
         has_keys = False
 
@@ -847,7 +847,7 @@ def composite_dict(target, source, path=None):
 
         target_value = target.get(key)
 
-        if isinstance(source_value, collections.Mapping):
+        if isinstance(source_value, collections.abc.Mapping):
 
             # Handle creating new dicts on target side
             if target_value is None:
@@ -862,7 +862,7 @@ def composite_dict(target, source, path=None):
             # Add a new provenance member element to the containing dict
             target_provenance.members[key] = source_provenance.members[key]
 
-            if not isinstance(target_value, collections.Mapping):
+            if not isinstance(target_value, collections.abc.Mapping):
                 raise CompositeTypeError(thispath, type(target_value), type(source_value))
 
             # Recurse into matching dictionary
@@ -923,7 +923,7 @@ RoundTripRepresenter.add_representer(SanitizedDict,
 #
 def node_sanitize(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
 
         result = SanitizedDict()
 
@@ -1067,7 +1067,7 @@ class ChainMap(collections.ChainMap):
 def node_chain_copy(source):
     copy = ChainMap({}, source)
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_chain_copy(value)
         elif isinstance(value, list):
             copy[key] = list_chain_copy(value)
@@ -1080,7 +1080,7 @@ def node_chain_copy(source):
 def list_chain_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_chain_copy(item))
         elif isinstance(item, list):
             copy.append(list_chain_copy(item))
@@ -1095,7 +1095,7 @@ def list_chain_copy(source):
 def node_copy(source):
     copy = {}
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_copy(value)
         elif isinstance(value, list):
             copy[key] = list_copy(value)
@@ -1112,7 +1112,7 @@ def node_copy(source):
 def list_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_copy(item))
         elif isinstance(item, list):
             copy.append(list_copy(item))
@@ -1147,7 +1147,7 @@ def node_final_assertions(node):
             raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
                             "{}: Attempt to override non-existing list".format(provenance))
 
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)
@@ -1155,7 +1155,7 @@ def node_final_assertions(node):
 
 def list_final_assertions(values):
     for value in values:
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)

--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1568,7 +1568,7 @@ class Element(Plugin):
             utils._force_rmtree(rootdir)
 
         with _signals.terminator(cleanup_rootdir), \
-            self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox:  # nopep8
+            self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox:  # noqa
 
             # By default, the dynamic public data is the same as the static public data.
             # The plugin's assemble() method may modify this, though.

--- a/buildstream/sandbox/_sandboxremote.py
+++ b/buildstream/sandbox/_sandboxremote.py
@@ -62,10 +62,32 @@ class SandboxRemote(Sandbox):
         self.storage_url = config.storage_service['url']
         self.exec_url = config.exec_service['url']
 
+        exec_certs = {}
+        for key in ['client-cert', 'client-key', 'server-cert']:
+            if key in config.exec_service:
+                with open(config.exec_service[key], 'rb') as f:
+                    exec_certs[key] = f.read()
+
+        self.exec_credentials = grpc.ssl_channel_credentials(
+            root_certificates=exec_certs.get('server-cert'),
+            private_key=exec_certs.get('client-key'),
+            certificate_chain=exec_certs.get('client-cert'))
+
+        action_certs = {}
+        for key in ['client-cert', 'client-key', 'server-cert']:
+            if key in config.action_service:
+                with open(config.action_service[key], 'rb') as f:
+                    action_certs[key] = f.read()
+
         if config.action_service:
             self.action_url = config.action_service['url']
+            self.action_credentials = grpc.ssl_channel_credentials(
+                root_certificates=action_certs.get('server-cert'),
+                private_key=action_certs.get('client-key'),
+                certificate_chain=action_certs.get('client-cert'))
         else:
             self.action_url = None
+            self.action_credentials = None
 
         self.server_instance = config.exec_service.get('instance', None)
         self.storage_instance = config.storage_service.get('instance', None)
@@ -81,7 +103,7 @@ class SandboxRemote(Sandbox):
         self._get_context().message(Message(None, MessageType.INFO, msg))
 
     @staticmethod
-    def specs_from_config_node(config_node, basedir):
+    def specs_from_config_node(config_node, basedir=None):
 
         def require_node(config, keyname):
             val = config.get(keyname)
@@ -109,10 +131,10 @@ class SandboxRemote(Sandbox):
         remote_exec_storage_config = require_node(remote_config, 'storage-service')
         remote_exec_action_config = remote_config.get('action-cache-service', {})
 
-        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'])
+        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'] + tls_keys)
         _yaml.node_validate(remote_exec_storage_config, ['url', 'instance'] + tls_keys)
         if remote_exec_action_config:
-            _yaml.node_validate(remote_exec_action_config, ['url'])
+            _yaml.node_validate(remote_exec_action_config, ['url'] + tls_keys)
         else:
             remote_config['action-service'] = None
 
@@ -135,6 +157,19 @@ class SandboxRemote(Sandbox):
                                 "remote-execution configuration. Your config is missing '{}'."
                                 .format(str(provenance), tls_keys, key))
 
+        def resolve_path(path):
+            if basedir and path:
+                return os.path.join(basedir, path)
+            else:
+                return path
+
+        for key in tls_keys:
+            for d in (remote_config['execution-service'],
+                      remote_config['storage-service'],
+                      remote_exec_action_config):
+                if key in d:
+                    d[key] = resolve_path(d[key])
+
         spec = RemoteExecutionSpec(remote_config['execution-service'],
                                    remote_config['storage-service'],
                                    remote_exec_action_config)
@@ -295,6 +330,8 @@ class SandboxRemote(Sandbox):
                                "for example: http://buildservice:50051.")
         if url.scheme == 'http':
             channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
+        elif url.scheme == 'https':
+            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
         else:
             raise SandboxError("Remote execution currently only supports the 'http' protocol "
                                "and '{}' was supplied.".format(url.scheme))
@@ -352,11 +389,11 @@ class SandboxRemote(Sandbox):
         if not url.port:
             raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
                                "for example: http://buildservice:50051.")
-        if not url.scheme == "http":
-            raise SandboxError("Currently only support http for the action cache"
-                               "and {} was supplied".format(url.scheme))
+        if url.scheme == 'http':
+            channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
+        elif url.scheme == 'https':
+            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)
 
-        channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
         request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
         stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
         try:

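A condensed sketch of the TLS channel setup this diff introduces, assuming PEM files on disk like the ``server-cert``/``client-key``/``client-cert`` options above (the helper name and argument names are illustrative, not BuildStream API):

    import grpc

    def make_secure_channel(hostname, port, server_cert=None,
                            client_key=None, client_cert=None):
        # Each argument is a path to a PEM file; None simply omits that
        # credential, which grpc.ssl_channel_credentials() accepts.
        def read(path):
            if path is None:
                return None
            with open(path, 'rb') as f:
                return f.read()

        credentials = grpc.ssl_channel_credentials(
            root_certificates=read(server_cert),
            private_key=read(client_key),
            certificate_chain=read(client_cert))
        return grpc.secure_channel('{}:{}'.format(hostname, port), credentials)
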
| 6 | 6 |
# we dont use the standard `sphinx-build` and `sphinx-apidoc` entry points.
|
| 7 | 7 |
#
|
| 8 | 8 |
# The following technique works as long as sphinx is installed for python3,
|
| 9 |
-# regardless of the entry point which might be in /usr/bin or PATH, but
|
|
| 10 |
-# will stop working in sphinx >= 2.0. Hopefully by then, the mentioned
|
|
| 11 |
-# bug will be fixed and we can use a standard python3 specific script to
|
|
| 12 |
-# invoke sphnix.
|
|
| 9 |
+# regardless of the entry point which might be in /usr/bin or PATH.
|
|
| 10 |
+#
|
|
| 11 |
+# Since Sphinx 2.0 is planned to be Python 3-only, this workaround should not
|
|
| 12 |
+# be needed once Spinx 2.0 is released, and we upgrade to it
|
|
| 13 | 13 |
#
|
| 14 | 14 |
SPHINXOPTS =
|
| 15 | 15 |
SPHINXBUILD = python3 -m sphinx
|
--- a/doc/source/format_project.rst
+++ b/doc/source/format_project.rst
@@ -218,6 +218,7 @@ The use of ports are required to distinguish between pull only access and
 push/pull access. For information regarding the server/client certificates
 and keys, please see: :ref:`Key pair for the server <server_authentication>`.
 
+.. _project_remote_execution:
 
 Remote execution
 ~~~~~~~~~~~~~~~~
@@ -243,9 +244,6 @@ using the `remote-execution` option:
     action-cache-service:
       url: http://bar.action.com:50052
 
-The execution-service part of remote execution does not support encrypted
-connections yet, so the protocol must always be http.
-
 storage-service specifies a remote CAS store and the parameters are the
 same as those used to specify an :ref:`artifact server <artifacts>`.
 
@@ -268,6 +266,9 @@ instance names.
 
 The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
 
+Remote execution configuration can also be provided in the `user
+configuration <user_config_remote_execution>`.
+
 .. _project_essentials_mirrors:
 
 Mirrors

--- a/doc/source/using_config.rst
+++ b/doc/source/using_config.rst
@@ -100,6 +100,54 @@ pull only access and push/pull access. For information regarding this and the
 server/client certificates and keys, please see:
 :ref:`Key pair for the server <server_authentication>`.
 
+.. _user_config_remote_execution:
+
+Remote execution
+~~~~~~~~~~~~~~~~
+
+The same configuration for :ref:`remote execution <project_remote_execution>`
+in ``project.conf`` can be provided in the user configuration.
+
+There is only one remote execution configuration used per project.
+
+The project overrides will be taken in priority. The global
+configuration will be used as fallback.
+
+1. Global remote execution fallback:
+
+.. code:: yaml
+
+  remote-execution:
+    execution-service:
+      url: http://execution.fallback.example.com:50051
+      instance-name: main
+    storage-service:
+      url: https://storage.fallback.example.com:11002/
+      server-cert: /keys/server.crt
+      client-cert: /keys/client.crt
+      client-key: /keys/client.key
+      instance-name: main
+    action-cache-service:
+      url: http://action.fallback.example.com:50052
+
+2. Project override:
+
+.. code:: yaml
+
+  projects:
+    some_project:
+      remote-execution:
+        execution-service:
+          url: http://execution.some_project.example.com:50051
+          instance-name: main
+        storage-service:
+          url: https://storage.some_project.example.com:11002/
+          server-cert: /some_project_keys/server.crt
+          client-cert: /some_project_keys/client.crt
+          client-key: /some_project_keys/client.key
+          instance-name: main
+        action-cache-service:
+          url: http://action.some_project.example.com:50052
 
 
 Strict build plan

--- a/setup.cfg
+++ b/setup.cfg
@@ -11,20 +11,13 @@ parentdir_prefix = BuildStream-
 test=pytest
 
 [tool:pytest]
-addopts = --verbose --basetemp ./tmp --pep8 --pylint --pylint-rcfile=.pylintrc --cov=buildstream --cov-config .coveragerc --durations=20
+addopts = --verbose --basetemp ./tmp --cov=buildstream --cov-config .coveragerc --durations=20
 norecursedirs = tests/integration/project integration-cache tmp __pycache__ .eggs
 python_files = tests/*/*.py
-pep8maxlinelength = 119
-pep8ignore =
-    * E129
-    * E125
-    doc/source/conf.py ALL
-    tmp/* ALL
-    */lib/python3* ALL
-    */bin/* ALL
-    buildstream/_fuse/fuse.py ALL
-    .eggs/* ALL
-    *_pb2.py ALL
-    *_pb2_grpc.py ALL
 env =
     D:BST_TEST_SUITE=True
+
+[pycodestyle]
+max-line-length = 119
+ignore = E129,E125,W504,W605
+exclude = .git/**,.tox/**,doc/source/conf.py,buildstream/_fuse/fuse.py,buildstream/_protos/**/*py

--- a/setup.py
+++ b/setup.py
@@ -270,9 +270,12 @@ def get_cmdclass():
 #####################################################
 #             Gather requirements                   #
 #####################################################
-with open('dev-requirements.txt') as dev_reqs:
+with open('tools/dev-requirements.in') as dev_reqs:
     dev_requires = dev_reqs.read().splitlines()
 
+with open('tools/requirements.in') as install_reqs:
+    install_requires = install_reqs.read().splitlines()
+
 #####################################################
 #     Prepare package description from README       #
 #####################################################
@@ -334,23 +337,7 @@ setup(name='BuildStream',
               os.path.join('buildstream', 'data', 'bst')
           ])
       ],
-      install_requires=[
-          'setuptools',
-          'psutil',
-          # According to ruamel.yaml's PyPI page, we are suppose to use
-          # "<=0.15" in production until 0.15 becomes API stable.
-          # However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
-          # We know that ruamel.yaml 0.15.52 breaks API in a way that
-          # is incompatible with BuildStream.
-          #
-          # See issues #571 and #790.
-          'ruamel.yaml >= 0.15.41, < 0.15.52',
-          'pluginbase',
-          'Click >= 7.0',
-          'jinja2 >= 2.10',
-          'protobuf >= 3.5',
-          'grpcio >= 1.10',
-      ],
+      install_requires=install_requires,
       entry_points=bst_install_entry_points,
       tests_require=dev_requires,
       zip_safe=False)

--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -3,6 +3,7 @@ import tarfile
 import hashlib
 import pytest
 from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
+from tests.testutils.site import IS_WINDOWS
 
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
@@ -85,16 +86,37 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
     result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
 
 
+@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_invalid_filename_chars(datafiles, cli):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    result = cli.run(project=project, args=strict_args(['build', 'invalid-chars|<>-in-name.bst'], 'non-strict'))
+    element_name = 'invalid-chars|<>-in-name.bst'
+
+    # The name of this file contains characters that are not allowed by
+    # BuildStream, using it should raise a warning.
+    element = {
+        'kind': 'stack',
+    }
+    _yaml.dump(element, os.path.join(project, 'elements', element_name))
+
+    result = cli.run(project=project, args=strict_args(['build', element_name], 'non-strict'))
     result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
 
 
+@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_invalid_filename_chars_dep(datafiles, cli):
     project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'invalid-chars|<>-in-name.bst'
+
+    # The name of this file contains characters that are not allowed by
+    # BuildStream, and is listed as a dependency of 'invalid-chars-in-dep.bst'.
+    # This should also raise a warning.
+    element = {
+        'kind': 'stack',
+    }
+    _yaml.dump(element, os.path.join(project, 'elements', element_name))
+
     result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
     result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")

--- a/tests/frontend/project/elements/invalid-chars|<>-in-name.bst
+++ /dev/null
@@ -1,4 +0,0 @@
-kind: stack
-description: |
-  The name of this files contains characters that are not allowed by
-  BuildStream, using it should raise a warning.

--- a/tests/testutils/http_server.py
+++ b/tests/testutils/http_server.py
@@ -29,7 +29,7 @@ class RequestHandler(SimpleHTTPRequestHandler):
             expected_password, directory = self.server.users[user]
             if password == expected_password:
                 return directory
-        except:
+        except:  # noqa
             raise Unauthorized('unauthorized')
         return None

--- a/tests/testutils/site.py
+++ b/tests/testutils/site.py
@@ -52,5 +52,6 @@ except ImportError:
     HAVE_ARPY = False
 
 IS_LINUX = os.getenv('BST_FORCE_BACKEND', sys.platform).startswith('linux')
+IS_WINDOWS = (os.name == 'nt')
 
 MACHINE_ARCH = Platform.get_host_arch()

--- a/dev-requirements.txt
+++ b/tools/dev-requirements.in
@@ -1,12 +1,10 @@
 coverage == 4.4.0
-pep8
 pylint
+pycodestyle
 pytest >= 3.9
 pytest-cov >= 2.5.0
 pytest-datafiles >= 2.0
 pytest-env
-pytest-pep8
-pytest-pylint
 pytest-xdist
 pytest-timeout
 pyftpdlib

--- /dev/null
+++ b/tools/dev-requirements.txt
@@ -0,0 +1,25 @@
+coverage==4.4
+pylint==2.2.2
+pycodestyle==2.4.0
+pytest==4.0.2
+pytest-cov==2.6.0
+pytest-datafiles==2.0
+pytest-env==0.6.2
+pytest-xdist==1.25.0
+pytest-timeout==1.3.3
+pyftpdlib==1.5.4
+## The following requirements were added by pip freeze:
+apipkg==1.5
+astroid==2.1.0
+atomicwrites==1.2.1
+attrs==18.2.0
+execnet==1.5.0
+isort==4.3.4
+lazy-object-proxy==1.3.1
+mccabe==0.6.1
+more-itertools==5.0.0
+pluggy==0.8.0
+py==1.7.0
+pytest-forked==0.2
+six==1.12.0
+wrapt==1.10.11

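The ``## The following requirements were added by pip freeze:`` marker suggests these pinned files are generated from the corresponding ``.in`` files; one plausible (unverified) recipe in a clean virtualenv would be::

    pip install -r tools/dev-requirements.in
    pip freeze > tools/dev-requirements.txt
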
--- /dev/null
+++ b/tools/plugin-requirements.in
@@ -0,0 +1,2 @@
+arpy
+PyGObject

--- /dev/null
+++ b/tools/plugin-requirements.txt
@@ -0,0 +1,4 @@
+arpy==1.1.1
+PyGObject==3.30.4
+## The following requirements were added by pip freeze:
+pycairo==1.18.0

--- /dev/null
+++ b/tools/requirements.in
@@ -0,0 +1,15 @@
+Click
+grpcio >= 1.10
+Jinja2 >= 2.10
+pluginbase
+protobuf >= 3.5
+psutil
+# According to ruamel.yaml's PyPI page, we are supposed to use
+# "<=0.15" in production until 0.15 becomes API stable.
+# However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
+# We know that ruamel.yaml 0.15.52 breaks API in a way that
+# is incompatible with BuildStream.
+#
+# See issues #571 and #790.
+ruamel.yaml >= 0.15.41, < 0.15.52
+setuptools

--- /dev/null
+++ b/tools/requirements.txt
@@ -0,0 +1,18 @@
+Click==7.0
+grpcio==1.17.1
+Jinja2==2.10
+pluginbase==0.7
+protobuf==3.6.1
+psutil==5.4.8
+# According to ruamel.yaml's PyPI page, we are supposed to use
+# "<=0.15" in production until 0.15 becomes API stable.
+# However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
+# We know that ruamel.yaml 0.15.52 breaks API in a way that
+# is incompatible with BuildStream.
+#
+# See issues #571 and #790.
+ruamel.yaml==0.15.51
+setuptools==39.0.1
+## The following requirements were added by pip freeze:
+MarkupSafe==1.1.0
+six==1.12.0

--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,42 @@
+[tox]
+envlist = py35,py36,py37
+skip_missing_interpreters = true
+
+[testenv]
+commands = pytest {posargs}
+deps =
+    -rtools/requirements.txt
+    -rtools/dev-requirements.txt
+    -rtools/plugin-requirements.txt
+passenv =
+    BST_FORCE_BACKEND
+    GI_TYPELIB_PATH
+    INTEGRATION_CACHE
+
+[testenv:lint]
+commands =
+    pycodestyle
+    pylint buildstream
+deps =
+    -rtools/requirements.txt
+    -rtools/dev-requirements.txt
+    -rtools/plugin-requirements.txt
+
+[testenv:docs]
+commands =
+    make -C doc
+# Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
+deps =
+    sphinx==1.7.9
+    sphinx-click
+    sphinx_rtd_theme
+    -rtools/requirements.txt
+    -rtools/plugin-requirements.txt
+passenv =
+    BST_FORCE_SESSION_REBUILD
+    BST_SOURCE_CACHE
+    HOME
+    LANG
+    LC_ALL
+whitelist_externals =
+    make
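Putting the new tox configuration together, a few example invocations (environment names are taken from the envlist and testenv sections above)::

    tox                    # run pytest under py35, py36 and py37, skipping missing interpreters
    tox -e lint            # run pycodestyle and pylint only
    tox -e docs            # build the documentation via make -C doc
    tox -r -e py37 -- -s   # recreate the py37 environment, then run pytest with -s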