Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream
Commits:
- 1cbd9a73 by Chandan Singh at 2019-01-02T19:39:58Z
- 6b0cb5f3 by Chandan Singh at 2019-01-02T19:41:21Z
- 9d2d1d4f by Chandan Singh at 2019-01-03T03:31:52Z
- 8ae04283 by Tristan Van Berkom at 2019-01-03T03:31:52Z
- e7e2a5c1 by Chandan Singh at 2019-01-03T03:31:52Z
- afa0a369 by Chandan Singh at 2019-01-03T03:31:52Z
- 3fae3446 by Chandan Singh at 2019-01-03T03:44:17Z
- 1f7bc655 by Chandan Singh at 2019-01-03T03:44:17Z
- 669b55b0 by Tristan Van Berkom at 2019-01-03T15:47:27Z
- ca547f19 by Chandan Singh at 2019-01-03T19:34:49Z
- 01c3761c by Chandan Singh at 2019-01-03T20:03:32Z
- 7850e4e2 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 3c9d592d by Tristan Van Berkom at 2019-01-03T20:55:00Z
- feba48e7 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 9caa0ea0 by Tristan Van Berkom at 2019-01-03T20:55:00Z
- 91c8b634 by Tristan Van Berkom at 2019-01-03T21:04:49Z
- 892bf99e by Tristan Van Berkom at 2019-01-03T21:31:04Z
- 7c681c42 by Tristan Van Berkom at 2019-01-03T21:32:40Z
- d6689ed2 by Tristan Van Berkom at 2019-01-03T22:09:02Z
- e15278d7 by Thomas Coldrick at 2019-01-03T22:14:01Z
- a85da591 by Tristan Van Berkom at 2019-01-03T22:36:16Z
- cf0e7d26 by Jürg Billeter at 2019-01-04T07:07:59Z
- 0751fc8f by Jürg Billeter at 2019-01-04T11:27:58Z
- ebd965fe by Chandan Singh at 2019-01-04T15:21:35Z
- bb712886 by Chandan Singh at 2019-01-04T15:21:35Z
- b9792837 by Chandan Singh at 2019-01-04T15:21:35Z
- 45692335 by Tristan Van Berkom at 2019-01-04T15:50:14Z
- f42dcb54 by Chandan Singh at 2019-01-04T17:16:43Z
- ee29f207 by Chandan Singh at 2019-01-04T17:41:02Z
- f122cfe8 by Jürg Billeter at 2019-01-06T17:57:33Z
- 7dfb85b3 by Tristan Van Berkom at 2019-01-06T18:31:25Z
- 9913bfc6 by Tom Pollard at 2019-01-07T12:25:42Z
26 changed files:
- .gitignore
- .gitlab-ci.yml
- CONTRIBUTING.rst
- MANIFEST.in
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/complete.py
- buildstream/_yaml.py
- buildstream/element.py
- + requirements/Makefile
- dev-requirements.txt → requirements/dev-requirements.in
- + requirements/dev-requirements.txt
- + requirements/plugin-requirements.in
- + requirements/plugin-requirements.txt
- + requirements/requirements.in
- + requirements/requirements.txt
- setup.cfg
- setup.py
- tests/frontend/buildcheckout.py
- − tests/frontend/project/elements/invalid-chars|<>-in-name.bst
- + tests/integration/pushbuildtrees.py
- tests/testutils/runcli.py
- tests/testutils/site.py
- + tox.ini
Changes:

.gitignore
@@ -17,6 +17,7 @@ tmp
 .cache
 .pytest_cache/
 *.bst/
+.tox

 # Pycache, in case buildstream is ran directly from within the source
 # tree

.gitlab-ci.yml
@@ -1,4 +1,4 @@
-image: buildstream/testsuite-debian:9-06bab030-b848172c
+image: buildstream/testsuite-debian:9-5da27168-32c47d1c

 cache:
   key: "$CI_JOB_NAME-"

@@ -6,49 +6,13 @@ cache:
   - cache/

 stages:
-  - prepare
   - test
   - post

 variables:
   PYTEST_ADDOPTS: "--color=yes"
   INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
-  TEST_COMMAND: 'python3 setup.py test --index-url invalid://uri --addopts --integration'
-
-#####################################################
-#                  Prepare stage                    #
-#####################################################
-
-# Create a source distribution
-#
-source_dist:
-  stage: prepare
-  script:
-
-  # Generate the source distribution tarball
-  #
-  - python3 setup.py sdist
-  - tar -ztf dist/*
-  - tarball=$(cd dist && echo $(ls *))
-
-  # Verify that the source distribution tarball can be installed correctly
-  #
-  - pip3 install dist/*.tar.gz
-  - bst --version
-
-  # unpack tarball as `dist/buildstream` directory
-  - |
-    cat > dist/unpack.sh << EOF
-    #!/bin/sh
-    tar -zxf ${tarball}
-    mv ${tarball%.tar.gz} buildstream
-    EOF
-
-  # Make our helpers executable
-  - chmod +x dist/unpack.sh
-  artifacts:
-    paths:
-    - dist/
+  TEST_COMMAND: "tox -- --color=yes --integration"


 #####################################################

@@ -68,22 +32,17 @@ source_dist:
   - mount
   - df -h

-  # Unpack
-  - cd dist && ./unpack.sh
-  - cd buildstream
-
   script:
   - useradd -Um buildstream
   - chown -R buildstream:buildstream .

-  # Run the tests from the source distribution, We run as a simple
-  # user to test for permission issues
+  # Run the tests as a simple user to test for permission issues
   - su buildstream -c "${TEST_COMMAND}"

   after_script:
   # Collect our reports
   - mkdir -p ${COVERAGE_DIR}
-  - cp dist/buildstream/.coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
+  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
   except:
   - schedules
   artifacts:

@@ -91,19 +50,19 @@ source_dist:
     - ${COVERAGE_DIR}

 tests-debian-9:
-  image: buildstream/testsuite-debian:9-06bab030-b848172c
+  image: buildstream/testsuite-debian:9-5da27168-32c47d1c
   <<: *tests

 tests-fedora-27:
-  image: buildstream/testsuite-fedora:27-06bab030-b848172c
+  image: buildstream/testsuite-fedora:27-5da27168-32c47d1c
   <<: *tests

 tests-fedora-28:
-  image: buildstream/testsuite-fedora:28-06bab030-b848172c
+  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
   <<: *tests

 tests-ubuntu-18.04:
-  image: buildstream/testsuite-ubuntu:18.04-06bab030-b848172c
+  image: buildstream/testsuite-ubuntu:18.04-5da27168-32c47d1c
   <<: *tests

 overnight-fedora-28-aarch64:

@@ -120,7 +79,7 @@ overnight-fedora-28-aarch64:
 tests-unix:
   # Use fedora here, to a) run a test on fedora and b) ensure that we
   # can get rid of ostree - this is not possible with debian-8
-  image: buildstream/testsuite-fedora:27-06bab030-b848172c
+  image: buildstream/testsuite-fedora:27-5da27168-32c47d1c
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"

@@ -140,7 +99,7 @@ tests-unix:

 tests-fedora-missing-deps:
   # Ensure that tests behave nicely while missing bwrap and ostree
-  image: buildstream/testsuite-fedora:28-06bab030-b848172c
+  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
   <<: *tests

   script:

@@ -155,23 +114,28 @@ tests-fedora-missing-deps:

   - ${TEST_COMMAND}

+# Lint separately from testing
+lint:
+  stage: test
+
+  before_script:
+  # Diagnostics
+  - python3 --version
+
+  script:
+  - tox -e lint
+  except:
+  - schedules

 # Automatically build documentation for every commit, we want to know
 # if building documentation fails even if we're not deploying it.
-# Note: We still do not enforce a consistent installation of python3-sphinx,
-# as it will significantly grow the backing image.
 docs:
   stage: test
+  variables:
+    BST_FORCE_SESSION_REBUILD: 1
   script:
-  - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
-  # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
-  - pip3 install sphinx==1.7.9
-  - pip3 install sphinx-click
-  - pip3 install sphinx_rtd_theme
-  - cd dist && ./unpack.sh && cd buildstream
-  - make BST_FORCE_SESSION_REBUILD=1 -C doc
-  - cd ../..
-  - mv dist/buildstream/doc/build/html public
+  - env BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources" tox -e docs
+  - mv doc/build/html public
   except:
   - schedules
   artifacts:

@@ -191,7 +155,8 @@ docs:
   scheduler:
     fetchers: 2
   EOF
-  - (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
+  - pip3 install -r requirements/requirements.txt -r requirements/plugin-requirements.txt
+  - pip3 install --no-index .
   - pip3 install --user -e ${BST_EXT_URL}@${BST_EXT_REF}#egg=bst_ext
   - git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
   - git -C freedesktop-sdk checkout ${FD_SDK_REF}

@@ -274,12 +239,12 @@ coverage:
   stage: post
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
-  - cd dist && ./unpack.sh && cd buildstream
+  - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
   - pip3 install --no-index .
   - mkdir report
   - cd report
-  - cp ../../../coverage-unix/coverage.* .
-  - cp ../../../coverage-linux/coverage.* .
+  - cp ../coverage-unix/coverage.* .
+  - cp ../coverage-linux/coverage.* .
   - ls coverage.*
   - coverage combine --rcfile=../.coveragerc -a coverage.*
   - coverage report --rcfile=../.coveragerc -m

@@ -288,7 +253,6 @@ coverage:
   - tests-fedora-27
   - tests-fedora-28
   - tests-unix
-  - source_dist
   except:
   - schedules

@@ -297,7 +261,6 @@ coverage:
 pages:
   stage: post
   dependencies:
-  - source_dist
   - docs
   variables:
     ACME_DIR: public/.well-known/acme-challenge

CONTRIBUTING.rst
@@ -1222,27 +1222,13 @@ For further information about using the reStructuredText with sphinx, please see

 Building Docs
 ~~~~~~~~~~~~~
-The documentation build is not integrated into the ``setup.py`` and is
-difficult (or impossible) to do so, so there is a little bit of setup
-you need to take care of first.
-
-Before you can build the BuildStream documentation yourself, you need
-to first install ``sphinx`` along with some additional plugins and dependencies,
-using pip or some other mechanism::
-
-  # Install sphinx
-  pip3 install --user sphinx
-
-  # Install some sphinx extensions
-  pip3 install --user sphinx-click
-  pip3 install --user sphinx_rtd_theme
-
-  # Additional optional dependencies required
-  pip3 install --user arpy
+Before you can build the docs, you will need to ensure that you have installed
+the required :ref:`build dependencies <contributing_build_deps>` as mentioned
+in the testing section above.

 To build the documentation, just run the following::

-  make -C doc
+  tox -e docs

 This will give you a ``doc/build/html`` directory with the html docs which
 you can view in your browser locally to test.

@@ -1260,9 +1246,10 @@ will make the docs build reuse already downloaded sources::

   export BST_SOURCE_CACHE=~/.cache/buildstream/sources

-To force rebuild session html while building the doc, simply build the docs like this::
+To force a rebuild of the session html while building the docs, simply run ``tox``
+with the ``BST_FORCE_SESSION_REBUILD`` environment variable set, like so::

-  make BST_FORCE_SESSION_REBUILD=1 -C doc
+  env BST_FORCE_SESSION_REBUILD=1 tox -e docs


 Man pages
@@ -1468,57 +1455,123 @@ regenerate them locally in order to build the docs.

 Testing
 -------
-BuildStream uses pytest for regression tests and testing out
-the behavior of newly added components.
+BuildStream uses `tox <https://tox.readthedocs.org/>`_ as a frontend to run the
+tests which are implemented using `pytest <https://pytest.org/>`_. We use
+pytest for regression tests and testing out the behavior of newly added
+components.

 The elaborate documentation for pytest can be found here: http://doc.pytest.org/en/latest/contents.html

 Don't get lost in the docs if you don't need to, follow existing examples instead.


+.. _contributing_build_deps:
+
+Installing build dependencies
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Some of BuildStream's dependencies have non-python build dependencies. When
+running tests with ``tox``, you will first need to install these dependencies.
+Exact steps to install these will depend on your operating system. Commands
+for installing them for some common distributions are listed below.
+
+For Fedora-based systems::
+
+  dnf install gcc pkg-config python3-devel cairo-gobject-devel glib2-devel gobject-introspection-devel
+
+
+For Debian-based systems::
+
+  apt install gcc pkg-config python3-dev libcairo2-dev libgirepository1.0-dev
+
+
 Running tests
 ~~~~~~~~~~~~~
-To run the tests, just type::
+To run the tests, simply navigate to the toplevel directory of your BuildStream
+checkout and run::
+
+  tox
+
+By default, the test suite will be run against every supported python version
+found on your host. If you have multiple python versions installed, you may
+want to run tests against only one version; you can do that using the ``-e``
+option when running tox::
+
+  tox -e py37
+
+Linting is performed separately from testing. In order to run the linting step,
+which consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
+
+  tox -e lint

-  ./setup.py test
+.. tip::

-At the toplevel.
+   The project-specific pylint and pycodestyle configurations are stored in the
+   ``.pylintrc`` and ``setup.cfg`` files respectively, in the toplevel
+   buildstream directory. These configurations can be interesting to use with
+   IDEs and other developer tooling.

-When debugging a test, it can be desirable to see the stdout
-and stderr generated by a test, to do this use the ``--addopts``
-function to feed arguments to pytest as such::
+The output of all failing tests will always be printed in the summary, but
+if you want to observe the stdout and stderr generated by a passing test,
+you can pass the ``-s`` option to pytest as such::

-  ./setup.py test --addopts -s
+  tox -- -s
+
+.. tip::
+
+   The ``-s`` option is `a pytest option <https://docs.pytest.org/latest/usage.html>`_.
+
+   Any options specified before the ``--`` separator are consumed by ``tox``,
+   and any options after the ``--`` separator will be passed along to pytest.

 You can always abort on the first failure by running::

-  ./setup.py test --addopts -x
+  tox -- -x

 If you want to run a specific test or a group of tests, you
 can specify a prefix to match. E.g. if you want to run all of
 the frontend tests you can do::

-  ./setup.py test --addopts 'tests/frontend/'
+  tox -- tests/frontend/

 Specific tests can be chosen by using the :: delimiter after the test module.
 If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::

-  ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
+  tox -- tests/frontend/buildtrack.py::test_build_track

 We also have a set of slow integration tests that are disabled by
 default - you will notice most of them marked with SKIP in the pytest
 output. To run them, you can use::

-  ./setup.py test --addopts '--integration'
+  tox -- --integration

-By default, buildstream also runs pylint on all files. Should you want
-to run just pylint (these checks are a lot faster), you can do so
-with::
+In case BuildStream's dependencies were updated since you last ran the
+tests, you might see some errors like
+``pytest: error: unrecognized arguments: --codestyle``. If this happens, you
+will need to force ``tox`` to recreate the test environment(s). To do so, you
+can run ``tox`` with the ``-r`` or ``--recreate`` option.
+
+.. note::
+
+   By default, we do not allow use of site packages in our ``tox``
+   configuration, to enable running the tests in an isolated environment.
+   If you need to enable use of site packages for whatever reason, you can
+   do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
+   not need to install any of the build dependencies mentioned above if you
+   use this approach.
+
+.. note::

-  ./setup.py test --addopts '-m pylint'
+   While using ``tox`` is practical for developers running tests in
+   more predictable execution environments, it is still possible to
+   execute the test suite against a specific installation environment
+   using pytest directly::

-Alternatively, any IDE plugin that uses pytest should automatically
-detect the ``.pylintrc`` in the project's root directory.
+     ./setup.py test
+
+   Specific options can be passed to ``pytest`` using the ``--addopts``
+   option::
+
+     ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'


 Adding tests
1683 | 1736 |
ForceCommand BST_PROFILE=artifact-receive cd /tmp && bst-artifact-receive --pull-url https://example.com/ /home/artifacts/artifacts
|
1684 | 1737 |
|
1685 | 1738 |
|
1686 |
-The MANIFEST.in and setup.py
|
|
1687 |
-----------------------------
|
|
1688 |
-When adding a dependency to BuildStream, it's important to update the setup.py accordingly.
|
|
1689 |
- |
|
1739 |
+Managing data files
|
|
1740 |
+-------------------
|
|
1690 | 1741 |
When adding data files which need to be discovered at runtime by BuildStream, update setup.py accordingly.
|
1691 | 1742 |
|
1692 | 1743 |
When adding data files for the purpose of docs or tests, or anything that is not covered by
|
@@ -1696,3 +1747,23 @@ At any time, running the following command to create a source distribution shoul
 creating a tarball which contains everything we want it to include::

   ./setup.py sdist
+
+
+Updating BuildStream's Python dependencies
+------------------------------------------
+BuildStream's Python dependencies are listed in multiple
+`requirements files <https://pip.readthedocs.io/en/latest/reference/pip_install/#requirements-file-format>`_
+present in the ``requirements`` directory.
+
+All ``.txt`` files in this directory are generated from the corresponding
+``.in`` file, and each ``.in`` file represents a set of dependencies. For
+example, ``requirements.in`` contains all runtime dependencies of BuildStream.
+``requirements.txt`` is generated from it, and contains pinned versions of all
+runtime dependencies (including transitive dependencies) of BuildStream.
+
+When adding a new dependency to BuildStream, or updating existing dependencies,
+it is important to update the appropriate requirements file accordingly. After
+changing the ``.in`` file, run the following to update the matching ``.txt``
+file::
+
+  make -C requirements

MANIFEST.in
@@ -24,6 +24,7 @@ recursive-include doc/sessions *.run
 # Tests
 recursive-include tests *
 include conftest.py
+include tox.ini
 include .coveragerc
 include .pylintrc

@@ -31,7 +32,12 @@ include .pylintrc
 recursive-include buildstream/_protos *.proto

 # Requirements files
-include dev-requirements.txt
+include requirements/requirements.in
+include requirements/requirements.txt
+include requirements/dev-requirements.in
+include requirements/dev-requirements.txt
+include requirements/plugin-requirements.in
+include requirements/plugin-requirements.txt

 # Versioneer
 include versioneer.py

buildstream/_artifactcache/artifactcache.py
@@ -74,6 +74,7 @@ class ArtifactCache():

         self._has_fetch_remotes = False
         self._has_push_remotes = False
+        self._has_partial_push_remotes = False

         os.makedirs(self.extractdir, exist_ok=True)

@@ -398,6 +399,8 @@ class ArtifactCache():
             self._has_fetch_remotes = True
             if remote_spec.push:
                 self._has_push_remotes = True
+            if remote_spec.partial_push:
+                self._has_partial_push_remotes = True

             remotes[remote_spec.url] = CASRemote(remote_spec)

@@ -596,6 +599,31 @@ class ArtifactCache():
         remotes_for_project = self._remotes[element._get_project()]
         return any(remote.spec.push for remote in remotes_for_project)

+    # has_partial_push_remotes():
+    #
+    # Check whether any remote repositories are available for pushing
+    # non-complete artifacts
+    #
+    # Args:
+    #     element (Element): The Element to check
+    #
+    # Returns:
+    #   (bool): True if any remote repository is configured for optional
+    #           partial pushes, False otherwise
+    #
+    def has_partial_push_remotes(self, *, element=None):
+        # If there's no partial push remotes available, we can't partial push at all
+        if not self._has_partial_push_remotes:
+            return False
+        elif element is None:
+            # At least one remote is set to allow partial pushes
+            return True
+        else:
+            # Check whether the specified element's project has push remotes configured
+            # to accept partial artifact pushes
+            remotes_for_project = self._remotes[element._get_project()]
+            return any(remote.spec.partial_push for remote in remotes_for_project)
+
     # push():
     #
     # Push committed artifact to remote repository.
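
For illustration, a sketch of how the two queries are meant to combine with the
extended ``push()`` signature in the hunks below — this is not part of the diff;
``artifacts``, ``element`` and ``keys`` are placeholder names, and the
``'buildtree'`` subdir value is taken from the ``buildstream/element.py`` hunks
later in this push::

    # Hypothetical caller, mirroring what Element._push() does further down:
    # push the full artifact where possible, otherwise fall back to a partial
    # push that omits the 'buildtree' subdir.
    partial = not element._cached_buildtree()
    if artifacts.has_push_remotes(element=element) or \
            artifacts.has_partial_push_remotes(element=element):
        pushed = artifacts.push(element, keys, partial=partial, subdir='buildtree')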

@@ -603,6 +631,8 @@ class ArtifactCache():
     # Args:
     #     element (Element): The Element whose artifact is to be pushed
     #     keys (list): The cache keys to use
+    #     partial (bool): If the artifact is cached in a partial state
+    #     subdir (string): Optional subdir to not push
     #
     # Returns:
     #   (bool): True if any remote was updated, False if no pushes were required

@@ -610,12 +640,25 @@ class ArtifactCache():
     # Raises:
     #   (ArtifactError): if there was an error
     #
-    def push(self, element, keys):
+    def push(self, element, keys, partial=False, subdir=None):
         refs = [self.get_artifact_fullname(element, key) for key in list(keys)]

         project = element._get_project()

-        push_remotes = [r for r in self._remotes[project] if r.spec.push]
+        push_remotes = []
+        partial_remotes = []
+
+        # Create list of remotes to push to, given current element and partial push config
+        if not partial:
+            push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
+
+        if self._has_partial_push_remotes:
+            # Create a specific list of the remotes expecting the artifact to be pushed in a
+            # partial state, without the optional subdir if it exists locally. There is no need
+            # to attempt pushing a partial artifact to a remote that is queued to also receive
+            # a full artifact
+            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
+                               r not in push_remotes]

         pushed = False

@@ -632,6 +675,19 @@ class ArtifactCache():
                     remote.spec.url, element._get_brief_display_key()
                 ))

+        for remote in partial_remotes:
+            remote.init()
+            display_key = element._get_brief_display_key()
+            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
+
+            if self.cas.push(refs, remote, subdir=subdir):
+                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
+                pushed = True
+            else:
+                element.info("Remote ({}) already has {} partial cached".format(
+                    remote.spec.url, element._get_brief_display_key()
+                ))
+
         return pushed

     # pull():

@@ -659,14 +715,23 @@ class ArtifactCache():
                 element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))

                 if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
-                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
                     if subdir:
-                        # Attempt to extract subdir into artifact extract dir if it already exists
-                        # without containing the subdir. If the respective artifact extract dir does not
-                        # exist a complete extraction will complete.
-                        self.extract(element, key, subdir)
-                    # no need to pull from additional remotes
-                    return True
+                        if not self.contains_subdir_artifact(element, key, subdir):
+                            # The pull was expecting the specific subdir to be present, attempt
+                            # to find it in other available remotes
+                            element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
+                                         .format(display_key, remote.spec.url, subdir))
+                        else:
+                            element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
+                            # Attempt to extract subdir into artifact extract dir if it already exists
+                            # without containing the subdir. If the respective artifact extract dir does not
+                            # exist a complete extraction will complete.
+                            self.extract(element, key, subdir)
+                            # no need to pull from additional remotes
+                            return True
+                    else:
+                        element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
+                        return True
                 else:
                     element.info("Remote ({}) does not have {} cached".format(
                         remote.spec.url, element._get_brief_display_key()

buildstream/_artifactcache/cascache.py
@@ -45,7 +45,8 @@ from .. import _yaml
 _MAX_PAYLOAD_BYTES = 1024 * 1024


-class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
+class CASRemoteSpec(namedtuple('CASRemoteSpec',
+                               'url push partial_push server_cert client_key client_cert instance_name')):

     # _new_from_config_node
     #

@@ -53,9 +54,13 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
     #
     @staticmethod
     def _new_from_config_node(spec_node, basedir=None):
-        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
+        _yaml.node_validate(spec_node,
+                            ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
+                             'client-cert', 'instance_name'])
         url = _yaml.node_get(spec_node, str, 'url')
         push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
+        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
+
         if not url:
             provenance = _yaml.node_get_provenance(spec_node, 'url')
             raise LoadError(LoadErrorReason.INVALID_DATA,

@@ -85,10 +90,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: 'client-cert' was specified without 'client-key'".format(provenance))

-        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
+        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)


-CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
+CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)


 class BlobNotFound(CASError):
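
A quick illustration of the extended namedtuple (a sketch, not part of the
diff; the example URL is made up). The five trailing fields take the defaults
above, so ``partial_push`` defaults to ``False``::

    # 'allow-partial-push: true' in a remote config node sets partial_push
    spec = CASRemoteSpec('https://cache.example.com', push=True, partial_push=True)
    assert spec.partial_push
    assert spec.server_cert is None                  # trailing default
    assert CASRemoteSpec('https://cache.example.com', True).partial_push is False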

@@ -283,34 +288,45 @@ class CASCache():
     #   (bool): True if pull was successful, False if ref was not available
     #
     def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
-        try:
-            remote.init()

-            request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
-            request.key = ref
-            response = remote.ref_storage.GetReference(request)
+        tree_found = False

-            tree = remote_execution_pb2.Digest()
-            tree.hash = response.digest.hash
-            tree.size_bytes = response.digest.size_bytes
+        while True:
+            try:
+                if not tree_found:
+                    remote.init()

-            # Check if the element artifact is present, if so just fetch the subdir.
-            if subdir and os.path.exists(self.objpath(tree)):
-                self._fetch_subdir(remote, tree, subdir)
-            else:
-                # Fetch artifact, excluded_subdirs determined in pullqueue
-                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
+                    request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
+                    request.key = ref
+                    response = remote.ref_storage.GetReference(request)

-            self.set_ref(ref, tree)
+                    tree = remote_execution_pb2.Digest()
+                    tree.hash = response.digest.hash
+                    tree.size_bytes = response.digest.size_bytes

-            return True
-        except grpc.RpcError as e:
-            if e.code() != grpc.StatusCode.NOT_FOUND:
-                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
-            else:
-                return False
-        except BlobNotFound as e:
-            return False
+                # Check if the element artifact is present, if so just fetch the subdir.
+                if subdir and os.path.exists(self.objpath(tree)):
+                    self._fetch_subdir(remote, tree, subdir)
+                else:
+                    # Fetch artifact, excluded_subdirs determined in pullqueue
+                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
+
+                self.set_ref(ref, tree)
+
+                return True
+            except grpc.RpcError as e:
+                if e.code() != grpc.StatusCode.NOT_FOUND:
+                    raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
+                else:
+                    return False
+            except BlobNotFound as e:
+                if not excluded_subdirs and subdir:
+                    # The remote has the top level digest but could not complete a full pull,
+                    # attempt partial without the need to initialise and check for the artifact digest
+                    tree_found = True
+                    excluded_subdirs, subdir = subdir, excluded_subdirs
+                else:
+                    return False

     # pull_tree():
     #

@@ -355,6 +371,7 @@ class CASCache():
     # Args:
     #     refs (list): The refs to push
     #     remote (CASRemote): The remote to push to
+    #     subdir (string): Optional specific subdir to exempt from the push
     #
     # Returns:
     #   (bool): True if any remote was updated, False if no pushes were required

@@ -362,7 +379,7 @@ class CASCache():
     # Raises:
     #   (CASError): if there was an error
     #
-    def push(self, refs, remote):
+    def push(self, refs, remote, subdir=None):
         skipped_remote = True
         try:
             for ref in refs:

@@ -384,7 +401,7 @@ class CASCache():
                     # Intentionally re-raise RpcError for outer except block.
                     raise

-                self._send_directory(remote, tree)
+                self._send_directory(remote, tree, excluded_dir=subdir)

                 request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
                 request.keys.append(ref)

@@ -866,10 +883,17 @@ class CASCache():
             a += 1
             b += 1

-    def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
+    def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
         if tree.hash in reachable:
             return

+        # If looping through subdir digests, skip processing if
+        # ref path does not exist, allowing for partial objects
+        if subdir and not os.path.exists(self.objpath(tree)):
+            return
+
+        # Raises FileNotFound exception if path does not exist,
+        # which should only be entered on the top level digest
         if update_mtime:
             os.utime(self.objpath(tree))

@@ -886,9 +910,9 @@ class CASCache():
             reachable.add(filenode.digest.hash)

         for dirnode in directory.directories:
-            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
+            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)

-    def _required_blobs(self, directory_digest):
+    def _required_blobs(self, directory_digest, excluded_dir=None):
         # parse directory, and recursively add blobs
         d = remote_execution_pb2.Digest()
         d.hash = directory_digest.hash

@@ -907,7 +931,8 @@ class CASCache():
             yield d

         for dirnode in directory.directories:
-            yield from self._required_blobs(dirnode.digest)
+            if dirnode.name != excluded_dir:
+                yield from self._required_blobs(dirnode.digest)

     def _fetch_blob(self, remote, digest, stream):
         resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
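
Note that ``excluded_dir`` is only honoured at the top level: the recursive call
does not forward it, so only a direct child of the root directory (such as
``buildtree``) is exempted. A standalone sketch of that shape — ``load_directory()``
is a hypothetical helper, not BuildStream API::

    def required_blobs(directory, excluded_dir=None):
        # Yield file digests at this level, then recurse into children;
        # the exclusion deliberately applies to the first level only.
        for filenode in directory.files:
            yield filenode.digest
        for dirnode in directory.directories:
            if dirnode.name != excluded_dir:
                yield from required_blobs(load_directory(dirnode.digest))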

@@ -1029,6 +1054,7 @@ class CASCache():
         objpath = self._ensure_blob(remote, dir_digest)

         directory = remote_execution_pb2.Directory()
+
         with open(objpath, 'rb') as f:
             directory.ParseFromString(f.read())

@@ -1104,9 +1130,8 @@ class CASCache():

         assert response.committed_size == digest.size_bytes

-    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
-        required_blobs = self._required_blobs(digest)
-
+    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
+        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
         missing_blobs = dict()
         # Limit size of FindMissingBlobs request
         for required_blobs_group in _grouper(required_blobs, 512):

buildstream/_frontend/app.py
@@ -599,7 +599,7 @@ class App():
                 click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
                 try:
                     prompt = self.shell_prompt(element)
-                    self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
+                    self.stream.shell(element, Scope.BUILD, prompt, isolate=True, usebuildtree=True)
                 except BstError as e:
                     click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
             elif choice == 'log':

buildstream/_frontend/cli.py
@@ -719,6 +719,11 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions):
         deps = PipelineSelection.ALL

     with app.initialized(session_name="Fetch"):
+        if not elements:
+            guessed_target = app.context.guess_element()
+            if guessed_target:
+                elements = (guessed_target,)
+
         app.stream.fetch(elements,
                          selection=deps,
                          except_targets=except_,

@@ -755,6 +760,11 @@ def source_track(app, elements, deps, except_, cross_junctions):
        all:   All dependencies of all specified elements
     """
     with app.initialized(session_name="Track"):
+        if not elements:
+            guessed_target = app.context.guess_element()
+            if guessed_target:
+                elements = (guessed_target,)
+
         # Substitute 'none' for 'redirect' so that element redirections
         # will be done
         if deps == 'none':

buildstream/_frontend/complete.py
@@ -31,7 +31,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
-import collections
+import collections.abc
 import copy
 import os

@@ -218,7 +218,7 @@ def is_incomplete_argument(current_params, cmd_param):
         return True
     if cmd_param.nargs == -1:
         return True
-    if isinstance(current_param_values, collections.Iterable) \
+    if isinstance(current_param_values, collections.abc.Iterable) \
             and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
         return True
     return False
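
The mechanical ``collections`` to ``collections.abc`` replacements here and
throughout ``buildstream/_yaml.py`` below track a Python change: the container
ABCs moved to ``collections.abc`` in Python 3.3, and accessing them through
``collections`` raises a DeprecationWarning on Python 3.7 (and was later
removed entirely in 3.10). A minimal demonstration::

    import collections.abc

    # The ABCs must now be referenced via collections.abc
    assert isinstance({}, collections.abc.Mapping)
    assert not isinstance([], collections.abc.Mapping)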

buildstream/_yaml.py
@@ -287,7 +287,7 @@ def node_decorate_dict(filename, target, source, toplevel):
         provenance.members[key] = member

         target_value = target.get(key)
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_decorate_dict(filename, target_value, value, toplevel)
         elif isinstance(value, list):
             member.elements = node_decorate_list(filename, target_value, value, toplevel)

@@ -302,7 +302,7 @@ def node_decorate_list(filename, target, source, toplevel):
         target_item = target[idx]
         element = ElementProvenance(filename, source, idx, toplevel)

-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             node_decorate_dict(filename, target_item, item, toplevel)
         elif isinstance(item, list):
             element.elements = node_decorate_list(filename, target_item, item, toplevel)

@@ -578,7 +578,7 @@ def is_ruamel_str(value):
 #
 def is_composite_list(node):

-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
         has_directives = False
         has_keys = False

@@ -847,7 +847,7 @@ def composite_dict(target, source, path=None):

         target_value = target.get(key)

-        if isinstance(source_value, collections.Mapping):
+        if isinstance(source_value, collections.abc.Mapping):

             # Handle creating new dicts on target side
             if target_value is None:

@@ -862,7 +862,7 @@ def composite_dict(target, source, path=None):
                 # Add a new provenance member element to the containing dict
                 target_provenance.members[key] = source_provenance.members[key]

-            if not isinstance(target_value, collections.Mapping):
+            if not isinstance(target_value, collections.abc.Mapping):
                 raise CompositeTypeError(thispath, type(target_value), type(source_value))

             # Recurse into matching dictionary

@@ -923,7 +923,7 @@ RoundTripRepresenter.add_representer(SanitizedDict,
 #
 def node_sanitize(node):

-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):

         result = SanitizedDict()

@@ -1067,7 +1067,7 @@ class ChainMap(collections.ChainMap):
 def node_chain_copy(source):
     copy = ChainMap({}, source)
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_chain_copy(value)
         elif isinstance(value, list):
             copy[key] = list_chain_copy(value)

@@ -1080,7 +1080,7 @@ def node_chain_copy(source):
 def list_chain_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_chain_copy(item))
         elif isinstance(item, list):
             copy.append(list_chain_copy(item))

@@ -1095,7 +1095,7 @@ def list_chain_copy(source):
 def node_copy(source):
     copy = {}
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_copy(value)
         elif isinstance(value, list):
             copy[key] = list_copy(value)

@@ -1112,7 +1112,7 @@ def node_copy(source):
 def list_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_copy(item))
         elif isinstance(item, list):
             copy.append(list_copy(item))

@@ -1147,7 +1147,7 @@ def node_final_assertions(node):
             raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
                             "{}: Attempt to override non-existing list".format(provenance))

-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)

@@ -1155,7 +1155,7 @@ def node_final_assertions(node):

 def list_final_assertions(values):
     for value in values:
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)

buildstream/element.py
@@ -1800,13 +1800,19 @@ class Element(Plugin):
     #    (bool): True if this element does not need a push job to be created
     #
     def _skip_push(self):
+
         if not self.__artifacts.has_push_remotes(element=self):
             # No push remotes for this element's project
             return True

         # Do not push elements that aren't cached, or that are cached with a dangling buildtree
-        # artifact unless element type is expected to have an an empty buildtree directory
-        if not self._cached_buildtree():
+        # artifact unless element type is expected to have an empty buildtree directory. Check
+        # that this default behaviour is not overridden via a remote configured to allow pushing
+        # artifacts without their corresponding buildtree.
+        if not self._cached():
+            return True
+
+        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
             return True

         # Do not push tainted artifact

@@ -1817,7 +1823,8 @@ class Element(Plugin):

     # _push():
     #
-    # Push locally cached artifact to remote artifact repository.
+    # Push locally cached artifact to remote artifact repository. An attempt
+    # will be made to push partial artifacts given the current config
     #
     # Returns:
     #   (bool): True if the remote was updated, False if it already existed

@@ -1830,8 +1837,19 @@ class Element(Plugin):
             self.warn("Not pushing tainted artifact.")
             return False

-        # Push all keys used for local commit
-        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
+        # Push all keys used for local commit, this could be full or partial,
+        # given the previous _skip_push() logic. If the buildtree isn't cached,
+        # then set partial push
+
+        partial = False
+        subdir = 'buildtree'
+        if not self._cached_buildtree():
+            partial = True
+
+        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
+
+        # Artifact might be cached in the server partially with the top level ref existing.
+        # Check if we need to attempt a push of a locally cached buildtree given current config
         if not pushed:
             return False

requirements/Makefile (new file)
+# Makefile for updating BuildStream's requirements files.
+#
+
+REQUIREMENTS_IN := $(wildcard *.in)
+REQUIREMENTS_TXT := $(REQUIREMENTS_IN:.in=.txt)
+PYTHON := python3
+VENV := $(PYTHON) -m venv
+
+VENV_PIP = $(VENVDIR)/bin/pip
+
+
+.PHONY: all
+
+all: $(REQUIREMENTS_TXT)
+
+%.txt: %.in
+	$(eval VENVDIR := $(shell mktemp -d $(CURDIR)/.bst-venv.XXXXXX))
+	$(VENV) $(VENVDIR)
+	$(VENV_PIP) install -r $^
+	$(VENV_PIP) freeze -r $^ > $@
+	rm -rf $(VENVDIR)

dev-requirements.txt → requirements/dev-requirements.in
 coverage == 4.4.0
 pylint
+pycodestyle
 pytest >= 3.9
-pytest-codestyle >= 1.4.0
 pytest-cov >= 2.5.0
 pytest-datafiles >= 2.0
 pytest-env
-pytest-pylint
 pytest-xdist
 pytest-timeout
 pyftpdlib

requirements/dev-requirements.txt (new file)
+coverage==4.4
+pylint==2.2.2
+pycodestyle==2.4.0
+pytest==4.0.2
+pytest-cov==2.6.0
+pytest-datafiles==2.0
+pytest-env==0.6.2
+pytest-xdist==1.25.0
+pytest-timeout==1.3.3
+pyftpdlib==1.5.4
+## The following requirements were added by pip freeze:
+apipkg==1.5
+astroid==2.1.0
+atomicwrites==1.2.1
+attrs==18.2.0
+execnet==1.5.0
+isort==4.3.4
+lazy-object-proxy==1.3.1
+mccabe==0.6.1
+more-itertools==5.0.0
+pluggy==0.8.0
+py==1.7.0
+pytest-forked==0.2
+six==1.12.0
+wrapt==1.10.11

requirements/plugin-requirements.in (new file)
+arpy
+PyGObject

requirements/plugin-requirements.txt (new file)
+arpy==1.1.1
+PyGObject==3.30.4
+## The following requirements were added by pip freeze:
+pycairo==1.18.0

requirements/requirements.in (new file)
+Click
+grpcio >= 1.10
+Jinja2 >= 2.10
+pluginbase
+protobuf >= 3.5
+psutil
+# According to ruamel.yaml's PyPI page, we are supposed to use
+# "<=0.15" in production until 0.15 becomes API stable.
+# However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
+# We know that ruamel.yaml 0.15.52 breaks API in a way that
+# is incompatible with BuildStream.
+#
+# See issues #571 and #790.
+ruamel.yaml >= 0.15.41, < 0.15.52
+setuptools

requirements/requirements.txt (new file)
+Click==7.0
+grpcio==1.17.1
+Jinja2==2.10
+pluginbase==0.7
+protobuf==3.6.1
+psutil==5.4.8
+# According to ruamel.yaml's PyPI page, we are supposed to use
+# "<=0.15" in production until 0.15 becomes API stable.
+# However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
+# We know that ruamel.yaml 0.15.52 breaks API in a way that
+# is incompatible with BuildStream.
+#
+# See issues #571 and #790.
+ruamel.yaml==0.15.51
+setuptools==39.0.1
+## The following requirements were added by pip freeze:
+MarkupSafe==1.1.0
+six==1.12.0

setup.cfg
@@ -11,11 +11,13 @@ parentdir_prefix = BuildStream-
 test=pytest

 [tool:pytest]
-addopts = --verbose --basetemp ./tmp --codestyle --pylint --pylint-rcfile=.pylintrc --cov=buildstream --cov-config .coveragerc --durations=20
+addopts = --verbose --basetemp ./tmp --cov=buildstream --cov-config .coveragerc --durations=20
 norecursedirs = tests/integration/project integration-cache tmp __pycache__ .eggs
 python_files = tests/*/*.py
-codestyle_max_line_length = 119
-codestyle_ignore = E129 E125 W504 W605
-codestyle_exclude = doc/source/conf.py buildstream/_fuse/fuse.py buildstream/_protos/**/*py
 env =
     D:BST_TEST_SUITE=True
+
+[pycodestyle]
+max-line-length = 119
+ignore = E129,E125,W504,W605
+exclude = .git/**,.tox/**,doc/source/conf.py,buildstream/_fuse/fuse.py,buildstream/_protos/**/*py

setup.py
@@ -270,9 +270,12 @@ def get_cmdclass():
 #####################################################
 #            Gather requirements                    #
 #####################################################
-with open('dev-requirements.txt') as dev_reqs:
+with open('requirements/dev-requirements.in') as dev_reqs:
     dev_requires = dev_reqs.read().splitlines()

+with open('requirements/requirements.in') as install_reqs:
+    install_requires = install_reqs.read().splitlines()
+
 #####################################################
 #     Prepare package description from README       #
 #####################################################

@@ -334,23 +337,7 @@ setup(name='BuildStream',
             os.path.join('buildstream', 'data', 'bst')
         ])
     ],
-    install_requires=[
-        'setuptools',
-        'psutil',
-        # According to ruamel.yaml's PyPI page, we are suppose to use
-        # "<=0.15" in production until 0.15 becomes API stable.
-        # However we need ruamel.yaml 0.15.41 or greater for Python 3.7.
-        # We know that ruamel.yaml 0.15.52 breaks API in a way that
-        # is incompatible with BuildStream.
-        #
-        # See issues #571 and #790.
-        'ruamel.yaml >= 0.15.41, < 0.15.52',
-        'pluginbase',
-        'Click >= 7.0',
-        'jinja2 >= 2.10',
-        'protobuf >= 3.5',
-        'grpcio >= 1.10',
-    ],
+    install_requires=install_requires,
     entry_points=bst_install_entry_points,
     tests_require=dev_requires,
     zip_safe=False)
... | ... | @@ -3,6 +3,7 @@ import tarfile |
3 | 3 |
import hashlib
|
4 | 4 |
import pytest
|
5 | 5 |
from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
|
6 |
+from tests.testutils.site import IS_WINDOWS
|
|
6 | 7 |
|
7 | 8 |
from buildstream import _yaml
|
8 | 9 |
from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
... | ... | @@ -85,16 +86,37 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks): |
85 | 86 |
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
|
86 | 87 |
|
87 | 88 |
|
89 |
+@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
|
|
88 | 90 |
@pytest.mark.datafiles(DATA_DIR)
|
89 | 91 |
def test_build_invalid_filename_chars(datafiles, cli):
|
90 | 92 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
91 |
- result = cli.run(project=project, args=strict_args(['build', 'invalid-chars|<>-in-name.bst'], 'non-strict'))
|
|
93 |
+ element_name = 'invalid-chars|<>-in-name.bst'
|
|
94 |
+ |
|
95 |
+ # The name of this file contains characters that are not allowed by
|
|
96 |
+ # BuildStream, using it should raise a warning.
|
|
97 |
+ element = {
|
|
98 |
+ 'kind': 'stack',
|
|
99 |
+ }
|
|
100 |
+ _yaml.dump(element, os.path.join(project, 'elements', element_name))
|
|
101 |
+ |
|
102 |
+ result = cli.run(project=project, args=strict_args(['build', element_name], 'non-strict'))
|
|
92 | 103 |
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
|
93 | 104 |
|
94 | 105 |
|
106 |
+@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
|
|
95 | 107 |
@pytest.mark.datafiles(DATA_DIR)
|
96 | 108 |
def test_build_invalid_filename_chars_dep(datafiles, cli):
|
97 | 109 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
110 |
+ element_name = 'invalid-chars|<>-in-name.bst'
|
|
111 |
+ |
|
112 |
+ # The name of this file contains characters that are not allowed by
|
|
113 |
+ # BuildStream, and the file is listed as a dependency of 'invalid-chars-in-dep.bst'.
|
|
114 |
+ # This should also raise a warning.
|
|
115 |
+ element = {
|
|
116 |
+ 'kind': 'stack',
|
|
117 |
+ }
|
|
118 |
+ _yaml.dump(element, os.path.join(project, 'elements', element_name))
|
|
119 |
+ |
|
98 | 120 |
result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
|
99 | 121 |
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
|
100 | 122 |
|
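Background for the two tests above: the element with '|', '<' and '>' in its name is now generated at test time (and the static .bst file removed from the repository, per the changed-files list) because those characters are illegal in Windows filenames, so a git checkout containing such a file would fail on Windows. A minimal sketch of the underlying restriction:

    import os

    # On Windows (os.name == 'nt') the filesystem rejects '|', '<' and '>'
    # in filenames, so creating such a file raises OSError.
    if os.name == 'nt':
        try:
            open('invalid-chars|<>-in-name.bst', 'w')
        except OSError as exc:
            print('rejected by the filesystem:', exc)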
1 |
-kind: stack
|
|
2 |
-description: |
|
|
3 |
- The name of this files contains characters that are not allowed by
|
|
4 |
- BuildStream, using it should raise a warning.
|
1 |
+import os
|
|
2 |
+import shutil
|
|
3 |
+import pytest
|
|
4 |
+ |
|
5 |
+from tests.testutils import cli_integration as cli, create_artifact_share
|
|
6 |
+from tests.testutils.integration import assert_contains
|
|
7 |
+from tests.testutils.site import HAVE_BWRAP, IS_LINUX
|
|
8 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
9 |
+ |
|
10 |
+ |
|
11 |
+DATA_DIR = os.path.join(
|
|
12 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
13 |
+ "project"
|
|
14 |
+)
|
|
15 |
+ |
|
16 |
+ |
|
17 |
+# Remove artifact cache & set cli.config value of pull-buildtrees
|
|
18 |
+ # to false, which is the default user configuration. The cache has to be
|
|
19 |
+# cleared as just forcefully removing the refpath leaves dangling objects.
|
|
20 |
+def default_state(cli, tmpdir, share):
|
|
21 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
22 |
+ cli.configure({
|
|
23 |
+ 'artifacts': {'url': share.repo, 'push': False},
|
|
24 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
25 |
+ 'cache': {'pull-buildtrees': False},
|
|
26 |
+ })
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+ # Tests to capture the integration of the optional push of buildtrees.
|
|
30 |
+# The behaviour should encompass pushing artifacts that are already cached
|
|
31 |
+# without a buildtree as well as artifacts that are cached with their buildtree.
|
|
32 |
+# This option is handled via 'allow-partial-push' on a per artifact remote config
|
|
33 |
+# node basis. Multiple remote config nodes can point to the same url and as such can
|
|
34 |
+ # have different 'allow-partial-push' options, so tests need to cover this using project
|
|
35 |
+# confs.
|
|
36 |
+@pytest.mark.integration
|
|
37 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
38 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
39 |
+def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
|
|
40 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
41 |
+ element_name = 'autotools/amhello.bst'
|
|
42 |
+ |
|
43 |
+ # Create artifact shares for pull & push testing
|
|
44 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
|
|
45 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
|
|
46 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3,\
|
|
47 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share4')) as share4:
|
|
48 |
+ |
|
49 |
+ cli.configure({
|
|
50 |
+ 'artifacts': {'url': share1.repo, 'push': True},
|
|
51 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
52 |
+ })
|
|
53 |
+ |
|
54 |
+ cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
|
|
55 |
+ {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
|
|
56 |
+ |
|
57 |
+ # Build the autotools element, check it was pushed, then delete the local copy.
|
|
58 |
+ # As share2 has push & allow-partial-push set to true, it
|
|
59 |
+ # should have pushed the artifacts, without the cached buildtrees,
|
|
60 |
+ # to it.
|
|
61 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
62 |
+ assert result.exit_code == 0
|
|
63 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
64 |
+ elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
65 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
66 |
+ elementdigest.hash, 'buildtree')
|
|
67 |
+ assert os.path.isdir(buildtreedir)
|
|
68 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
69 |
+ assert element_name in result.get_pushed_elements()
|
|
70 |
+ assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
71 |
+ assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
72 |
+ default_state(cli, tmpdir, share1)
|
|
73 |
+ |
|
74 |
+ # Check that after explicitly pulling an artifact without its buildtree,
|
|
75 |
+ # we can push it to another remote that is configured to accept the partial
|
|
76 |
+ # artifact.
|
|
77 |
+ result = cli.run(project=project, args=['pull', element_name])
|
|
78 |
+ assert element_name in result.get_pulled_elements()
|
|
79 |
+ cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
|
|
80 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
81 |
+ assert not os.path.isdir(buildtreedir)
|
|
82 |
+ result = cli.run(project=project, args=['push', element_name])
|
|
83 |
+ assert result.exit_code == 0
|
|
84 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
85 |
+ assert element_name not in result.get_pushed_elements()
|
|
86 |
+ assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
87 |
+ default_state(cli, tmpdir, share3)
|
|
88 |
+ |
|
89 |
+ # Delete the local cache and pull the partial artifact from share 3,
|
|
90 |
+ # this should not include the buildtree when extracted locally, even when
|
|
91 |
+ # pull-buildtrees is given as a cli parameter, as no available remote will
|
|
92 |
+ # contain the buildtree.
|
|
93 |
+ assert not os.path.isdir(buildtreedir)
|
|
94 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
95 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
96 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
97 |
+ assert not os.path.isdir(buildtreedir)
|
|
98 |
+ default_state(cli, tmpdir, share3)
|
|
99 |
+ |
|
100 |
+ # Delete the local cache and attempt to pull a 'full' artifact, including its
|
|
101 |
+ # buildtree. As before, share3, being the first listed remote, will not have
|
|
102 |
+ # the buildtree available and should spawn a partial pull. Having share1 as the
|
|
103 |
+ # second available remote should allow the buildtree to be pulled, thus 'completing'
|
|
104 |
+ # the artifact.
|
|
105 |
+ cli.configure({'artifacts': [{'url': share3.repo, 'push': True, 'allow-partial-push': True},
|
|
106 |
+ {'url': share1.repo, 'push': True}]})
|
|
107 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
108 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
109 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
110 |
+ assert element_name in result.get_pulled_elements()
|
|
111 |
+ assert "Attempting to retrieve buildtree from remotes" in result.stderr
|
|
112 |
+ assert os.path.isdir(buildtreedir)
|
|
113 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
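To make the per-remote option concrete: as the comment at the top of this test notes, several remote config nodes may point at the same url with different 'allow-partial-push' settings. A hypothetical configuration illustrating that case, using the same cli.configure fixture as the test (the url is a placeholder):

    cli.configure({'artifacts': [
        # Full pushes only: the buildtree must be cached locally to push here.
        {'url': 'https://cache.example.com', 'push': True},
        # Partial pushes allowed: artifacts may be pushed without buildtrees.
        {'url': 'https://cache.example.com', 'push': True, 'allow-partial-push': True},
    ]})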
... | ... | @@ -191,6 +191,13 @@ class Result(): |
191 | 191 |
|
192 | 192 |
return list(pushed)
|
193 | 193 |
|
194 |
+ def get_partial_pushed_elements(self):
|
|
195 |
+ pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
|
|
196 |
+ if pushed is None:
|
|
197 |
+ return []
|
|
198 |
+ |
|
199 |
+ return list(pushed)
|
|
200 |
+ |
|
194 | 201 |
def get_pulled_elements(self):
|
195 | 202 |
pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
|
196 | 203 |
if pulled is None:
|
... | ... | @@ -198,6 +205,13 @@ class Result(): |
198 | 205 |
|
199 | 206 |
return list(pulled)
|
200 | 207 |
|
208 |
+ def get_partial_pulled_elements(self):
|
|
209 |
+ pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
|
|
210 |
+ if pulled is None:
|
|
211 |
+ return []
|
|
212 |
+ |
|
213 |
+ return list(pulled)
|
|
214 |
+ |
|
201 | 215 |
|
202 | 216 |
class Cli():
|
203 | 217 |
|
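The two new helpers scrape the session log with the same pattern as their existing counterparts. For illustration, given a stderr line of the expected shape (the element name and digest here are made up):

    import re

    sample = "[ push:autotools/amhello.bst ] INFO    Pushed partial artifact 0f1a2b"
    found = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', sample)
    print(found)  # ['autotools/amhello.bst']

Strictly speaking, re.findall always returns a list and never None, so the None checks mirror the neighbouring methods rather than being load-bearing.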
... | ... | @@ -52,5 +52,6 @@ except ImportError: |
52 | 52 |
HAVE_ARPY = False
|
53 | 53 |
|
54 | 54 |
IS_LINUX = os.getenv('BST_FORCE_BACKEND', sys.platform).startswith('linux')
|
55 |
+IS_WINDOWS = (os.name == 'nt')
|
|
55 | 56 |
|
56 | 57 |
MACHINE_ARCH = Platform.get_host_arch()
|
1 |
+[tox]
|
|
2 |
+envlist = py35,py36,py37
|
|
3 |
+skip_missing_interpreters = true
|
|
4 |
+ |
|
5 |
+[testenv]
|
|
6 |
+commands = pytest {posargs}
|
|
7 |
+deps =
|
|
8 |
+ -rrequirements/requirements.txt
|
|
9 |
+ -rrequirements/dev-requirements.txt
|
|
10 |
+ -rrequirements/plugin-requirements.txt
|
|
11 |
+passenv =
|
|
12 |
+ BST_FORCE_BACKEND
|
|
13 |
+ GI_TYPELIB_PATH
|
|
14 |
+ INTEGRATION_CACHE
|
|
15 |
+ |
|
16 |
+[testenv:lint]
|
|
17 |
+commands =
|
|
18 |
+ pycodestyle
|
|
19 |
+ pylint buildstream
|
|
20 |
+deps =
|
|
21 |
+ -rrequirements/requirements.txt
|
|
22 |
+ -rrequirements/dev-requirements.txt
|
|
23 |
+ -rrequirements/plugin-requirements.txt
|
|
24 |
+ |
|
25 |
+[testenv:docs]
|
|
26 |
+commands =
|
|
27 |
+ make -C doc
|
|
28 |
+# Currently sphinx_rtd_theme does not support Sphinx >1.8; this breaks search functionality
|
|
29 |
+deps =
|
|
30 |
+ sphinx==1.7.9
|
|
31 |
+ sphinx-click
|
|
32 |
+ sphinx_rtd_theme
|
|
33 |
+ -rrequirements/requirements.txt
|
|
34 |
+ -rrequirements/plugin-requirements.txt
|
|
35 |
+passenv =
|
|
36 |
+ BST_FORCE_SESSION_REBUILD
|
|
37 |
+ BST_SOURCE_CACHE
|
|
38 |
+ HOME
|
|
39 |
+ LANG
|
|
40 |
+ LC_ALL
|
|
41 |
+whitelist_externals =
|
|
42 |
+ make
|
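Typical invocations for the environments defined above (arguments after `--` are forwarded to pytest via {posargs}):

    tox                               # run the test suite under py35/py36/py37
    tox -e lint                       # pycodestyle and pylint
    tox -e docs                       # build the documentation
    tox -e py36 -- -k buildcheckout   # forward a pytest option via {posargs}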