Lachlan pushed to branch lachlan/pickle-yaml-test-list-composite at BuildStream / buildstream
Commits:
-
5cdfac81
by Mathieu Bridon at 2018-09-28T09:20:20Z
-
5d3f039f
by Tom Pollard at 2018-09-28T10:34:13Z
-
14e1a3b3
by Jürg Billeter at 2018-10-01T08:18:05Z
-
232662f1
by Jürg Billeter at 2018-10-01T08:53:06Z
-
f447aedd
by Tiago Gomes at 2018-10-01T10:33:11Z
-
682dddce
by Tiago Gomes at 2018-10-01T10:35:12Z
-
fafa8136
by Tiago Gomes at 2018-10-01T10:59:54Z
-
26e1a3c7
by Jürg Billeter at 2018-10-01T14:58:06Z
-
f47895c0
by Jürg Billeter at 2018-10-01T14:58:06Z
-
cf00c0a1
by Jürg Billeter at 2018-10-01T15:32:30Z
-
5f4ae90b
by Jürg Billeter at 2018-10-02T06:34:02Z
-
0458bc4e
by Jürg Billeter at 2018-10-02T07:08:35Z
-
d5b396e0
by Phillip Smyth at 2018-10-02T16:40:18Z
-
dae842fd
by Phillip Smyth at 2018-10-02T17:14:09Z
-
8dc16d3f
by Jürg Billeter at 2018-10-03T05:08:21Z
-
66446fc3
by Jürg Billeter at 2018-10-03T05:36:38Z
-
29c19bea
by Tristan Van Berkom at 2018-10-03T07:33:48Z
-
b645881c
by Tristan Van Berkom at 2018-10-03T07:33:48Z
-
c9437616
by Tristan Van Berkom at 2018-10-03T08:07:15Z
-
11320fe2
by Tristan Van Berkom at 2018-10-03T09:33:39Z
-
91271964
by Tristan Van Berkom at 2018-10-03T09:59:40Z
-
3bf895d2
by Jonathan Maw at 2018-10-03T11:48:48Z
-
e4969807
by Jonathan Maw at 2018-10-03T12:48:07Z
-
a0814399
by Tristan Van Berkom at 2018-10-03T13:05:52Z
-
0a1f8e3c
by Tristan Van Berkom at 2018-10-03T13:42:20Z
-
11161f99
by Tristan Van Berkom at 2018-10-03T13:44:02Z
-
3e797bb9
by Tristan Van Berkom at 2018-10-03T13:44:02Z
-
d9020e43
by Tristan Van Berkom at 2018-10-03T13:44:02Z
-
3e5ff5a9
by Tristan Van Berkom at 2018-10-03T14:09:51Z
-
e8179c34
by Jim MacArthur at 2018-10-03T16:06:59Z
-
3cf38c8e
by Jim MacArthur at 2018-10-03T16:44:02Z
-
59c92bda
by Daniel Silverstone at 2018-10-04T08:12:20Z
-
b9ddcd0e
by Daniel Silverstone at 2018-10-04T08:12:20Z
-
df0d5a8b
by Daniel Silverstone at 2018-10-04T09:04:21Z
-
b8421a9c
by Daniel Silverstone at 2018-10-04T09:04:21Z
-
c74bfbe5
by Daniel Silverstone at 2018-10-04T09:04:21Z
-
c46a7e87
by Daniel Silverstone at 2018-10-04T09:13:04Z
-
fd6a9573
by Jürg Billeter at 2018-10-04T09:45:58Z
-
c5778941
by Josh Smith at 2018-10-04T13:43:10Z
-
788cde6a
by Josh Smith at 2018-10-04T13:43:10Z
-
8630bac4
by Tristan Van Berkom at 2018-10-04T14:11:27Z
-
a7984218
by Valentin David at 2018-10-04T14:56:09Z
-
b7b20d9e
by Valentin David at 2018-10-04T15:31:22Z
-
42ea143d
by Chandan Singh at 2018-10-05T12:40:48Z
-
b38f9f9d
by Jonathan Maw at 2018-10-05T13:05:38Z
-
47df1e36
by Jürg Billeter at 2018-10-05T13:30:38Z
-
d3921ab8
by Jonathan Maw at 2018-10-05T13:54:17Z
-
53d9f977
by Tristan Van Berkom at 2018-10-06T14:40:46Z
-
5587715c
by Tristan Van Berkom at 2018-10-07T06:19:03Z
-
43c7ac23
by Angelos Evripiotis at 2018-10-07T15:19:58Z
-
3f0c919f
by Angelos Evripiotis at 2018-10-07T15:52:52Z
-
6f5f795e
by Tristan Van Berkom at 2018-10-08T08:46:11Z
-
abb9fb98
by Tristan Van Berkom at 2018-10-08T08:59:18Z
-
ba905e3a
by Tristan Van Berkom at 2018-10-08T09:05:18Z
-
674afe94
by Tristan Van Berkom at 2018-10-08T09:07:45Z
-
350c6796
by Tristan Van Berkom at 2018-10-08T10:44:47Z
-
a774050f
by Jonathan Maw at 2018-10-08T18:41:16Z
-
837b17b7
by Jonathan Maw at 2018-10-08T18:41:16Z
-
ec6c96e8
by Jonathan Maw at 2018-10-08T18:41:16Z
-
83a5950e
by Jonathan Maw at 2018-10-08T18:41:16Z
-
f2e2d912
by Jonathan Maw at 2018-10-08T18:41:16Z
-
243e20bb
by Jonathan Maw at 2018-10-08T18:41:16Z
-
7757e7e4
by Lachlan Mackenzie at 2018-10-08T18:41:16Z
-
08c59870
by Lachlan Mackenzie at 2018-10-08T18:41:16Z
-
a32aa004
by Lachlan Mackenzie at 2018-10-08T18:41:16Z
-
69eb5380
by Jonathan Maw at 2018-10-08T18:41:16Z
29 changed files:
- .gitlab-ci.yml
- CONTRIBUTING.rst
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/_platform/darwin.py
- buildstream/_platform/linux.py
- buildstream/_platform/platform.py
- buildstream/_project.py
- buildstream/_scheduler/jobs/job.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_scheduler/scheduler.py
- buildstream/_site.py
- buildstream/_yaml.py
- + buildstream/_yamlcache.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/sandbox/_sandboxdummy.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/source.py
- buildstream/utils.py
- + contrib/bst-docker-import
- setup.py
- tests/frontend/mirror.py
- tests/frontend/workspace.py
- + tests/frontend/yamlcache.py
- tests/yaml/yaml.py
Changes:
... | ... | @@ -145,7 +145,8 @@ docs: |
145 | 145 |
stage: test
|
146 | 146 |
script:
|
147 | 147 |
- export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
|
148 |
- - pip3 install sphinx
|
|
148 |
+ # Currently sphinx_rtd_theme does not support Sphinx >1.8; this breaks search functionality
|
|
149 |
+ - pip3 install sphinx==1.7.9
|
|
149 | 150 |
- pip3 install sphinx-click
|
150 | 151 |
- pip3 install sphinx_rtd_theme
|
151 | 152 |
- cd dist && ./unpack.sh && cd buildstream
|
... | ... | @@ -161,14 +162,14 @@ docs: |
161 | 162 |
.overnight-tests: &overnight-tests-template
|
162 | 163 |
stage: test
|
163 | 164 |
variables:
|
164 |
- bst_ext_url: git+https://gitlab.com/BuildStream/bst-external.git
|
|
165 |
- bst_ext_ref: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
|
|
166 |
- fd_sdk_ref: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.12
|
|
165 |
+ BST_EXT_URL: git+https://gitlab.com/BuildStream/bst-external.git
|
|
166 |
+ BST_EXT_REF: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
|
|
167 |
+ FD_SDK_REF: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.11-35-g88d7c22c
|
|
167 | 168 |
before_script:
|
168 | 169 |
- (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
|
169 |
- - pip3 install --user -e ${bst_ext_url}@${bst_ext_ref}#egg=bst_ext
|
|
170 |
+ - pip3 install --user -e ${BST_EXT_URL}@${BST_EXT_REF}#egg=bst_ext
|
|
170 | 171 |
- git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
|
171 |
- - git -C freedesktop-sdk checkout ${fd_sdk_ref}
|
|
172 |
+ - git -C freedesktop-sdk checkout ${FD_SDK_REF}
|
|
172 | 173 |
only:
|
173 | 174 |
- schedules
|
174 | 175 |
|
... | ... | @@ -3,72 +3,137 @@ Contributing |
3 | 3 |
Some tips and guidelines for developers hacking on BuildStream
|
4 | 4 |
|
5 | 5 |
|
6 |
-Feature additions
|
|
7 |
------------------
|
|
8 |
-Major feature additions should be proposed on the
|
|
9 |
-`mailing list <https://mail.gnome.org/mailman/listinfo/buildstream-list>`_
|
|
10 |
-before being considered for inclusion, we strongly recommend proposing
|
|
11 |
-in advance of commencing work.
|
|
6 |
+.. _contributing_filing_issues:
|
|
7 |
+ |
|
8 |
+Filing issues
|
|
9 |
+-------------
|
|
10 |
+If you are experiencing an issue with BuildStream, or would like to submit a patch
|
|
11 |
+to fix an issue, then you should first search the list of `open issues <https://gitlab.com/BuildStream/buildstream/issues>`_
|
|
12 |
+to see if the issue is already filed, and `open an issue <https://gitlab.com/BuildStream/buildstream/issues/new>`_
|
|
13 |
+if no issue already exists.
|
|
14 |
+ |
|
15 |
+For policies on how to submit an issue and how to use our project labels,
|
|
16 |
+we recommend that you read the `policies guide
|
|
17 |
+<https://gitlab.com/BuildStream/nosoftware/alignment/blob/master/BuildStream_policies.md>`_
|
|
18 |
+ |
|
19 |
+ |
|
20 |
+.. _contributing_fixing_bugs:
|
|
21 |
+ |
|
22 |
+Fixing bugs
|
|
23 |
+-----------
|
|
24 |
+Before fixing a bug, it is preferred that an :ref:`issue be filed <contributing_filing_issues>`
|
|
25 |
+first in order to better document the defect; however, this need not be followed to the
|
|
26 |
+letter for minor fixes.
|
|
27 |
+ |
|
28 |
+Patches which fix bugs should always come with a regression test.
|
|
29 |
+ |
|
12 | 30 |
|
13 |
-If you are experiencing an issue with BuildStream or would like to submit a small patch/feature, then
|
|
14 |
-you can `oepn an issue <https://gitlab.com/BuildStream/buildstream/issues/new?issue%5Bassignee_id%5D=&issue%5Bmilestone_id%5D=>`_
|
|
31 |
+.. _contributing_adding_features:
|
|
15 | 32 |
|
16 |
-For policies on how to submit and issue and how to use our project labels, we recommend that you read the `policies guide <https://gitlab.com/BuildStream/nosoftware/alignment/blob/master/BuildStream_policies.md>`_
|
|
33 |
+Adding new features
|
|
34 |
+-------------------
|
|
35 |
+Feature additions should be proposed on the `mailing list
|
|
36 |
+<https://mail.gnome.org/mailman/listinfo/buildstream-list>`_
|
|
37 |
+before being considered for inclusion. To save time and avoid any frustration,
|
|
38 |
+we strongly recommend proposing your new feature in advance of commencing work.
|
|
39 |
+ |
|
40 |
+Once consensus has been reached on the mailing list, then the proposing
|
|
41 |
+party should :ref:`file an issue <contributing_filing_issues>` to track the
|
|
42 |
+work. Please use the *bst_task* template for issues which represent
|
|
43 |
+feature additions.
|
|
17 | 44 |
|
18 |
-New features must be well documented and tested either in our main
|
|
19 |
-test suite if possible, or otherwise in the integration tests.
|
|
45 |
+New features must be well documented and tested in our test suite.
|
|
20 | 46 |
|
21 | 47 |
It is expected that the individual submitting the work take ownership
|
22 | 48 |
of their feature within BuildStream for a reasonable timeframe of at least
|
23 | 49 |
one release cycle after their work has landed on the master branch. This is
|
24 |
-to say that the submitter is expected to address and fix any side effects and
|
|
25 |
-bugs which may have fell through the cracks in the review process, giving us
|
|
26 |
-a reasonable timeframe for identifying these.
|
|
50 |
+to say that the submitter is expected to address and fix any side effects,
|
|
51 |
+bugs or regressions which may have fallen through the cracks in the review
|
|
52 |
+process, giving us a reasonable timeframe for identifying these.
|
|
27 | 53 |
|
28 | 54 |
|
29 |
-Patch submissions
|
|
30 |
------------------
|
|
31 |
-If you want to submit a patch, do ask for developer permissions on our
|
|
32 |
-IRC channel first (GitLab's button also works, but you may need to
|
|
33 |
-shout about it - we often overlook this) - for CI reasons, it's much
|
|
34 |
-easier if patches are in branches of the main repository.
|
|
55 |
+.. _contributing_submitting_patches:
|
|
56 |
+ |
|
57 |
+Submitting patches
|
|
58 |
+------------------
|
|
35 | 59 |
|
36 |
-Branches must be submitted as merge requests in gitlab. If the branch
|
|
37 |
-fixes an issue or is related to any issues, these issues must be mentioned
|
|
38 |
-in the merge request or preferably the commit messages themselves.
|
|
39 | 60 |
|
61 |
+Ask for developer access
|
|
62 |
+~~~~~~~~~~~~~~~~~~~~~~~~
|
|
63 |
+If you want to submit a patch, do ask for developer permissions, either
|
|
64 |
+by asking us directly on our public IRC channel (irc://irc.gnome.org/#buildstream)
|
|
65 |
+or by visiting our `project page on GitLab <https://gitlab.com/BuildStream/buildstream>`_
|
|
66 |
+and using the GitLab UI to ask for permission.
|
|
67 |
+ |
|
68 |
+This will make your contribution experience smoother, as you will not
|
|
69 |
+need to set up any complicated CI settings, and rebasing your branch
|
|
70 |
+against the upstream master branch will be less painful.
|
|
71 |
+ |
|
72 |
+ |
|
73 |
+Branch names
|
|
74 |
+~~~~~~~~~~~~
|
|
40 | 75 |
Branch names for merge requests should be prefixed with the submitter's
|
41 |
-name or nickname, e.g. ``username/implement-flying-ponies``.
|
|
76 |
+name or nickname, followed by a forward slash, and then a descriptive
|
|
77 |
+name. e.g.::
|
|
42 | 78 |
|
43 |
-You may open merge requests for the branches you create before you
|
|
44 |
-are ready to have them reviewed upstream, as long as your merge request
|
|
45 |
-is not yet ready for review then it must be prefixed with the ``WIP:``
|
|
46 |
-identifier.
|
|
79 |
+ username/fix-that-bug
|
|
47 | 80 |
|
81 |
+This allows us to more easily identify which branch does what and
|
|
82 |
+belongs to whom, especially so that we can effectively clean up stale
|
|
83 |
+branches in the upstream repository over time.
|
|
84 |
+ |
|
85 |
+ |
|
86 |
+Merge requests
|
|
87 |
+~~~~~~~~~~~~~~
|
|
88 |
+Once you have created a local branch, you can push it to the upstream
|
|
89 |
+BuildStream repository using the command line::
|
|
90 |
+ |
|
91 |
+ git push origin username/fix-that-bug:username/fix-that-bug
|
|
92 |
+ |
|
93 |
+GitLab will respond to this with a message and a link to allow you to create
|
|
94 |
+a new merge request. You can also `create a merge request for an existing branch
|
|
95 |
+<https://gitlab.com/BuildStream/buildstream/merge_requests/new>`_.
|
|
96 |
+ |
|
97 |
+You may open merge requests for the branches you create before you are ready
|
|
98 |
+to have them reviewed and considered for inclusion if you like. Until your merge
|
|
99 |
+request is ready for review, the merge request title must be prefixed with the
|
|
100 |
+``WIP:`` identifier.
|
|
101 |
+ |
|
102 |
+ |
|
103 |
+Organized commits
|
|
104 |
+~~~~~~~~~~~~~~~~~
|
|
48 | 105 |
Submitted branches must not contain a history of the work done in the
|
49 |
-feature branch. Please use git's interactive rebase feature in order to
|
|
50 |
-compose a clean patch series suitable for submission.
|
|
106 |
+feature branch. For example, if you had to change your approach, or
|
|
107 |
+have a later commit which fixes something in a previous commit on your
|
|
108 |
+branch, we do not want to include the history of how you came up with
|
|
109 |
+your patch in the upstream master branch.
|
|
110 |
+ |
|
111 |
+Please use git's interactive rebase feature in order to compose a clean
|
|
112 |
+patch series suitable for submission upstream.
|
|
113 |
+ |
|
114 |
+Every commit in the series should pass the test suite; this is very important
|
|
115 |
+for tracking down regressions and performing git bisections in the future.
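As an illustration, squashing a later fixup into the commit it repairs might look like the following (the commit hashes and messages here are hypothetical)::

    git rebase -i master

In the todo list which opens in your editor, mark the fixup commit accordingly::

    pick  1a2b3c4 element.py: Added the frobnicator
    fixup 5d6e7f8 element.py: Fix frobnicator typo
    pick  9a8b7c6 tests: Test the frobnicator

This melds the middle commit into the first one, leaving a clean two commit series.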
|
|
51 | 116 |
|
52 | 117 |
We prefer that documentation changes be submitted in separate commits from
|
53 |
-the code changes which they document, and new test cases are also preferred
|
|
54 |
-in separate commits.
|
|
118 |
+the code changes which they document, and newly added test cases are also
|
|
119 |
+preferred in separate commits.
|
|
55 | 120 |
|
56 | 121 |
If a commit in your branch modifies behavior such that a test must also
|
57 | 122 |
be changed to match the new behavior, then the tests should be updated
|
58 |
-with the same commit. Ideally every commit in the history of master passes
|
|
59 |
-its test cases, this makes bisections more easy to perform, but is not
|
|
60 |
-always practical with more complex branches.
|
|
123 |
+with the same commit, so that every commit passes its own tests.
|
|
61 | 124 |
|
62 | 125 |
|
63 | 126 |
Commit messages
|
64 | 127 |
~~~~~~~~~~~~~~~
|
65 |
-Commit messages must be formatted with a brief summary line, optionally
|
|
66 |
-followed by an empty line and then a free form detailed description of
|
|
67 |
-the change.
|
|
128 |
+Commit messages must be formatted with a brief summary line, followed by
|
|
129 |
+an empty line and then a free form detailed description of the change.
|
|
68 | 130 |
|
69 | 131 |
The summary line must start with what changed, followed by a colon and
|
70 | 132 |
a very brief description of the change.
|
71 | 133 |
|
134 |
+If the commit fixes an issue, or is related to an issue, then the issue
|
|
135 |
+number must be referenced in the commit message.
|
|
136 |
+ |
|
72 | 137 |
**Example**::
|
73 | 138 |
|
74 | 139 |
element.py: Added the frobnicator so that foos are properly frobbed.
|
... | ... | @@ -77,33 +142,459 @@ a very brief description of the change. |
77 | 142 |
the element. Elements that are not properly frobnicated raise
|
78 | 143 |
an error to inform the user of invalid frobnication rules.
|
79 | 144 |
|
145 |
+ Fixes #123
|
|
146 |
+ |
|
147 |
+In the case that you have a commit which necessarily modifies multiple
|
|
148 |
+components, then the summary line should still mention generally what
|
|
149 |
+changed (if possible), followed by a colon and a brief summary.
|
|
150 |
+ |
|
151 |
+In this case the free form detailed description of the change should
|
|
152 |
+contain a bullet list describing what was changed in each component
|
|
153 |
+separately.
|
|
154 |
+ |
|
155 |
+**Example**::
|
|
156 |
+ |
|
157 |
+ artifact cache: Fixed automatic expiry in the local cache
|
|
80 | 158 |
|
81 |
-Coding style
|
|
82 |
-------------
|
|
83 |
-Coding style details for BuildStream
|
|
159 |
+ o _artifactcache/artifactcache.py: Updated the API contract
|
|
160 |
+ of ArtifactCache.remove() so that something detailed is
|
|
161 |
+ explained here.
|
|
84 | 162 |
|
163 |
+ o _artifactcache/cascache.py: Adhere to the new API contract
|
|
164 |
+ dictated by the abstract ArtifactCache class.
|
|
85 | 165 |
|
86 |
-Style guide
|
|
87 |
-~~~~~~~~~~~
|
|
88 |
-Python coding style for BuildStream is pep8, which is documented here: https://www.python.org/dev/peps/pep-0008/
|
|
166 |
+ o tests/artifactcache/expiry.py: Modified test expectations to
|
|
167 |
+ match the new behavior.
|
|
168 |
+ |
|
169 |
+ This is a part of #123
|
|
170 |
+ |
|
171 |
+ |
|
172 |
+Coding guidelines
|
|
173 |
+-----------------
|
|
174 |
+This section discusses coding style and other guidelines for hacking
|
|
175 |
+on BuildStream. This is important to read through for writing any non-trivial
|
|
176 |
+patches and especially outlines what people should watch out for when
|
|
177 |
+reviewing patches.
|
|
178 |
+ |
|
179 |
+Much of the rationale behind what is laid out in this section considers
|
|
180 |
+good traceability of lines of code with *git blame*, overall sensible
|
|
181 |
+modular structure, consistency in how we write code, and long term maintenance
|
|
182 |
+in mind.
|
|
183 |
+ |
|
184 |
+ |
|
185 |
+Approximate PEP-8 Style
|
|
186 |
+~~~~~~~~~~~~~~~~~~~~~~~
|
|
187 |
+Python coding style for BuildStream is approximately `pep8 <https://www.python.org/dev/peps/pep-0008/>`_.
|
|
89 | 188 |
|
90 | 189 |
We have a couple of minor exceptions to this standard; we don't want to compromise
|
91 | 190 |
code readability by being overly restrictive on line length for instance.
|
92 | 191 |
|
93 |
-The pep8 linter will run automatically when running the test suite.
|
|
192 |
+The pep8 linter will run automatically when :ref:`running the test suite <contributing_testing>`.
|
|
193 |
+ |
|
194 |
+ |
|
195 |
+Line lengths
|
|
196 |
+''''''''''''
|
|
197 |
+Regarding laxness on the line length in our linter settings, it should be clarified
|
|
198 |
+that the line length limit is a hard limit which causes the linter to bail out
|
|
199 |
+and reject commits which exceed the hard limit - not an invitation to write exceedingly
|
|
200 |
+long lines of code, comments, or API documenting docstrings.
|
|
201 |
+ |
|
202 |
+Code, comments and docstrings should strive to remain written for approximately 80
|
|
203 |
+or 90 character lines, where exceptions can be made when code would be less readable
|
|
204 |
+when exceeding 80 or 90 characters (often this happens in conditional statements
|
|
205 |
+when raising an exception, for example). Or, when comments contain a long link that
|
|
206 |
+causes the given line to exceed 80 or 90 characters, we don't want this to cause
|
|
207 |
+the linter to refuse the commit.
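For instance, a hedged sketch of the kind of line which may reasonably exceed the soft target, because splitting the message apart would hurt readability (the surrounding names here are purely illustrative)::

    if not os.path.exists(fullpath):
        raise SourceError("{}: Expected file '{}' to exist after fetching source".format(self, fullpath))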
|
|
208 |
+ |
|
209 |
+ |
|
210 |
+.. _contributing_documenting_symbols:
|
|
211 |
+ |
|
212 |
+Documenting symbols
|
|
213 |
+~~~~~~~~~~~~~~~~~~~
|
|
214 |
+In BuildStream, we maintain what we call a *"Public API Surface"* that
|
|
215 |
+is guaranteed to be stable and unchanging across stable releases. The
|
|
216 |
+symbols which fall into this special class are documented using Python's
|
|
217 |
+standard *docstrings*, while all other internals of BuildStream are documented
|
|
218 |
+with comments above the related symbol.
|
|
219 |
+ |
|
220 |
+When documenting the public API surface which is rendered in the reference
|
|
221 |
+manual, we always mention the major version in which the API was introduced,
|
|
222 |
+as shown in the examples below. If a public API exists without the *Since*
|
|
223 |
+annotation, this is taken to mean that it was available since the first stable
|
|
224 |
+release 1.0.
|
|
225 |
+ |
|
226 |
+Here are some examples to get the hang of the format of API documenting
|
|
227 |
+comments and docstrings.
|
|
228 |
+ |
|
229 |
+**Public API Surface method**::
|
|
230 |
+ |
|
231 |
+ def frobnicate(self, source, *, frobilicious=False):
|
|
232 |
+ """Frobnicates this element with the specified source
|
|
233 |
+ |
|
234 |
+ Args:
|
|
235 |
+ source (Source): The Source to frobnicate with
|
|
236 |
+ frobilicious (bool): Optionally specify that frobnication should be
|
|
237 |
+ performed fribiliciously
|
|
238 |
+ |
|
239 |
+ Returns:
|
|
240 |
+ (Element): The frobnicated version of this Element.
|
|
241 |
+ |
|
242 |
+ *Since: 1.2*
|
|
243 |
+ """
|
|
244 |
+ ...
|
|
245 |
+ |
|
246 |
+**Internal method**::
|
|
247 |
+ |
|
248 |
+ # frobnicate():
|
|
249 |
+ #
|
|
250 |
+ # Frobnicates this element with the specified source
|
|
251 |
+ #
|
|
252 |
+ # Args:
|
|
253 |
+ # source (Source): The Source to frobnicate with
|
|
254 |
+ # frobilicious (bool): Optionally specify that frobnication should be
|
|
255 |
+ # performed fribiliciously
|
|
256 |
+ #
|
|
257 |
+ # Returns:
|
|
258 |
+ # (Element): The frobnicated version of this Element.
|
|
259 |
+ #
|
|
260 |
+ def frobnicate(self, source, *, frobilicious=False):
|
|
261 |
+ ...
|
|
262 |
+ |
|
263 |
+**Public API Surface instance variable**::
|
|
264 |
+ |
|
265 |
+ def __init__(self, context, element):
|
|
266 |
+ |
|
267 |
+ self.name = self._compute_name(context, element)
|
|
268 |
+ """The name of this foo
|
|
269 |
+ |
|
270 |
+ *Since: 1.2*
|
|
271 |
+ """
|
|
272 |
+ |
|
273 |
+**Internal instance variable**::
|
|
274 |
+ |
|
275 |
+ def __init__(self, context, element):
|
|
276 |
+ |
|
277 |
+ self.name = self._compute_name(context, element) # The name of this foo
|
|
278 |
+ |
|
279 |
+**Internal instance variable (long)**::
|
|
280 |
+ |
|
281 |
+ def __init__(self, context, element):
|
|
282 |
+ |
|
283 |
+ # This instance variable required a longer explanation, so
|
|
284 |
+ # it is on a line above the instance variable declaration.
|
|
285 |
+ self.name = self._compute_name(context, element)
|
|
286 |
+ |
|
287 |
+ |
|
288 |
+**Public API Surface class**::
|
|
289 |
+ |
|
290 |
+ class Foo(Bar):
|
|
291 |
+ """The main Foo object in the data model
|
|
292 |
+ |
|
293 |
+ Explanation about Foo. Note that we always document
|
|
294 |
+ the constructor arguments here, and not beside the __init__
|
|
295 |
+ method.
|
|
296 |
+ |
|
297 |
+ Args:
|
|
298 |
+ context (Context): The invocation Context
|
|
299 |
+ count (int): The number to count
|
|
300 |
+ |
|
301 |
+ *Since: 1.2*
|
|
302 |
+ """
|
|
303 |
+ ...
|
|
304 |
+ |
|
305 |
+**Internal class**::
|
|
306 |
+ |
|
307 |
+ # Foo()
|
|
308 |
+ #
|
|
309 |
+ # The main Foo object in the data model
|
|
310 |
+ #
|
|
311 |
+ # Args:
|
|
312 |
+ # context (Context): The invocation Context
|
|
313 |
+ # count (int): The number to count
|
|
314 |
+ #
|
|
315 |
+ class Foo(Bar):
|
|
316 |
+ ...
|
|
317 |
+ |
|
318 |
+ |
|
319 |
+.. _contributing_class_order:
|
|
320 |
+ |
|
321 |
+Class structure and ordering
|
|
322 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
323 |
+When creating or modifying an object class in BuildStream, it is
|
|
324 |
+important to keep in mind the order in which symbols should appear
|
|
325 |
+and keep this consistent.
|
|
326 |
+ |
|
327 |
+Here is an example to illustrate the expected ordering of symbols
|
|
328 |
+on a Python class in BuildStream::
|
|
329 |
+ |
|
330 |
+ class Foo(Bar):
|
|
331 |
+ |
|
332 |
+ # Public class-wide variables come first, if any.
|
|
333 |
+ |
|
334 |
+ # Private class-wide variables, if any
|
|
335 |
+ |
|
336 |
+ # Now we have the dunder/magic methods, always starting
|
|
337 |
+ # with the __init__() method.
|
|
338 |
+ |
|
339 |
+ def __init__(self, name):
|
|
340 |
+ |
|
341 |
+ super().__init__()
|
|
342 |
+ |
|
343 |
+ # NOTE: In the instance initializer we declare any instance variables,
|
|
344 |
+ # always declare the public instance variables (if any) before
|
|
345 |
+ # the private ones.
|
|
346 |
+ #
|
|
347 |
+ # It is preferred to avoid any public instance variables, and
|
|
348 |
+ # always expose an accessor method for it instead.
|
|
349 |
+ |
|
350 |
+ #
|
|
351 |
+ # Public instance variables
|
|
352 |
+ #
|
|
353 |
+ self.name = name # The name of this foo
|
|
354 |
+ |
|
355 |
+ #
|
|
356 |
+ # Private instance variables
|
|
357 |
+ #
|
|
358 |
+ self._count = 0 # The count of this foo
|
|
359 |
+ |
|
360 |
+ ################################################
|
|
361 |
+ # Abstract Methods #
|
|
362 |
+ ################################################
|
|
363 |
+ |
|
364 |
+ # NOTE: Abstract methods in BuildStream are allowed to have
|
|
365 |
+ # default implementations.
|
|
366 |
+ #
|
|
367 |
+ # Subclasses must NEVER override any method which was
|
|
368 |
+ # not advertized as an abstract method by the parent class.
|
|
369 |
+ |
|
370 |
+ # frob()
|
|
371 |
+ #
|
|
372 |
+ # Implementors should implement this to frob this foo
|
|
373 |
+ # count times if possible.
|
|
374 |
+ #
|
|
375 |
+ # Args:
|
|
376 |
+ # count (int): The number of times to frob this foo
|
|
377 |
+ #
|
|
378 |
+ # Returns:
|
|
379 |
+ # (int): The number of times this foo was frobbed.
|
|
380 |
+ #
|
|
381 |
+ # Raises:
|
|
382 |
+ # (FooError): Implementors are expected to raise this error
|
|
383 |
+ #
|
|
384 |
+ def frob(self, count):
|
|
385 |
+ |
|
386 |
+ #
|
|
387 |
+ # An abstract method in BuildStream is allowed to have
|
|
388 |
+ # a default implementation.
|
|
389 |
+ #
|
|
390 |
+ self._count = self._do_frobbing(count)
|
|
391 |
+ |
|
392 |
+ return self._count
|
|
393 |
+ |
|
394 |
+ ################################################
|
|
395 |
+ # Implementation of abstract methods #
|
|
396 |
+ ################################################
|
|
397 |
+ |
|
398 |
+ # NOTE: Implementations of abstract methods defined by
|
|
399 |
+ # the parent class should NEVER document the API
|
|
400 |
+ # here redundantly.
|
|
401 |
+ |
|
402 |
+ def frobbish(self):
|
|
403 |
+ #
|
|
404 |
+ # Implementation of the "frobbish" abstract method
|
|
405 |
+ # defined by the parent Bar class.
|
|
406 |
+ #
|
|
407 |
+ return True
|
|
408 |
+ |
|
409 |
+ ################################################
|
|
410 |
+ # Public Methods #
|
|
411 |
+ ################################################
|
|
412 |
+ |
|
413 |
+ # NOTE: Public methods here are the ones which are expected
|
|
414 |
+ # to be called from outside of this class.
|
|
415 |
+ #
|
|
416 |
+ # These, along with any abstract methods, usually
|
|
417 |
+ # constitute the API surface of this class.
|
|
418 |
+ |
|
419 |
+ # frobnicate()
|
|
420 |
+ #
|
|
421 |
+ # Perform the frobnication process on this Foo
|
|
422 |
+ #
|
|
423 |
+ # Raises:
|
|
424 |
+ # (FrobError): In the case that a frobnication error was
|
|
425 |
+ # encountered
|
|
426 |
+ #
|
|
427 |
+ def frobnicate(self):
|
|
428 |
+ frobnicator.frobnicate(self)
|
|
429 |
+ |
|
430 |
+ # set_count()
|
|
431 |
+ #
|
|
432 |
+ # Sets the count of this foo
|
|
433 |
+ #
|
|
434 |
+ # Args:
|
|
435 |
+ # count (int): The new count to set
|
|
436 |
+ #
|
|
437 |
+ def set_count(self, count):
|
|
438 |
+ |
|
439 |
+ self._count = count
|
|
440 |
+ |
|
441 |
+ # get_count()
|
|
442 |
+ #
|
|
443 |
+ # Accessor for the count value of this foo.
|
|
444 |
+ #
|
|
445 |
+ # Returns:
|
|
446 |
+ # (int): The count of this foo
|
|
447 |
+ #
|
|
448 |
+ def get_count(self):
|
|
449 |
+ |
|
450 |
+ return self._count
|
|
451 |
+ |
|
452 |
+ ################################################
|
|
453 |
+ # Private Methods #
|
|
454 |
+ ################################################
|
|
455 |
+ |
|
456 |
+ # NOTE: Private methods are the ones which are internal
|
|
457 |
+ # implementation details of this class.
|
|
458 |
+ #
|
|
459 |
+ # Even though these are private implementation
|
|
460 |
+ # details, they still MUST have API documenting
|
|
461 |
+ # comments on them.
|
|
462 |
+
|
|
463 |
+ # _do_frobbing()
|
|
464 |
+ #
|
|
465 |
+ # Does the actual frobbing
|
|
466 |
+ #
|
|
467 |
+ # Args:
|
|
468 |
+ # count (int): The number of times to frob this foo
|
|
469 |
+ #
|
|
470 |
+ # Returns:
|
|
471 |
+ # (int): The number of times this foo was frobbed.
|
|
472 |
+ #
|
|
473 |
+ def _do_frobbing(self, count):
|
|
474 |
+ return count
|
|
475 |
+ |
|
476 |
+ |
|
477 |
+.. _contributing_public_and_private:
|
|
478 |
+ |
|
479 |
+Public and private symbols
|
|
480 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
481 |
+BuildStream mostly follows the PEP-8 for defining *public* and *private* symbols
|
|
482 |
+for any given class, with some deviations. Please read the `section on inheritance
|
|
483 |
+<https://www.python.org/dev/peps/pep-0008/#designing-for-inheritance>`_ for
|
|
484 |
+reference on how the PEP-8 defines public and non-public.
|
|
485 |
+ |
|
486 |
+* A *public* symbol is any symbol which you expect to be used by clients
|
|
487 |
+ of your class or module within BuildStream.
|
|
488 |
+ |
|
489 |
+ Public symbols are written without any leading underscores.
|
|
490 |
+ |
|
491 |
+* A *private* symbol is any symbol which is entirely internal to your class
|
|
492 |
+ or module within BuildStream. These symbols cannot ever be accessed by
|
|
493 |
+ external clients or modules.
|
|
494 |
+ |
|
495 |
+ A private symbol must be denoted by a leading underscore.
|
|
496 |
+ |
|
497 |
+* When a class can have subclasses (for example, the ``Sandbox`` or ``Platform``
|
|
498 |
+ classes which have various implementations, or the ``Element`` and ``Source``
|
|
499 |
+ classes which plugins derive from), then private symbols should be denoted
|
|
500 |
+ by two leading underscores.
|
|
501 |
+ |
|
502 |
+ The double leading underscore naming convention invokes Python's name
|
|
503 |
+ mangling algorithm which helps prevent namespace collisions in the case
|
|
504 |
+ that subclasses might have a private symbol with the same name.
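  To make the mangling concrete, a small sketch (the classes here are purely illustrative)::

      class Base():
          def __init__(self):
              self.__count = 0    # Mangled to self._Base__count

      class Derived(Base):
          def __init__(self):
              super().__init__()
              self.__count = 5    # Mangled to self._Derived__count; no collision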
|
|
505 |
+ |
|
506 |
+In BuildStream, we have what we call a *"Public API Surface"*, as previously
|
|
507 |
+mentioned in :ref:`contributing_documenting_symbols`. In the :ref:`next section
|
|
508 |
+<contributing_public_api_surface>` we will discuss the *"Public API Surface"* and
|
|
509 |
+outline the exceptions to the rules discussed here.
|
|
510 |
+ |
|
511 |
+ |
|
512 |
+.. _contributing_public_api_surface:
|
|
513 |
+ |
|
514 |
+Public API surface
|
|
515 |
+~~~~~~~~~~~~~~~~~~
|
|
516 |
+BuildStream exposes what we call a *"Public API Surface"* which is stable
|
|
517 |
+and unchanging. This is for the sake of stability of the interfaces which
|
|
518 |
+plugins use, so it can also be referred to as the *"Plugin facing API"*.
|
|
519 |
+ |
|
520 |
+Any symbols which are a part of the *"Public API Surface*" are never allowed
|
|
521 |
+to change once they have landed in a stable release version of BuildStream. As
|
|
522 |
+such, we aim to keep the *"Public API Surface"* as small as possible at all
|
|
523 |
+times, and never expose any internal details to plugins inadvertently.
|
|
524 |
+ |
|
525 |
+One problem which arises from this is that we end up having symbols
|
|
526 |
+which are *public* according to the :ref:`rules discussed in the previous section
|
|
527 |
+<contributing_public_and_private>`, but must be hidden away from the
|
|
528 |
+*"Public API Surface"*. For example, BuildStream internal classes need
|
|
529 |
+to invoke methods on the ``Element`` and ``Source`` classes, whereas these
|
|
530 |
+methods need to be hidden from the *"Public API Surface"*.
|
|
531 |
+ |
|
532 |
+This is where BuildStream deviates from the PEP-8 standard for public
|
|
533 |
+and private symbol naming.
|
|
534 |
+ |
|
535 |
+In order to disambiguate between:
|
|
536 |
+ |
|
537 |
+* Symbols which are publicly accessible details of the ``Element`` class, can
|
|
538 |
+ be accessed by BuildStream internals, but must remain hidden from the
|
|
539 |
+ *"Public API Surface"*
|
|
540 |
+ |
|
541 |
+* Symbols which are private to the ``Element`` class, and cannot be accessed
|
|
542 |
+ from outside of the ``Element`` class at all.
|
|
543 |
+ |
|
544 |
+We denote the former category of symbols with only a single underscore, and the latter
|
|
545 |
+category of symbols with a double underscore. We often refer to this distinction
|
|
546 |
+as *"API Private"* (the former category) and *"Local Private"* (the latter category).
|
|
547 |
+ |
|
548 |
+Classes which are a part of the *"Public API Surface"* and require this disambiguation
|
|
549 |
+were not discussed in :ref:`the class ordering section <contributing_class_order>`; for
|
|
550 |
+these classes, the *"API Private"* symbols always come **before** the *"Local Private"*
|
|
551 |
+symbols in the class declaration.
|
|
552 |
+ |
|
553 |
+Modules which are not a part of the *"Public API Surface"* have their Python files
|
|
554 |
+prefixed with a single underscore, and are not imported in BuildStream's master
|
|
555 |
+``__init__.py`` which is used by plugins.
|
|
556 |
+ |
|
557 |
+.. note::
|
|
558 |
+ |
|
559 |
+ The ``utils.py`` module is public and exposes a handful of utility functions,
|
|
560 |
+ however many of the functions it provides are *"API Private"*.
|
|
561 |
+ |
|
562 |
+ In this case, the *"API Private"* functions are prefixed with a single underscore.
|
|
563 |
+ |
|
564 |
+Any objects which are a part of the *"Public API Surface"* should be exposed via the
|
|
565 |
+toplevel ``__init__.py`` of the ``buildstream`` package.
|
|
566 |
+ |
|
567 |
+ |
|
568 |
+File naming convention
|
|
569 |
+~~~~~~~~~~~~~~~~~~~~~~
|
|
570 |
+With the exception of a few helper objects and data structures, we structure
|
|
571 |
+the code in BuildStream such that every filename is named after the object it
|
|
572 |
+implements. E.g. The ``Project`` object is implemented in ``_project.py``, the
|
|
573 |
+``Context`` object in ``_context.py``, the base ``Element`` class in ``element.py``,
|
|
574 |
+etc.
|
|
575 |
+ |
|
576 |
+As mentioned in the previous section, objects which are not a part of the
|
|
577 |
+:ref:`public, plugin facing API surface <contributing_public_api_surface>` have their
|
|
578 |
+filenames prefixed with a leading underscore (like ``_context.py`` and ``_project.py``
|
|
579 |
+in the examples above).
|
|
580 |
+ |
|
581 |
+When an object name has multiple words in it, e.g. ``ArtifactCache``, then the
|
|
582 |
+resulting file is named all in lower case without any underscore to separate
|
|
583 |
+words. In the case of ``ArtifactCache``, the filename implementing this object
|
|
584 |
+is found at ``_artifactcache/artifactcache.py``.
|
|
94 | 585 |
|
95 | 586 |
|
96 | 587 |
Imports
|
97 | 588 |
~~~~~~~
|
98 | 589 |
Module imports inside BuildStream are done with relative ``.`` notation
|
99 | 590 |
|
100 |
-Good::
|
|
591 |
+**Good**::
|
|
101 | 592 |
|
102 |
- from .context import Context
|
|
593 |
+ from ._context import Context
|
|
103 | 594 |
|
104 |
-Bad::
|
|
595 |
+**Bad**::
|
|
105 | 596 |
|
106 |
- from buildstream.context import Context
|
|
597 |
+ from buildstream._context import Context
|
|
107 | 598 |
|
108 | 599 |
The exception to the above rule is when authoring plugins,
|
109 | 600 |
plugins do not reside in the same namespace so they must
|
... | ... | @@ -122,128 +613,586 @@ This makes things clear when reading code that said functions |
122 | 613 |
are not defined in the same file but come from utils.py for example.
|
123 | 614 |
|
124 | 615 |
|
125 |
-Policy for private symbols
|
|
126 |
-~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
127 |
-Private symbols are expressed via a leading ``_`` single underscore, or
|
|
128 |
-in some special circumstances with a leading ``__`` double underscore.
|
|
616 |
+.. _contributing_instance_variables:
|
|
617 |
+ |
|
618 |
+Instance variables
|
|
619 |
+~~~~~~~~~~~~~~~~~~
|
|
620 |
+It is preferred that all instance state variables be declared as :ref:`private symbols
|
|
621 |
+<contributing_public_and_private>`; however, in some cases, especially when the state
|
|
622 |
+is immutable for the object's life time (like an ``Element`` name for example), it
|
|
623 |
+is acceptable to save some typing by using a publicly accessible instance variable.
|
|
624 |
+ |
|
625 |
+It is never acceptable to modify the value of an instance variable from outside
|
|
626 |
+of the declaring class, even if the variable is *public*. In other words, the class
|
|
627 |
+which exposes an instance variable is the only one in control of the value of this
|
|
628 |
+variable.
|
|
629 |
+ |
|
630 |
+* If an instance variable is public and must be modified; then it must be
|
|
631 |
+ modified using a :ref:`mutator <contributing_accessor_mutator>`
|
|
632 |
+ |
|
633 |
+* Ideally for better encapsulation, all object state is declared as
|
|
634 |
+ :ref:`private instance variables <contributing_public_and_private>` and can
|
|
635 |
+ only be accessed by external classes via public :ref:`accessors and mutators
|
|
636 |
+ <contributing_accessor_mutator>`
|
|
637 |
+ |
|
638 |
+.. note::
|
|
639 |
+ |
|
640 |
+ In some cases, we may use small data structures declared as objects for the sake
|
|
641 |
+ of better readability, where the object class itself has no real supporting code.
|
|
642 |
+ |
|
643 |
+ In these exceptions, it can be acceptable to modify the instance variables
|
|
644 |
+ of these objects directly, unless they are otherwise documented to be immutable.
|
|
645 |
+ |
|
646 |
+ |
|
647 |
+.. _contributing_accessor_mutator:
|
|
648 |
+ |
|
649 |
+Accessors and mutators
|
|
650 |
+~~~~~~~~~~~~~~~~~~~~~~
|
|
651 |
+An accessor and a mutator are methods defined on the object class to access (get)
|
|
652 |
+or mutate (set) a value owned by the declaring class, respectively.
|
|
653 |
+ |
|
654 |
+An accessor might derive the returned value from one or more of its components,
|
|
655 |
+and a mutator might have side effects, or delegate the mutation to a component.
|
|
656 |
+ |
|
657 |
+Accessors and mutators are always :ref:`public <contributing_public_and_private>`
|
|
658 |
+(even if they might have a single leading underscore and are considered
|
|
659 |
+:ref:`API Private <contributing_public_api_surface>`), as their purpose is to
|
|
660 |
+enforce encapsulation with regards to any accesses to the state which is owned
|
|
661 |
+by the declaring class.
|
|
662 |
+ |
|
663 |
+Accessors and mutators are functions prefixed with ``get_`` and ``set_``
|
|
664 |
+respectively, e.g.::
|
|
665 |
+ |
|
666 |
+ class Foo():
|
|
667 |
+ |
|
668 |
+ def __init__(self):
|
|
669 |
+ |
|
670 |
+ # Declare some internal state
|
|
671 |
+ self._count = 0
|
|
672 |
+ |
|
673 |
+ # get_count()
|
|
674 |
+ #
|
|
675 |
+ # Gets the count of this Foo.
|
|
676 |
+ #
|
|
677 |
+ # Returns:
|
|
678 |
+ # (int): The current count of this Foo
|
|
679 |
+ #
|
|
680 |
+ def get_count(self):
|
|
681 |
+ return self._count
|
|
682 |
+ |
|
683 |
+ # set_count()
|
|
684 |
+ #
|
|
685 |
+ # Sets the count of this Foo.
|
|
686 |
+ #
|
|
687 |
+ # Args:
|
|
688 |
+ # count (int): The new count for this Foo
|
|
689 |
+ #
|
|
690 |
+ def set_count(self, count):
|
|
691 |
+ self._count = count
|
|
692 |
+ |
|
693 |
+.. attention::
|
|
129 | 694 |
|
130 |
-Before understanding the naming policy, it is first important to understand
|
|
131 |
-that in BuildStream, there are two levels of privateness which need to be
|
|
132 |
-considered.
|
|
695 |
+ We are aware that Python offers a facility for accessors and
|
|
696 |
+ mutators using the ``@property`` decorator instead. Do not use
|
|
697 |
+ the ``@property`` decorator.
|
|
133 | 698 |
|
134 |
-These are treated subtly differently and thus need to be understood:
|
|
699 |
+ The decision to use explicitly defined functions instead of the
|
|
700 |
+ ``@property`` decorator is rather arbitrary; there is not much
|
|
701 |
+ technical merit to preferring one technique over the other.
|
|
702 |
+ However as :ref:`discussed below <contributing_always_consistent>`,
|
|
703 |
+ it is of the utmost importance that we do not mix both techniques
|
|
704 |
+ in the same codebase.
|
|
135 | 705 |
|
136 |
-* API Private
|
|
137 | 706 |
|
138 |
- A symbol is considered to be *API private* if it is not exposed in the *public API*.
|
|
707 |
+.. _contributing_abstract_methods:
|
|
139 | 708 |
|
140 |
- Even if a symbol does not have any leading underscore, it may still be *API private*
|
|
141 |
- if the containing *class* or *module* is named with a leading underscore.
|
|
709 |
+Abstract methods
|
|
710 |
+~~~~~~~~~~~~~~~~
|
|
711 |
+In BuildStream, an *"Abstract Method"* is a bit of a misnomer and does
|
|
712 |
+not match up to how Python defines abstract methods; we need to seek out
|
|
713 |
+a new nomenclature to refer to these methods.
|
|
142 | 714 |
|
143 |
-* Local private
|
|
715 |
+In Python, an *"Abstract Method"* is a method which **must** be
|
|
716 |
+implemented by a subclass, whereas all methods in Python can be
|
|
717 |
+overridden.
|
|
144 | 718 |
|
145 |
- A symbol is considered to be *local private* if it is not intended for access
|
|
146 |
- outside of the defining *scope*.
|
|
719 |
+In BuildStream, we use the term *"Abstract Method"* to refer to
|
|
720 |
+a method which **can** be overridden by a subclass, whereas it
|
|
721 |
+is **illegal** to override any other method.
|
|
147 | 722 |
|
148 |
- If a symbol has a leading underscore, it might not be *local private* if it is
|
|
149 |
- declared on a publicly visible class, but needs to be accessed internally by
|
|
150 |
- other modules in the BuildStream core.
|
|
723 |
+* Abstract methods are allowed to have default implementations
|
|
151 | 724 |
|
725 |
+* Subclasses are not allowed to redefine the calling signature
|
|
726 |
+ of an abstract method, or redefine the API contract in any way
|
|
152 | 727 |
|
153 |
-Ordering
|
|
154 |
-''''''''
|
|
155 |
-For better readability and consistency, we try to keep private symbols below
|
|
156 |
-public symbols. In the case of public modules where we may have a mix of
|
|
157 |
-*API private* and *local private* symbols, *API private* symbols should come
|
|
158 |
-before *local private* symbols.
|
|
728 |
+* Subclasses are not allowed to override any other methods.
|
|
159 | 729 |
|
730 |
+The key here is that in BuildStream, we consider it unacceptable
|
|
731 |
+that a subclass overrides a method of its parent class unless
|
|
732 |
+said parent class has explicitly given permission to subclasses
|
|
733 |
+to do so, and outlined the API contract for this purpose. No surprises
|
|
734 |
+are allowed.
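A hedged sketch of the convention, reusing the illustrative ``Bar`` and ``Foo`` classes from earlier examples::

    class Bar():

        # frob()
        #
        # An abstract method, explicitly advertized for subclasses
        # to implement, with a default implementation provided.
        #
        def frob(self):
            return 0

    class Foo(Bar):

        # Implementation of the abstract frob() method, adhering to
        # the API contract advertized by the parent Bar class.
        def frob(self):
            return 42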
|
|
160 | 735 |
|
161 |
-Symbol naming
|
|
162 |
-'''''''''''''
|
|
163 |
-Any private symbol must start with a single leading underscore for two reasons:
|
|
164 | 736 |
|
165 |
-* So that it does not bleed into documentation and *public API*.
|
|
737 |
+Error handling
|
|
738 |
+~~~~~~~~~~~~~~
|
|
739 |
+In BuildStream, all non-recoverable errors are expressed via
|
|
740 |
+subclasses of the ``BstError`` exception.
|
|
166 | 741 |
|
167 |
-* So that it is clear to developers which symbols are not used outside of the declaring *scope*
|
|
742 |
+This exception is handled deep in the core in a few places, and
|
|
743 |
+it is rarely necessary to handle a ``BstError``.
|
|
168 | 744 |
|
169 |
-Remember that with python, the modules (python files) are also symbols
|
|
170 |
-within their containing *package*, as such; modules which are entirely
|
|
171 |
-private to BuildStream are named as such, e.g. ``_thismodule.py``.
|
|
172 | 745 |
|
746 |
+Raising exceptions
|
|
747 |
+''''''''''''''''''
|
|
748 |
+When writing code in the BuildStream core, ensure that all system
|
|
749 |
+calls and third party library calls are wrapped in a ``try:`` block,
|
|
750 |
+and raise a descriptive ``BstError`` of the appropriate class explaining
|
|
751 |
+what exactly failed.
|
|
173 | 752 |
|
174 |
-Cases for double underscores
|
|
175 |
-''''''''''''''''''''''''''''
|
|
176 |
-The double underscore in python has a special function. When declaring
|
|
177 |
-a symbol in class scope which has a leading underscore, it can only be
|
|
178 |
-accessed within the class scope using the same name. Outside of class
|
|
179 |
-scope, it can only be accessed with a *cheat*.
|
|
753 |
+Ensure that the original system call error is formatted into your new
|
|
754 |
+exception, and that you use the Python ``from`` semantic to retain the
|
|
755 |
+original call trace, example::
|
|
180 | 756 |
|
181 |
-We use the double underscore in cases where the type of privateness can be
|
|
182 |
-ambiguous.
|
|
757 |
+ try:
|
|
758 |
+ os.utime(self._refpath(ref))
|
|
759 |
+ except FileNotFoundError as e:
|
|
760 |
+ raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
|
|
183 | 761 |
|
184 |
-* For private modules and classes
|
|
185 | 762 |
|
186 |
- We never need to disambiguate with a double underscore
|
|
763 |
+Enhancing exceptions
|
|
764 |
+''''''''''''''''''''
|
|
765 |
+Sometimes the ``BstError`` originates from a lower level component,
|
|
766 |
+and the code segment which raised the exception did not have enough context
|
|
767 |
+to create a complete, informative summary of the error for the user.
|
|
187 | 768 |
|
188 |
-* For private symbols declared in a public *scope*
|
|
769 |
+In these cases it is necessary to handle the error and raise a new
|
|
770 |
+one, e.g.::
|
|
189 | 771 |
|
190 |
- In the case that we declare a private method on a public object, it
|
|
191 |
- becomes ambiguous whether:
|
|
772 |
+ try:
|
|
773 |
+ extracted_artifact = self._artifacts.extract(self, cache_key)
|
|
774 |
+ except ArtifactError as e:
|
|
775 |
+ raise ElementError("Failed to extract {} while checking out {}: {}"
|
|
776 |
+ .format(cache_key, self.name, e)) from e
|
|
192 | 777 |
|
193 |
- * The symbol is *local private*, and only used within the given scope
|
|
194 | 778 |
|
195 |
- * The symbol is *API private*, and will be used internally by BuildStream
|
|
196 |
- from other parts of the codebase.
|
|
779 |
+Programming errors
|
|
780 |
+''''''''''''''''''
|
|
781 |
+Sometimes you are writing code and have detected an unexpected condition,
|
|
782 |
+or a broken invariant for which the code cannot be prepared to handle
|
|
783 |
+gracefully.
|
|
197 | 784 |
|
198 |
- In this case, we use a single underscore for *API private* methods which
|
|
199 |
- are not *local private*, and we use a double underscore for *local private*
|
|
200 |
- methods declared in public scope.
|
|
785 |
+In these cases, do **not** raise any of the ``BstError`` class exceptions.
|
|
201 | 786 |
|
787 |
+Instead, use the ``assert`` statement, e.g.::
|
|
202 | 788 |
|
203 |
-Documenting private symbols
|
|
204 |
-'''''''''''''''''''''''''''
|
|
205 |
-Any symbol which is *API Private* (regardless of whether it is also
|
|
206 |
-*local private*), should have some documentation for developers to
|
|
207 |
-better understand the codebase.
|
|
789 |
+ assert utils._is_main_process(), \
|
|
790 |
+ "Attempted to save workspace configuration from child process"
|
|
208 | 791 |
|
209 |
-Contrary to many other python projects, we do not use docstrings to
|
|
210 |
-document private symbols, but prefer to keep *API Private* symbols
|
|
211 |
-documented in code comments placed *above* the symbol (or *beside* the
|
|
212 |
-symbol in some cases, such as variable declarations in a class where
|
|
213 |
-a shorter comment is more desirable), rather than docstrings placed *below*
|
|
214 |
-the symbols being documented.
|
|
792 |
+This will result in a ``BUG`` message with the stack trace included being
|
|
793 |
+logged and reported in the frontend.
|
|
215 | 794 |
|
216 |
-Other than this detail, follow the same guidelines for documenting
|
|
217 |
-symbols as described below.
|
|
218 | 795 |
|
796 |
+BstError parameters
|
|
797 |
+'''''''''''''''''''
|
|
798 |
+When raising ``BstError`` class exceptions, there are some common properties
|
|
799 |
+which can be useful to know about:
|
|
219 | 800 |
|
220 |
-Documenting BuildStream
|
|
221 |
------------------------
|
|
801 |
+* **message:** The brief human readable error, which will be formatted on one line in the frontend
|
|
802 |
+ |
|
803 |
+* **detail:** An optional detailed human readable message to accompany the **message** summary
|
|
804 |
+ of the error. This is often used to recommend the user some course of action, or to provide
|
|
805 |
+ additional context about the error.
|
|
806 |
+ |
|
807 |
+* **temporary:** Some errors are allowed to be *temporary*; this attribute is only
|
|
808 |
+ observed from child processes which fail in a temporary way. This distinction
|
|
809 |
+ is used to determine whether the task should be *retried* or not. An error is usually
|
|
810 |
+ only a *temporary* error if the cause of the error was a network timeout.
|
|
811 |
+ |
|
812 |
+* **reason:** A machine readable identifier for the error. This is used for the purpose
|
|
813 |
+ of regression testing, such that we check that BuildStream has errored out for the
|
|
814 |
+ expected reason in a given failure mode.
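To tie these properties together, a hedged sketch of raising such an error (whether a given ``BstError`` subclass accepts all of these keyword arguments should be verified against the class itself)::

    raise SourceError("{}: Failed to fetch source".format(self),
                      detail="Check your network connection and retry",
                      reason="fetch-failed",
                      temporary=True)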
|
|
815 |
+ |
|
816 |
+ |
|
817 |
+Documenting Exceptions
|
|
818 |
+''''''''''''''''''''''
|
|
819 |
+We have already seen :ref:`some examples <contributing_class_order>` of how
|
|
820 |
+exceptions are documented in API documenting comments, but this is worth some
|
|
821 |
+additional disambiguation.
|
|
822 |
+ |
|
823 |
+* Only document the exceptions which are raised directly by the function in question.
|
|
824 |
+ It is otherwise nearly impossible to keep track of what exceptions *might* be raised
|
|
825 |
+ indirectly by calling the given function.
|
|
826 |
+ |
|
827 |
+* For a regular public or private method, your audience is a caller of the function;
|
|
828 |
+ document the exception in terms of what exception might be raised as a result of
|
|
829 |
+ calling this method.
|
|
830 |
+ |
|
831 |
+* For an :ref:`abstract method <contributing_abstract_methods>`, your audience is the
|
|
832 |
+ implementor of the method in a subclass; document the exception in terms of what
|
|
833 |
+ exception is prescribed for the implementing class to raise.
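For example, a brief sketch of an internal method documenting only the exception it raises directly (the method itself is hypothetical)::

    # _fetch_ref()
    #
    # Fetches the given ref for this source
    #
    # Args:
    #    ref (str): The ref to fetch
    #
    # Raises:
    #    (SourceError): If the ref could not be fetched
    #
    def _fetch_ref(self, ref):
        ...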
|
|
834 |
+ |
|
835 |
+ |
|
836 |
+.. _contributing_always_consistent:
|
|
837 |
+ |
|
838 |
+Always be consistent
|
|
839 |
+~~~~~~~~~~~~~~~~~~~~
|
|
840 |
+There are various ways to define functions and classes in Python,
|
|
841 |
+which have evolved with various features over time.
|
|
842 |
+ |
|
843 |
+In BuildStream, we may not have leveraged all of the nice features
|
|
844 |
+we could have; that is okay, and where it does not break API, we
|
|
845 |
+can consider changing it.
|
|
846 |
+ |
|
847 |
+Even if you know there is a *better* way to do a given thing in
|
|
848 |
+Python when compared to the way we do it in BuildStream, *do not do it*.
|
|
849 |
+ |
|
850 |
+Consistency of how we do things in the codebase is more important
|
|
851 |
+than the actual way in which things are done, always.
|
|
852 |
+ |
|
853 |
+Instead, if you like a certain Python feature and think the BuildStream
|
|
854 |
+codebase should use it, then propose your change on the `mailing list
|
|
855 |
+<https://mail.gnome.org/mailman/listinfo/buildstream-list>`_. Chances
|
|
856 |
+are that we will reach agreement to use your preferred approach, and
|
|
857 |
+in that case, it will be important to apply the change unilaterally
|
|
858 |
+across the entire codebase, such that we continue to have a consistent
|
|
859 |
+codebase.
|
|
860 |
+ |
|
861 |
+ |
|
862 |
+Avoid tail calling
|
|
863 |
+~~~~~~~~~~~~~~~~~~
|
|
864 |
+With the exception of tail calling with simple functions from
|
|
865 |
+the standard Python library, such as splitting and joining lines
|
|
866 |
+of text and encoding/decoding text, always avoid tail calling.
|
|
867 |
+ |
|
868 |
+**Good**::
|
|
869 |
+ |
|
870 |
+ # Variables that we will need declared up top
|
|
871 |
+ context = self._get_context()
|
|
872 |
+ workspaces = context.get_workspaces()
|
|
873 |
+ |
|
874 |
+ ...
|
|
875 |
+ |
|
876 |
+ # Saving the workspace configuration
|
|
877 |
+ workspaces.save_config()
|
|
878 |
+ |
|
879 |
+**Bad**::
|
|
880 |
+ |
|
881 |
+ # Saving the workspace configuration
|
|
882 |
+ self._get_context().get_workspaces().save_config()
|
|
883 |
+ |
|
884 |
+**Acceptable**::
|
|
885 |
+ |
|
886 |
+ # Decode the raw text loaded from a log file for display,
|
|
887 |
+ # join them into a single utf-8 string and strip away any
|
|
888 |
+ # trailing whitespace.
|
|
889 |
+ return '\n'.join([line.decode('utf-8') for line in lines]).rstrip()
|
|
890 |
+ |
|
891 |
+When you need to obtain a delegate object via an accessor function,
|
|
892 |
+either do it at the beginning of the function, or at the beginning
|
|
893 |
+of a code block within the function that will use that object.
|
|
894 |
+ |
|
895 |
+There are several reasons for this convention:
|
|
896 |
+ |
|
897 |
+* When observing a stack trace, it is always faster and easier to
|
|
898 |
+ determine what went wrong when all statements are on separate lines.
|
|
899 |
+ |
|
900 |
+* We always want individual lines to trace back to their origin as
|
|
901 |
+ much as possible for the purpose of tracing the history of code
|
|
902 |
+ with *git blame*.
|
|
903 |
+ |
|
904 |
+ One day, you might need the ``Context`` or ``Workspaces`` object
|
|
905 |
+ in the same function for another reason, at which point it will
|
|
906 |
+ be unacceptable to leave the existing line as written, because it
|
|
907 |
+ will introduce a redundant accessor to the same object, so the
|
|
908 |
+ line written as::
|
|
909 |
+ |
|
910 |
+ self._get_context().get_workspaces().save_config()
|
|
911 |
+ |
|
912 |
+ Will have to change at that point, meaning we lose the valuable
|
|
913 |
+ information of which commit originally introduced this call
|
|
914 |
+ when running *git blame*.
|
|
915 |
+ |
|
916 |
+* For similar reasons, we prefer delegate objects be accessed near
|
|
917 |
+ the beginning of a function or code block so that there is less
|
|
918 |
+ chance that this statement will have to move in the future, if
|
|
919 |
+ the same function or code block needs the delegate object for any
|
|
920 |
+ other reason.
|
|
921 |
+ |
|
922 |
+ Aside from this, code is generally more legible and uniform when
|
|
923 |
+ variables are declared at the beginning of function blocks.
|
|
924 |
+ |
|
925 |
+ |
|
926 |
+Vertical stacking of modules
|
|
927 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
928 |
+For the sake of overall comprehensibility of the BuildStream
|
|
929 |
+architecture, it is important that we retain vertical stacking
|
|
930 |
+order of the dependencies and knowledge of modules as much as
|
|
931 |
+possible, and avoid any cyclic relationships in modules.
|
|
932 |
+ |
|
933 |
+For instance, the ``Source`` objects are owned by ``Element``
|
|
934 |
+objects in the BuildStream data model, and as such the ``Element``
|
|
935 |
+will delegate some activities to the ``Source`` objects in its
|
|
936 |
+possession. The ``Source`` objects should however never call functions
|
|
937 |
+on the ``Element`` object, nor should the ``Source`` object itself
|
|
938 |
+have any understanding of what an ``Element`` is.
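A minimal sketch of this one way direction of knowledge (the method names here are illustrative)::

    class Source():

        # fetch()
        #
        # Fetches this source, with no knowledge of any Element.
        #
        def fetch(self):
            pass

    class Element():

        def __init__(self, sources):
            self.__sources = sources   # The Sources owned by this Element

        # _fetch_sources()
        #
        # The Element delegates fetching to the Sources it owns.
        #
        def _fetch_sources(self):
            for source in self.__sources:
                source.fetch()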
|
|
939 |
+ |
|
940 |
+If you are implementing a low level utility layer, for example
|
|
941 |
+as a part of the ``YAML`` loading code layers, it can be tempting
|
|
942 |
+to derive context from the higher levels of the codebase which use
|
|
943 |
+these low level utilities, instead of defining properly stand alone
|
|
944 |
+APIs for these utilities to work: Never do this.
|
|
945 |
+ |
|
946 |
+Unfortunately, unlike other languages where include files play
|
|
947 |
+a big part in ensuring that it is difficult to make a mess, Python
|
|
948 |
+allows you to just call methods on arbitrary objects passed through
|
|
949 |
+a function call without having to import the module which defines
|
|
950 |
+those methods - this leads to cyclic dependencies of modules quickly
|
|
951 |
+if the developer does not take special care of ensuring this does not
|
|
952 |
+happen.
|
|
953 |
+ |
|
954 |
+ |
|
955 |
+Minimize arguments in methods
|
|
956 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
957 |
+When creating an object, or adding a new API method to an existing
|
|
958 |
+object, always strive to keep as much context as possible on the
|
|
959 |
+object itself rather than expecting callers of the methods to provide
|
|
960 |
+everything the method needs every time.
|
|
961 |
+ |
|
962 |
+If the value or object that is needed in a function call is a constant
|
|
963 |
+for the lifetime of the object which exposes the given method, then
|
|
964 |
+that value or object should be passed in the constructor instead of
|
|
965 |
+via a method call.
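+
+For example, with a hypothetical object which always operates within
+the context it was created for::
+
+   # Avoid: the context is constant for the lifetime of the cache,
+   # yet every caller must pass it along with every method call
+   #
+   #    cache.fetch(context, ref)
+   #
+   # Prefer: pass the constant object once, in the constructor
+   class ArtifactCache():
+       def __init__(self, context):
+           self._context = context
+
+       def fetch(self, ref):
+           # self._context is available without burdening callers
+           pass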
|
|
966 |
+ |
|
967 |
+ |
|
968 |
+Minimize API surfaces
|
|
969 |
+~~~~~~~~~~~~~~~~~~~~~
|
|
970 |
+When creating an object, or adding new functionality in any way,
|
|
971 |
+try to keep the number of :ref:`public, outward facing <contributing_public_and_private>`
|
|
972 |
+symbols to a minimum; this is important for both
|
|
973 |
+:ref:`internal and public, plugin facing API surfaces <contributing_public_api_surface>`.
|
|
974 |
+ |
|
975 |
+When anyone visits a file, there are two levels of comprehension:
|
|
976 |
+ |
|
977 |
+* What do I need to know in order to *use* this object
|
|
978 |
+ |
|
979 |
+* What do I need to know in order to *modify* this object
|
|
980 |
+ |
|
981 |
+For the former, we want the reader to understand, with as little effort
|
|
982 |
+as possible, what the public API contract is for a given object and, consequently,
|
|
983 |
+how it is expected to be used. This is also why we
|
|
984 |
+:ref:`order the symbols of a class <contributing_class_order>` in such a way
|
|
985 |
+as to keep all outward facing public API surfaces at the top of the file, so that the
|
|
986 |
+reader never needs to dig deep into the bottom of the file to find something they
|
|
987 |
+might need to use.
|
|
988 |
+ |
|
989 |
+For the latter, when it comes to having to modify the file or add functionality,
|
|
990 |
+you want to retain as much freedom as possible to modify internals, while
|
|
991 |
+being sure that nothing external will be affected by internal modifications.
|
|
992 |
+Less client facing API means that you have less surrounding code to modify
|
|
993 |
+when your API changes. Further, ensuring that there is minimal outward facing
|
|
994 |
+API for any module minimizes the complexity for the developer working on
|
|
995 |
+that module, by limiting the considerations needed regarding external side
|
|
996 |
+effects of their modifications to the module.
|
|
997 |
+ |
|
998 |
+When modifying a file, one should not have to understand or think too
|
|
999 |
+much about external side effects, when the API surface of the file is
|
|
1000 |
+well documented and minimal.
|
|
1001 |
+ |
|
1002 |
+When adding new API to a given object for a new purpose, consider whether
|
|
1003 |
+the new API is in any way redundant with other API (should this value now
|
|
1004 |
+go into the constructor, since we use it more than once? Could this
|
|
1005 |
+value be passed along with another function, and the other function renamed,
|
|
1006 |
+to better suit the new purposes of this module/object?) and reconsider
|
|
1007 |
+the outward facing API of the object as a whole every time.
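+
+In practice, this follows the underscore convention for private
+symbols; a sketch with a hypothetical class::
+
+   class Widget():
+
+       # The public, outward facing API surface is kept small and
+       # sits at the top of the class
+       def render(self):
+           return self._compose()
+
+       # Private symbols below this point can be reworked freely,
+       # since no external code can be affected by changes here
+       def _compose(self):
+           return "..."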
|
|
1008 |
+ |
|
1009 |
+ |
|
1010 |
+Avoid transient state on instances
|
|
1011 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
1012 |
+At times, it can be tempting to store transient state that is
|
|
1013 |
+the result of one operation on an instance, only to be retrieved
|
|
1014 |
+later via an accessor function elsewhere.
|
|
1015 |
+ |
|
1016 |
+As a basic rule of thumb, if the value is transient and just the
|
|
1017 |
+result of one operation, which needs to be observed directly afterwards
|
|
1018 |
+by another code segment, then never store it on the instance.
|
|
1019 |
+ |
|
1020 |
+BuildStream is complicated in the sense that it is multi-processed
|
|
1021 |
+and it is not always obvious how to pass the transient state around
|
|
1022 |
+as a return value or a function parameter. Do not fall prey to this
|
|
1023 |
+obstacle and pollute object instances with transient state.
|
|
1024 |
+ |
|
1025 |
+Instead, always refactor the surrounding code so that the value
|
|
1026 |
+is propagated to the desired end point via a well defined API, either
|
|
1027 |
+by adding new code paths or changing the design such that the
|
|
1028 |
+architecture continues to make sense.
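+
+A sketch of the rule, with hypothetical names::
+
+   class Resolver():
+
+       # Avoid: parking the transient result on the instance, e.g.
+       #
+       #    self._resolved_ref = self._lookup_ref()  # read back elsewhere
+       #
+       # Prefer: pass the value on to its end point explicitly
+       def resolve(self):
+           resolved_ref = self._lookup_ref()
+           self._apply_ref(resolved_ref)
+
+       def _lookup_ref(self):
+           return "deadbeef"
+
+       def _apply_ref(self, ref):
+           # A well defined end point for the value
+           pass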
|
|
1029 |
+ |
|
1030 |
+ |
|
1031 |
+Refactor the codebase as needed
|
|
1032 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
1033 |
+Especially when implementing features, always move the BuildStream
|
|
1034 |
+codebase forward as a whole.
|
|
1035 |
+ |
|
1036 |
+Taking a shortcut is alright when prototyping, but circumventing
|
|
1037 |
+existing architecture and design to get a feature implemented without
|
|
1038 |
+re-designing the surrounding architecture to accommodate the new
|
|
1039 |
+feature is never acceptable upstream.
|
|
1040 |
+ |
|
1041 |
+For example, let's say that you have to implement a feature and you've
|
|
1042 |
+successfully prototyped it, but it launches a ``Job`` directly from a
|
|
1043 |
+``Queue`` implementation to get the feature to work, while the ``Scheduler``
|
|
1044 |
+is normally responsible for dispatching ``Jobs`` for the elements on
|
|
1045 |
+a ``Queue``. This means that you've proven that your feature can work,
|
|
1046 |
+and now it is time to start working on a patch for upstream.
|
|
1047 |
+ |
|
1048 |
+Consider what the scenario is and why you are circumventing the design,
|
|
1049 |
+and then redesign the ``Scheduler`` and ``Queue`` objects to accommodate for
|
|
1050 |
+the new feature and condition under which you need to dispatch a ``Job``,
|
|
1051 |
+or how you can give the ``Queue`` implementation the additional context it
|
|
1052 |
+needs.
|
|
1053 |
+ |
|
1054 |
+ |
|
1055 |
+Adding core plugins
|
|
1056 |
+-------------------
|
|
1057 |
+This is a checklist of things which need to be done when adding a new
|
|
1058 |
+core plugin to BuildStream proper.
|
|
1059 |
+ |
|
1060 |
+ |
|
1061 |
+Update documentation index
|
|
1062 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
1063 |
+The documentation generating scripts will automatically pick up your
|
|
1064 |
+newly added plugin and generate HTML, but will not automatically
|
|
1065 |
+add a link to your plugin's documentation.
|
|
1066 |
+ |
|
1067 |
+Whenever adding a new plugin, you must add an entry for it in ``doc/source/core_plugins.rst``.
|
|
1068 |
+ |
|
1069 |
+ |
|
1070 |
+Bump format version
|
|
1071 |
+~~~~~~~~~~~~~~~~~~~
|
|
1072 |
+In order for projects to assert that they have a new enough version
|
|
1073 |
+of BuildStream to use the new plugin, the ``BST_FORMAT_VERSION`` must
|
|
1074 |
+be incremented in the ``_versions.py`` file.
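+
+The bump itself is a one line change, e.g. (the version numbers here
+are illustrative only)::
+
+   # buildstream/_versions.py
+   BST_FORMAT_VERSION = 17  # incremented from 16 for the new plugin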
|
|
1075 |
+ |
|
1076 |
+Remember to include, in your plugin's main documentation, the format
|
|
1077 |
+version in which the plugin was introduced, using the standard annotation
|
|
1078 |
+which we use throughout the documentation, e.g.::
|
|
1079 |
+ |
|
1080 |
+ .. note::
|
|
1081 |
+ |
|
1082 |
+ The ``foo`` plugin is available since :ref:`format version 16 <project_format_version>`
|
|
1083 |
+ |
|
1084 |
+ |
|
1085 |
+Add tests
|
|
1086 |
+~~~~~~~~~
|
|
1087 |
+Needless to say, all new feature additions need to be tested. For ``Element``
|
|
1088 |
+plugins, these usually need to be added to the integration tests. For ``Source``
|
|
1089 |
+plugins, the tests are added in two ways:
|
|
1090 |
+ |
|
1091 |
+* For most normal ``Source`` plugins, it is important to add a new ``Repo``
|
|
1092 |
+ implementation for your plugin in the ``tests/testutils/repo/`` directory
|
|
1093 |
+ and update ``ALL_REPO_KINDS`` in ``tests/testutils/repo/__init__.py``. This
|
|
1094 |
+ will include your new ``Source`` implementation in a series of already existing
|
|
1095 |
+  tests, ensuring it works well under normal operating conditions (a sketch follows this list).
|
|
1096 |
+ |
|
1097 |
+* For other source plugins, or in order to test edge cases, such as failure modes,
|
|
1098 |
+ which are not tested under the normal test battery, add new tests in ``tests/sources``.
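+
+For the first case, a new ``Repo`` implementation might look roughly
+like the following sketch; the exact ``Repo`` base class interface is
+assumed here (a ``create()`` method which imports a directory and
+returns a ref, and a ``source_config()`` method which returns the
+source dict used in generated test elements)::
+
+   from .repo import Repo
+
+
+   class Foo(Repo):
+
+       def create(self, directory):
+           # Import the files in the given directory into the backing
+           # repository and return an opaque ref for that snapshot
+           ...
+
+       def source_config(self, ref=None):
+           config = {
+               'kind': 'foo',
+               'url': 'file://' + self.repo
+           }
+           if ref is not None:
+               config['ref'] = ref
+           return config
+
+The new kind is then registered by adding it to ``ALL_REPO_KINDS`` as
+described above.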
|
|
1099 |
+ |
|
1100 |
+ |
|
1101 |
+Extend the cachekey test
|
|
1102 |
+~~~~~~~~~~~~~~~~~~~~~~~~
|
|
1103 |
+For any newly added plugins, it is important to add some new simple elements
|
|
1104 |
+in ``tests/cachekey/project/elements`` or ``tests/cachekey/project/sources``,
|
|
1105 |
+and ensure that the newly added elements are depended on by ``tests/cachekey/project/target.bst``.
|
|
1106 |
+ |
|
1107 |
+One new element should be added to the cache key test for every configuration
|
|
1108 |
+value which your plugin understands and which can possibly affect the result of
|
|
1109 |
+your plugin's ``Plugin.get_unique_key()`` implementation.
|
|
1110 |
+ |
|
1111 |
+This test ensures that cache keys do not unexpectedly change or become incompatible
|
|
1112 |
+due to code changes. As such, the cache key test should have full coverage of every
|
|
1113 |
+YAML configuration which can possibly affect cache key outcome at all times.
|
|
1114 |
+ |
|
1115 |
+See the ``tests/cachekey/update.py`` file for instructions on running the updater;
|
|
1116 |
+you need to run the updater to generate the ``.expected`` files and add the new
|
|
1117 |
+``.expected`` files in the same commit which extends the cache key test.
|
|
1118 |
+ |
|
1119 |
+ |
|
1120 |
+Protocol buffers
|
|
1121 |
+----------------
|
|
1122 |
+BuildStream uses protobuf and gRPC for serialization and communication with
|
|
1123 |
+artifact cache servers. This requires ``.proto`` files and Python code
|
|
1124 |
+generated from the ``.proto`` files using protoc. All these files live in the
|
|
1125 |
+``buildstream/_protos`` directory. The generated files are included in the
|
|
1126 |
+git repository to avoid depending on grpcio-tools for user installations.
|
|
1127 |
+ |
|
1128 |
+ |
|
1129 |
+Regenerating code
|
|
1130 |
+~~~~~~~~~~~~~~~~~
|
|
1131 |
+When ``.proto`` files are modified, the corresponding Python code needs to
|
|
1132 |
+be regenerated. As a prerequisite for code generation you need to install
|
|
1133 |
+``grpcio-tools`` using pip or some other mechanism::
|
|
1134 |
+ |
|
1135 |
+ pip3 install --user grpcio-tools
|
|
1136 |
+ |
|
1137 |
+To actually regenerate the code::
|
|
1138 |
+ |
|
1139 |
+ ./setup.py build_grpc
|
|
1140 |
+ |
|
1141 |
+ |
|
1142 |
+Documenting
|
|
1143 |
+-----------
|
|
222 | 1144 |
BuildStream starts out as a documented project from day one and uses
|
223 | 1145 |
sphinx to document itself.
|
224 | 1146 |
|
1147 |
+This section discusses formatting policies for editing files in the
|
|
1148 |
+``doc/source`` directory, and describes the details of how the docs are
|
|
1149 |
+generated so that you can easily generate and view the docs yourself before
|
|
1150 |
+submitting patches to the documentation.
|
|
1151 |
+ |
|
1152 |
+For details on how API documenting comments and docstrings are formatted,
|
|
1153 |
+refer to the :ref:`documenting section of the coding guidelines
|
|
1154 |
+<contributing_documenting_symbols>`.
|
|
1155 |
+ |
|
225 | 1156 |
|
226 | 1157 |
Documentation formatting policy
|
227 | 1158 |
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
228 | 1159 |
The BuildStream documentation style is as follows:
|
229 | 1160 |
|
230 |
-* Titles and headings require two leading empty lines above them. Only the first word should be capitalized.
|
|
1161 |
+* Titles and headings require two leading empty lines above them.
|
|
1162 |
+ Only the first word in a title should be capitalized.
|
|
231 | 1163 |
|
232 |
- * If there is an ``.. _internal_link`` anchor, there should be two empty lines above the anchor, followed by one leading empty line.
|
|
1164 |
+ * If there is an ``.. _internal_link:`` anchor, there should be two empty lines
|
|
1165 |
+ above the anchor, followed by one leading empty line.
|
|
233 | 1166 |
|
234 | 1167 |
* Within a section, paragraphs should be separated by one empty line.
|
235 | 1168 |
|
236 |
-* Notes are defined using: ``.. note::`` blocks, followed by an empty line and then indented (3 spaces) text.
|
|
1169 |
+* Notes are defined using: ``.. note::`` blocks, followed by an empty line
|
|
1170 |
+ and then indented (3 spaces) text.
|
|
1171 |
+ |
|
1172 |
+ * Other kinds of notes can be used throughout the documentation and will
|
|
1173 |
+   be decorated in different ways; these work in the same way as ``.. note::`` does.
|
|
237 | 1174 |
|
238 |
-* Code blocks are defined using: ``.. code:: LANGUAGE`` blocks, followed by an empty line and then indented (3 spaces) text. Note that the default language is `python`.
|
|
1175 |
+ Feel free to also use ``.. attention::`` or ``.. important::`` to call special
|
|
1176 |
+ attention to a paragraph, ``.. tip::`` to give the reader a special tip on how
|
|
1177 |
+ to use an advanced feature or ``.. warning::`` to warn the user about a potential
|
|
1178 |
+  misuse of the API and explain its consequences.
|
|
1179 |
+ |
|
1180 |
+* Code blocks are defined using: ``.. code:: LANGUAGE`` blocks, followed by an empty
|
|
1181 |
+ line and then indented (3 spaces) text. Note that the default language is ``python``.
|
|
239 | 1182 |
|
240 | 1183 |
* Cross references should be of the form ``:role:`target```.
|
241 | 1184 |
|
242 |
- * To cross reference arbitrary locations with, for example, the anchor ``_anchor_name``, you must give the link an explicit title: ``:ref:`Link text <anchor_name>```. Note that the "_" prefix is not required.
|
|
1185 |
+ * Explicit anchors can be declared as ``.. _anchor_name:`` on a line by itself.
|
|
1186 |
+
|
|
1187 |
+ * To cross reference arbitrary locations with, for example, the anchor ``anchor_name``,
|
|
1188 |
+ always provide some explicit text in the link instead of deriving the text from
|
|
1189 |
+ the target, e.g.: ``:ref:`Link text <anchor_name>```.
|
|
1190 |
+ Note that the "_" prefix is not used when referring to the target.
|
|
243 | 1191 |
|
244 | 1192 |
Useful links:
|
245 | 1193 |
|
246 |
-For further information, please see the `Sphinx Documentation <http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html>`_.
|
|
1194 |
+For further information, please see the `Sphinx Documentation
|
|
1195 |
+<http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html>`_.
|
|
247 | 1196 |
|
248 | 1197 |
|
249 | 1198 |
Building Docs
|
... | ... | @@ -312,32 +1261,6 @@ the ``man/`` subdirectory, which will be automatically included |
312 | 1261 |
in the buildstream distribution.
|
313 | 1262 |
|
314 | 1263 |
|
315 |
-Documenting conventions
|
|
316 |
-~~~~~~~~~~~~~~~~~~~~~~~
|
|
317 |
-We use the sphinx.ext.napoleon extension for the purpose of having
|
|
318 |
-a bit nicer docstrings than the default sphinx docstrings.
|
|
319 |
- |
|
320 |
-A docstring for a method, class or function should have the following
|
|
321 |
-format::
|
|
322 |
- |
|
323 |
- """Brief description of entity
|
|
324 |
- |
|
325 |
- Args:
|
|
326 |
- argument1 (type): Description of arg
|
|
327 |
- argument2 (type): Description of arg
|
|
328 |
- |
|
329 |
- Returns:
|
|
330 |
- (type): Description of returned thing of the specified type
|
|
331 |
- |
|
332 |
- Raises:
|
|
333 |
- (SomeError): When some error occurs
|
|
334 |
- (SomeOtherError): When some other error occurs
|
|
335 |
- |
|
336 |
- A detailed description can go here if one is needed, only
|
|
337 |
- after the above part documents the calling conventions.
|
|
338 |
- """
|
|
339 |
- |
|
340 |
- |
|
341 | 1264 |
Documentation Examples
|
342 | 1265 |
~~~~~~~~~~~~~~~~~~~~~~
|
343 | 1266 |
The examples section of the documentation contains a series of standalone
|
... | ... | @@ -419,30 +1342,10 @@ regenerate them locally in order to build the docs. |
419 | 1342 |
command: build hello.bst
|
420 | 1343 |
|
421 | 1344 |
|
422 |
-Protocol Buffers
|
|
423 |
-----------------
|
|
424 |
-BuildStream uses protobuf and gRPC for serialization and communication with
|
|
425 |
-artifact cache servers. This requires ``.proto`` files and Python code
|
|
426 |
-generated from the ``.proto`` files using protoc. All these files live in the
|
|
427 |
-``buildstream/_protos`` directory. The generated files are included in the
|
|
428 |
-git repository to avoid depending on grpcio-tools for user installations.
|
|
1345 |
+.. _contributing_testing:
|
|
429 | 1346 |
|
430 |
- |
|
431 |
-Regenerating code
|
|
432 |
-~~~~~~~~~~~~~~~~~
|
|
433 |
-When ``.proto`` files are modified, the corresponding Python code needs to
|
|
434 |
-be regenerated. As a prerequisite for code generation you need to install
|
|
435 |
-``grpcio-tools`` using pip or some other mechanism::
|
|
436 |
- |
|
437 |
- pip3 install --user grpcio-tools
|
|
438 |
- |
|
439 |
-To actually regenerate the code::
|
|
440 |
- |
|
441 |
- ./setup.py build_grpc
|
|
442 |
- |
|
443 |
- |
|
444 |
-Testing BuildStream
|
|
445 |
--------------------
|
|
1347 |
+Testing
|
|
1348 |
+-------
|
|
446 | 1349 |
BuildStream uses pytest for regression tests and testing out
|
447 | 1350 |
the behavior of newly added components.
|
448 | 1351 |
|
... | ... | @@ -495,6 +1398,7 @@ with:: |
495 | 1398 |
Alternatively, any IDE plugin that uses pytest should automatically
|
496 | 1399 |
detect the ``.pylintrc`` in the project's root directory.
|
497 | 1400 |
|
1401 |
+ |
|
498 | 1402 |
Adding tests
|
499 | 1403 |
~~~~~~~~~~~~
|
500 | 1404 |
Tests are found in the tests subdirectory, inside of which
|
... | ... | @@ -521,8 +1425,9 @@ Tests that run a sandbox should be decorated with:: |
521 | 1425 |
|
522 | 1426 |
and use the integration cli helper.
|
523 | 1427 |
|
524 |
-Measuring BuildStream performance
|
|
525 |
----------------------------------
|
|
1428 |
+ |
|
1429 |
+Measuring performance
|
|
1430 |
+---------------------
|
|
526 | 1431 |
|
527 | 1432 |
|
528 | 1433 |
Benchmarking framework
|
... | ... | @@ -506,7 +506,7 @@ class CASCache(ArtifactCache): |
506 | 506 |
def set_ref(self, ref, tree):
|
507 | 507 |
refpath = self._refpath(ref)
|
508 | 508 |
os.makedirs(os.path.dirname(refpath), exist_ok=True)
|
509 |
- with utils.save_file_atomic(refpath, 'wb') as f:
|
|
509 |
+ with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
|
|
510 | 510 |
f.write(tree.SerializeToString())
|
511 | 511 |
|
512 | 512 |
# resolve_ref():
|
... | ... | @@ -1048,10 +1048,29 @@ class CASCache(ArtifactCache): |
1048 | 1048 |
missing_blobs[d.hash] = d
|
1049 | 1049 |
|
1050 | 1050 |
# Upload any blobs missing on the server
|
1051 |
- for blob_digest in missing_blobs.values():
|
|
1052 |
- with open(self.objpath(blob_digest), 'rb') as f:
|
|
1053 |
- assert os.fstat(f.fileno()).st_size == blob_digest.size_bytes
|
|
1054 |
- self._send_blob(remote, blob_digest, f, u_uid=u_uid)
|
|
1051 |
+ self._send_blobs(remote, missing_blobs.values(), u_uid)
|
|
1052 |
+ |
|
1053 |
+ def _send_blobs(self, remote, digests, u_uid=uuid.uuid4()):
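+        # Note: the default u_uid above is evaluated only once, when the
+        # function is defined, so calls which omit u_uid share one uuid.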
|
|
1054 |
+ batch = _CASBatchUpdate(remote)
|
|
1055 |
+ |
|
1056 |
+ for digest in digests:
|
|
1057 |
+ with open(self.objpath(digest), 'rb') as f:
|
|
1058 |
+ assert os.fstat(f.fileno()).st_size == digest.size_bytes
|
|
1059 |
+ |
|
1060 |
+ if (digest.size_bytes >= remote.max_batch_total_size_bytes or
|
|
1061 |
+ not remote.batch_update_supported):
|
|
1062 |
+ # Too large for batch request, upload in independent request.
|
|
1063 |
+ self._send_blob(remote, digest, f, u_uid=u_uid)
|
|
1064 |
+ else:
|
|
1065 |
+ if not batch.add(digest, f):
|
|
1066 |
+ # Not enough space left in batch request.
|
|
1067 |
+ # Complete pending batch first.
|
|
1068 |
+ batch.send()
|
|
1069 |
+ batch = _CASBatchUpdate(remote)
|
|
1070 |
+ batch.add(digest, f)
|
|
1071 |
+ |
|
1072 |
+ # Send final batch
|
|
1073 |
+ batch.send()
|
|
1055 | 1074 |
|
1056 | 1075 |
|
1057 | 1076 |
# Represents a single remote CAS cache.
|
... | ... | @@ -1126,6 +1145,17 @@ class _CASRemote(): |
1126 | 1145 |
if e.code() != grpc.StatusCode.UNIMPLEMENTED:
|
1127 | 1146 |
raise
|
1128 | 1147 |
|
1148 |
+ # Check whether the server supports BatchUpdateBlobs()
|
|
1149 |
+ self.batch_update_supported = False
|
|
1150 |
+ try:
|
|
1151 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest()
|
|
1152 |
+ response = self.cas.BatchUpdateBlobs(request)
|
|
1153 |
+ self.batch_update_supported = True
|
|
1154 |
+ except grpc.RpcError as e:
|
|
1155 |
+ if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
|
|
1156 |
+ e.code() != grpc.StatusCode.PERMISSION_DENIED):
|
|
1157 |
+ raise
|
|
1158 |
+ |
|
1129 | 1159 |
self._initialized = True
|
1130 | 1160 |
|
1131 | 1161 |
|
... | ... | @@ -1173,6 +1203,46 @@ class _CASBatchRead(): |
1173 | 1203 |
yield (response.digest, response.data)
|
1174 | 1204 |
|
1175 | 1205 |
|
1206 |
+# Represents a batch of blobs queued for upload.
|
|
1207 |
+#
|
|
1208 |
+class _CASBatchUpdate():
|
|
1209 |
+ def __init__(self, remote):
|
|
1210 |
+ self._remote = remote
|
|
1211 |
+ self._max_total_size_bytes = remote.max_batch_total_size_bytes
|
|
1212 |
+ self._request = remote_execution_pb2.BatchUpdateBlobsRequest()
|
|
1213 |
+ self._size = 0
|
|
1214 |
+ self._sent = False
|
|
1215 |
+ |
|
1216 |
+ def add(self, digest, stream):
|
|
1217 |
+ assert not self._sent
|
|
1218 |
+ |
|
1219 |
+ new_batch_size = self._size + digest.size_bytes
|
|
1220 |
+ if new_batch_size > self._max_total_size_bytes:
|
|
1221 |
+ # Not enough space left in current batch
|
|
1222 |
+ return False
|
|
1223 |
+ |
|
1224 |
+ blob_request = self._request.requests.add()
|
|
1225 |
+ blob_request.digest.hash = digest.hash
|
|
1226 |
+ blob_request.digest.size_bytes = digest.size_bytes
|
|
1227 |
+ blob_request.data = stream.read(digest.size_bytes)
|
|
1228 |
+ self._size = new_batch_size
|
|
1229 |
+ return True
|
|
1230 |
+ |
|
1231 |
+ def send(self):
|
|
1232 |
+ assert not self._sent
|
|
1233 |
+ self._sent = True
|
|
1234 |
+ |
|
1235 |
+ if len(self._request.requests) == 0:
|
|
1236 |
+ return
|
|
1237 |
+ |
|
1238 |
+ batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
|
|
1239 |
+ |
|
1240 |
+ for response in batch_response.responses:
|
|
1241 |
+ if response.status.code != grpc.StatusCode.OK.value[0]:
|
|
1242 |
+ raise ArtifactError("Failed to upload blob {}: {}".format(
|
|
1243 |
+ response.digest.hash, response.status.code))
|
|
1244 |
+ |
|
1245 |
+ |
|
1176 | 1246 |
def _grouper(iterable, n):
|
1177 | 1247 |
while True:
|
1178 | 1248 |
try:
|
... | ... | @@ -68,7 +68,7 @@ def create_server(repo, *, enable_push): |
68 | 68 |
_ByteStreamServicer(artifactcache, enable_push=enable_push), server)
|
69 | 69 |
|
70 | 70 |
remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
|
71 |
- _ContentAddressableStorageServicer(artifactcache), server)
|
|
71 |
+ _ContentAddressableStorageServicer(artifactcache, enable_push=enable_push), server)
|
|
72 | 72 |
|
73 | 73 |
remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
|
74 | 74 |
_CapabilitiesServicer(), server)
|
... | ... | @@ -222,9 +222,10 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer): |
222 | 222 |
|
223 | 223 |
|
224 | 224 |
class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
|
225 |
- def __init__(self, cas):
|
|
225 |
+ def __init__(self, cas, *, enable_push):
|
|
226 | 226 |
super().__init__()
|
227 | 227 |
self.cas = cas
|
228 |
+ self.enable_push = enable_push
|
|
228 | 229 |
|
229 | 230 |
def FindMissingBlobs(self, request, context):
|
230 | 231 |
response = remote_execution_pb2.FindMissingBlobsResponse()
|
... | ... | @@ -260,6 +261,46 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres |
260 | 261 |
|
261 | 262 |
return response
|
262 | 263 |
|
264 |
+ def BatchUpdateBlobs(self, request, context):
|
|
265 |
+ response = remote_execution_pb2.BatchUpdateBlobsResponse()
|
|
266 |
+ |
|
267 |
+ if not self.enable_push:
|
|
268 |
+ context.set_code(grpc.StatusCode.PERMISSION_DENIED)
|
|
269 |
+ return response
|
|
270 |
+ |
|
271 |
+ batch_size = 0
|
|
272 |
+ |
|
273 |
+ for blob_request in request.requests:
|
|
274 |
+ digest = blob_request.digest
|
|
275 |
+ |
|
276 |
+ batch_size += digest.size_bytes
|
|
277 |
+ if batch_size > _MAX_PAYLOAD_BYTES:
|
|
278 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
279 |
+ return response
|
|
280 |
+ |
|
281 |
+ blob_response = response.responses.add()
|
|
282 |
+ blob_response.digest.hash = digest.hash
|
|
283 |
+ blob_response.digest.size_bytes = digest.size_bytes
|
|
284 |
+ |
|
285 |
+ if len(blob_request.data) != digest.size_bytes:
|
|
286 |
+                blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION.value[0]
|
|
287 |
+ continue
|
|
288 |
+ |
|
289 |
+ try:
|
|
290 |
+ _clean_up_cache(self.cas, digest.size_bytes)
|
|
291 |
+ |
|
292 |
+ with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
|
|
293 |
+ out.write(blob_request.data)
|
|
294 |
+ out.flush()
|
|
295 |
+ server_digest = self.cas.add_object(path=out.name)
|
|
296 |
+ if server_digest.hash != digest.hash:
|
|
297 |
+                    blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION.value[0]
|
|
298 |
+ |
|
299 |
+ except ArtifactTooLargeException:
|
|
300 |
+                blob_response.status.code = grpc.StatusCode.RESOURCE_EXHAUSTED.value[0]
|
|
301 |
+ |
|
302 |
+ return response
|
|
303 |
+ |
|
263 | 304 |
|
264 | 305 |
class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
|
265 | 306 |
def GetCapabilities(self, request, context):
|
... | ... | @@ -185,6 +185,6 @@ def _extract_depends_from_node(node, *, key=None): |
185 | 185 |
output_deps.append(dependency)
|
186 | 186 |
|
187 | 187 |
# Now delete the field, we don't want it anymore
|
188 |
- del node[key]
|
|
188 |
+ node.pop(key, None)
|
|
189 | 189 |
|
190 | 190 |
return output_deps
|
... | ... | @@ -29,6 +29,7 @@ from .. import _yaml |
29 | 29 |
from ..element import Element
|
30 | 30 |
from .._profile import Topics, profile_start, profile_end
|
31 | 31 |
from .._includes import Includes
|
32 |
+from .._yamlcache import YamlCache
|
|
32 | 33 |
|
33 | 34 |
from .types import Symbol, Dependency
|
34 | 35 |
from .loadelement import LoadElement
|
... | ... | @@ -112,7 +113,14 @@ class Loader(): |
112 | 113 |
profile_start(Topics.LOAD_PROJECT, target)
|
113 | 114 |
junction, name, loader = self._parse_name(target, rewritable, ticker,
|
114 | 115 |
fetch_subprojects=fetch_subprojects)
|
115 |
- loader._load_file(name, rewritable, ticker, fetch_subprojects)
|
|
116 |
+ |
|
117 |
+ # XXX This will need to be changed to the context's top-level project if this method
|
|
118 |
+ # is ever used for subprojects
|
|
119 |
+ top_dir = self.project.directory
|
|
120 |
+ |
|
121 |
+ cache_file = YamlCache.get_cache_file(top_dir)
|
|
122 |
+ with YamlCache.open(self._context, cache_file) as yaml_cache:
|
|
123 |
+ loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
|
|
116 | 124 |
deps.append(Dependency(name, junction=junction))
|
117 | 125 |
profile_end(Topics.LOAD_PROJECT, target)
|
118 | 126 |
|
... | ... | @@ -201,11 +209,12 @@ class Loader(): |
201 | 209 |
# rewritable (bool): Whether we should load in round trippable mode
|
202 | 210 |
# ticker (callable): A callback to report loaded filenames to the frontend
|
203 | 211 |
# fetch_subprojects (bool): Whether to fetch subprojects while loading
|
212 |
+ # yaml_cache (YamlCache): A yaml cache
|
|
204 | 213 |
#
|
205 | 214 |
# Returns:
|
206 | 215 |
# (LoadElement): A loaded LoadElement
|
207 | 216 |
#
|
208 |
- def _load_file(self, filename, rewritable, ticker, fetch_subprojects):
|
|
217 |
+ def _load_file(self, filename, rewritable, ticker, fetch_subprojects, yaml_cache=None):
|
|
209 | 218 |
|
210 | 219 |
# Silently ignore already loaded files
|
211 | 220 |
if filename in self._elements:
|
... | ... | @@ -218,7 +227,8 @@ class Loader(): |
218 | 227 |
# Load the data and process any conditional statements therein
|
219 | 228 |
fullpath = os.path.join(self._basedir, filename)
|
220 | 229 |
try:
|
221 |
- node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
|
|
230 |
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable,
|
|
231 |
+ project=self.project, yaml_cache=yaml_cache)
|
|
222 | 232 |
except LoadError as e:
|
223 | 233 |
if e.reason == LoadErrorReason.MISSING_FILE:
|
224 | 234 |
# If we can't find the file, try to suggest plausible
|
... | ... | @@ -261,13 +271,13 @@ class Loader(): |
261 | 271 |
# Load all dependency files for the new LoadElement
|
262 | 272 |
for dep in element.deps:
|
263 | 273 |
if dep.junction:
|
264 |
- self._load_file(dep.junction, rewritable, ticker, fetch_subprojects)
|
|
274 |
+ self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
|
|
265 | 275 |
loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
|
266 | 276 |
fetch_subprojects=fetch_subprojects)
|
267 | 277 |
else:
|
268 | 278 |
loader = self
|
269 | 279 |
|
270 |
- dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects)
|
|
280 |
+ dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects, yaml_cache)
|
|
271 | 281 |
|
272 | 282 |
if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
|
273 | 283 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
... | ... | @@ -34,6 +34,9 @@ class Darwin(Platform): |
34 | 34 |
super().__init__()
|
35 | 35 |
|
36 | 36 |
def create_sandbox(self, *args, **kwargs):
|
37 |
+        kwargs['dummy_reason'] = \
+            "OSXFUSE is not supported and there are no supported sandbox " + \
+            "technologies for OSX at this time"
|
|
37 | 40 |
return SandboxDummy(*args, **kwargs)
|
38 | 41 |
|
39 | 42 |
def check_sandbox_config(self, config):
|
... | ... | @@ -41,10 +44,11 @@ class Darwin(Platform): |
41 | 44 |
return True
|
42 | 45 |
|
43 | 46 |
def get_cpu_count(self, cap=None):
|
44 |
- if cap < os.cpu_count():
|
|
45 |
- return cap
|
|
47 |
+ cpu_count = os.cpu_count()
|
|
48 |
+ if cap is None:
|
|
49 |
+ return cpu_count
|
|
46 | 50 |
else:
|
47 |
- return os.cpu_count()
|
|
51 |
+ return min(cpu_count, cap)
|
|
48 | 52 |
|
49 | 53 |
def set_resource_limits(self, soft_limit=OPEN_MAX, hard_limit=None):
|
50 | 54 |
super().set_resource_limits(soft_limit)
|
... | ... | @@ -37,24 +37,30 @@ class Linux(Platform): |
37 | 37 |
self._uid = os.geteuid()
|
38 | 38 |
self._gid = os.getegid()
|
39 | 39 |
|
40 |
+ self._have_fuse = os.path.exists("/dev/fuse")
|
|
41 |
+ self._bwrap_exists = _site.check_bwrap_version(0, 0, 0)
|
|
42 |
+ self._have_good_bwrap = _site.check_bwrap_version(0, 1, 2)
|
|
43 |
+ |
|
44 |
+ self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
|
|
45 |
+ |
|
40 | 46 |
self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
|
41 | 47 |
|
42 |
- if self._local_sandbox_available():
|
|
48 |
+ if self._local_sandbox_available:
|
|
43 | 49 |
self._user_ns_available = self._check_user_ns_available()
|
44 | 50 |
else:
|
45 | 51 |
self._user_ns_available = False
|
46 | 52 |
|
47 | 53 |
def create_sandbox(self, *args, **kwargs):
|
48 |
- if not self._local_sandbox_available():
|
|
49 |
- return SandboxDummy(*args, **kwargs)
|
|
54 |
+ if not self._local_sandbox_available:
|
|
55 |
+ return self._create_dummy_sandbox(*args, **kwargs)
|
|
50 | 56 |
else:
|
51 |
- from ..sandbox._sandboxbwrap import SandboxBwrap
|
|
52 |
- # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
|
|
53 |
- kwargs['user_ns_available'] = self._user_ns_available
|
|
54 |
- kwargs['die_with_parent_available'] = self._die_with_parent_available
|
|
55 |
- return SandboxBwrap(*args, **kwargs)
|
|
57 |
+ return self._create_bwrap_sandbox(*args, **kwargs)
|
|
56 | 58 |
|
57 | 59 |
def check_sandbox_config(self, config):
|
60 |
+ if not self._local_sandbox_available:
|
|
61 |
+ # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
|
|
62 |
+ return True
|
|
63 |
+ |
|
58 | 64 |
if self._user_ns_available:
|
59 | 65 |
# User namespace support allows arbitrary build UID/GID settings.
|
60 | 66 |
return True
|
... | ... | @@ -66,11 +72,26 @@ class Linux(Platform): |
66 | 72 |
################################################
|
67 | 73 |
# Private Methods #
|
68 | 74 |
################################################
|
69 |
- def _local_sandbox_available(self):
|
|
70 |
- try:
|
|
71 |
- return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
|
|
72 |
- except utils.ProgramNotFoundError:
|
|
73 |
- return False
|
|
75 |
+ |
|
76 |
+ def _create_dummy_sandbox(self, *args, **kwargs):
|
|
77 |
+ reasons = []
|
|
78 |
+ if not self._have_fuse:
|
|
79 |
+ reasons.append("FUSE is unavailable")
|
|
80 |
+ if not self._have_good_bwrap:
|
|
81 |
+ if self._bwrap_exists:
|
|
82 |
+ reasons.append("`bwrap` is too old (bst needs at least 0.1.2)")
|
|
83 |
+ else:
|
|
84 |
+ reasons.append("`bwrap` executable not found")
|
|
85 |
+ |
|
86 |
+ kwargs['dummy_reason'] = " and ".join(reasons)
|
|
87 |
+ return SandboxDummy(*args, **kwargs)
|
|
88 |
+ |
|
89 |
+ def _create_bwrap_sandbox(self, *args, **kwargs):
|
|
90 |
+ from ..sandbox._sandboxbwrap import SandboxBwrap
|
|
91 |
+ # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
|
|
92 |
+ kwargs['user_ns_available'] = self._user_ns_available
|
|
93 |
+ kwargs['die_with_parent_available'] = self._die_with_parent_available
|
|
94 |
+ return SandboxBwrap(*args, **kwargs)
|
|
74 | 95 |
|
75 | 96 |
def _check_user_ns_available(self):
|
76 | 97 |
# Here, lets check if bwrap is able to create user namespaces,
|
... | ... | @@ -67,7 +67,11 @@ class Platform(): |
67 | 67 |
return cls._instance
|
68 | 68 |
|
69 | 69 |
def get_cpu_count(self, cap=None):
|
70 |
- return min(len(os.sched_getaffinity(0)), cap)
|
|
70 |
+ cpu_count = len(os.sched_getaffinity(0))
|
|
71 |
+ if cap is None:
|
|
72 |
+ return cpu_count
|
|
73 |
+ else:
|
|
74 |
+ return min(cpu_count, cap)
|
|
71 | 75 |
|
72 | 76 |
##################################################################
|
73 | 77 |
# Sandbox functions #
|
... | ... | @@ -19,6 +19,7 @@ |
19 | 19 |
# Tiago Gomes <tiago gomes codethink co uk>
|
20 | 20 |
|
21 | 21 |
import os
|
22 |
+import hashlib
|
|
22 | 23 |
from collections import Mapping, OrderedDict
|
23 | 24 |
from pluginbase import PluginBase
|
24 | 25 |
from . import utils
|
... | ... | @@ -119,6 +119,8 @@ class Job(): |
119 | 119 |
self._result = None # Return value of child action in the parent
|
120 | 120 |
self._tries = 0 # Try count, for retryable jobs
|
121 | 121 |
self._skipped_flag = False # Indicate whether the job was skipped.
|
122 |
+ self._terminated = False # Whether this job has been explicitly terminated
|
|
123 |
+ |
|
122 | 124 |
# If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
|
123 | 125 |
#
|
124 | 126 |
self._retry_flag = True
|
... | ... | @@ -190,6 +192,8 @@ class Job(): |
190 | 192 |
# Terminate the process using multiprocessing API pathway
|
191 | 193 |
self._process.terminate()
|
192 | 194 |
|
195 |
+ self._terminated = True
|
|
196 |
+ |
|
193 | 197 |
# terminate_wait()
|
194 | 198 |
#
|
195 | 199 |
# Wait for terminated jobs to complete
|
... | ... | @@ -273,18 +277,22 @@ class Job(): |
273 | 277 |
# running the integration commands).
|
274 | 278 |
#
|
275 | 279 |
# Args:
|
276 |
- # (int): The plugin identifier for this task
|
|
280 |
+ # task_id (int): The plugin identifier for this task
|
|
277 | 281 |
#
|
278 | 282 |
def set_task_id(self, task_id):
|
279 | 283 |
self._task_id = task_id
|
280 | 284 |
|
281 | 285 |
# skipped
|
282 | 286 |
#
|
287 |
+ # This will evaluate to True if the job was skipped
|
|
288 |
+ # during processing, or if it was forcefully terminated.
|
|
289 |
+ #
|
|
283 | 290 |
# Returns:
|
284 |
- # bool: True if the job was skipped while processing.
|
|
291 |
+ # (bool): Whether the job should appear as skipped
|
|
292 |
+ #
|
|
285 | 293 |
@property
|
286 | 294 |
def skipped(self):
|
287 |
- return self._skipped_flag
|
|
295 |
+ return self._skipped_flag or self._terminated
|
|
288 | 296 |
|
289 | 297 |
#######################################################
|
290 | 298 |
# Abstract Methods #
|
... | ... | @@ -326,16 +326,20 @@ class Queue(): |
326 | 326 |
detail=traceback.format_exc())
|
327 | 327 |
self.failed_elements.append(element)
|
328 | 328 |
else:
|
329 |
- |
|
330 |
- # No exception occured, handle the success/failure state in the normal way
|
|
331 | 329 |
#
|
330 |
+        # No exception occurred in post processing
|
|
331 |
+ #
|
|
332 |
+ |
|
333 |
+ # All jobs get placed on the done queue for later processing.
|
|
332 | 334 |
self._done_queue.append(job)
|
333 | 335 |
|
334 |
- if success:
|
|
335 |
- if not job.skipped:
|
|
336 |
- self.processed_elements.append(element)
|
|
337 |
- else:
|
|
338 |
- self.skipped_elements.append(element)
|
|
336 |
+            # A Job can be skipped whether or not it has failed;
|
|
337 |
+ # we want to only bookkeep them as processed or failed
|
|
338 |
+ # if they are not skipped.
|
|
339 |
+ if job.skipped:
|
|
340 |
+ self.skipped_elements.append(element)
|
|
341 |
+ elif success:
|
|
342 |
+ self.processed_elements.append(element)
|
|
339 | 343 |
else:
|
340 | 344 |
self.failed_elements.append(element)
|
341 | 345 |
|
... | ... | @@ -387,6 +387,15 @@ class Scheduler(): |
387 | 387 |
# A loop registered event callback for keyboard interrupts
|
388 | 388 |
#
|
389 | 389 |
def _interrupt_event(self):
|
390 |
+ |
|
391 |
+ # FIXME: This should not be needed, but for some reason we receive an
|
|
392 |
+ # additional SIGINT event when the user hits ^C a second time
|
|
393 |
+ # to inform us that they really intend to terminate; even though
|
|
394 |
+ # we have disconnected our handlers at this time.
|
|
395 |
+ #
|
|
396 |
+ if self.terminated:
|
|
397 |
+ return
|
|
398 |
+ |
|
390 | 399 |
# Leave this to the frontend to decide, if no
|
391 | 400 |
# interrupt callback was specified, then just terminate.
|
392 | 401 |
if self._interrupt_callback:
|
... | ... | @@ -78,18 +78,12 @@ def check_bwrap_version(major, minor, patch): |
78 | 78 |
if not bwrap_path:
|
79 | 79 |
return False
|
80 | 80 |
cmd = [bwrap_path, "--version"]
|
81 |
- version = str(subprocess.check_output(cmd).split()[1], "utf-8")
|
|
81 |
+ try:
|
|
82 |
+ version = str(subprocess.check_output(cmd).split()[1], "utf-8")
|
|
83 |
+ except subprocess.CalledProcessError:
|
|
84 |
+ # Failure trying to run bubblewrap
|
|
85 |
+ return False
|
|
82 | 86 |
_bwrap_major, _bwrap_minor, _bwrap_patch = map(int, version.split("."))
|
83 | 87 |
|
84 | 88 |
# Check whether the installed version meets the requirements
|
85 |
- if _bwrap_major > major:
|
|
86 |
- return True
|
|
87 |
- elif _bwrap_major < major:
|
|
88 |
- return False
|
|
89 |
- else:
|
|
90 |
- if _bwrap_minor > minor:
|
|
91 |
- return True
|
|
92 |
- elif _bwrap_minor < minor:
|
|
93 |
- return False
|
|
94 |
- else:
|
|
95 |
- return _bwrap_patch >= patch
|
|
89 |
+ return (_bwrap_major, _bwrap_minor, _bwrap_patch) >= (major, minor, patch)
|
... | ... | @@ -183,20 +183,32 @@ class CompositeTypeError(CompositeError): |
183 | 183 |
# shortname (str): The filename in shorthand for error reporting (or None)
|
184 | 184 |
# copy_tree (bool): Whether to make a copy, preserving the original toplevels
|
185 | 185 |
# for later serialization
|
186 |
+# yaml_cache (YamlCache): A yaml cache to consult rather than parsing
|
|
186 | 187 |
#
|
187 | 188 |
# Returns (dict): A loaded copy of the YAML file with provenance information
|
188 | 189 |
#
|
189 | 190 |
# Raises: LoadError
|
190 | 191 |
#
|
191 |
-def load(filename, shortname=None, copy_tree=False, *, project=None):
|
|
192 |
+def load(filename, shortname=None, copy_tree=False, *, project=None, yaml_cache=None):
|
|
192 | 193 |
if not shortname:
|
193 | 194 |
shortname = filename
|
194 | 195 |
|
195 | 196 |
file = ProvenanceFile(filename, shortname, project)
|
196 | 197 |
|
197 | 198 |
try:
|
199 |
+ data = None
|
|
198 | 200 |
with open(filename) as f:
|
199 |
- return load_data(f, file, copy_tree=copy_tree)
|
|
201 |
+ contents = f.read()
|
|
202 |
+ if yaml_cache:
|
|
203 |
+ data, key = yaml_cache.get(project, filename, contents, copy_tree)
|
|
204 |
+ |
|
205 |
+ if not data:
|
|
206 |
+ data = load_data(contents, file, copy_tree=copy_tree)
|
|
207 |
+ |
|
208 |
+ if yaml_cache:
|
|
209 |
+ yaml_cache.put_from_key(project, filename, key, data)
|
|
210 |
+ |
|
211 |
+ return data
|
|
200 | 212 |
except FileNotFoundError as e:
|
201 | 213 |
raise LoadError(LoadErrorReason.MISSING_FILE,
|
202 | 214 |
"Could not find file at {}".format(filename)) from e
|
1 |
+#
|
|
2 |
+# Copyright 2018 Bloomberg Finance LP
|
|
3 |
+#
|
|
4 |
+# This program is free software; you can redistribute it and/or
|
|
5 |
+# modify it under the terms of the GNU Lesser General Public
|
|
6 |
+# License as published by the Free Software Foundation; either
|
|
7 |
+# version 2 of the License, or (at your option) any later version.
|
|
8 |
+#
|
|
9 |
+# This library is distributed in the hope that it will be useful,
|
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
12 |
+# Lesser General Public License for more details.
|
|
13 |
+#
|
|
14 |
+# You should have received a copy of the GNU Lesser General Public
|
|
15 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
16 |
+#
|
|
17 |
+# Authors:
|
|
18 |
+# Jonathan Maw <jonathan maw codethink co uk>
|
|
19 |
+ |
|
20 |
+import os
|
|
21 |
+import pickle
|
|
22 |
+import hashlib
|
|
23 |
+import io
|
|
24 |
+ |
|
25 |
+import sys
|
|
26 |
+ |
|
27 |
+from contextlib import contextmanager
|
|
28 |
+from collections import namedtuple
|
|
29 |
+ |
|
30 |
+from ._cachekey import generate_key
|
|
31 |
+from ._context import Context
|
|
32 |
+from . import utils, _yaml
|
|
33 |
+ |
|
34 |
+ |
|
35 |
+YAML_CACHE_FILENAME = "yaml_cache.pickle"
|
|
36 |
+ |
|
37 |
+ |
|
38 |
+# YamlCache()
|
|
39 |
+#
|
|
40 |
+# A cache that wraps around the loading of yaml in projects.
|
|
41 |
+#
|
|
42 |
+# The recommended way to use a YamlCache is:
|
|
43 |
+# with YamlCache.open(context) as yamlcache:
|
|
44 |
+# # Load all the yaml
|
|
45 |
+# ...
|
|
46 |
+#
|
|
47 |
+# Args:
|
|
48 |
+# context (Context): The invocation Context
|
|
49 |
+#
|
|
50 |
+class YamlCache():
|
|
51 |
+ |
|
52 |
+ def __init__(self, context):
|
|
53 |
+ self._project_caches = {}
|
|
54 |
+ self._context = context
|
|
55 |
+ ##################
|
|
56 |
+ # Public Methods #
|
|
57 |
+ ##################
|
|
58 |
+ |
|
59 |
+ # is_cached():
|
|
60 |
+ #
|
|
61 |
+ # Checks whether a file is cached.
|
|
62 |
+ #
|
|
63 |
+ # Args:
|
|
64 |
+ # project (Project): The project this file is in.
|
|
65 |
+ # filepath (str): The path to the file, *relative to the project's directory*.
|
|
66 |
+ #
|
|
67 |
+ # Returns:
|
|
68 |
+ # (bool): Whether the file is cached.
|
|
69 |
+ def is_cached(self, project, filepath):
|
|
70 |
+ cache_path = self._get_filepath(project, filepath)
|
|
71 |
+ project_name = project.name if project else ""
|
|
72 |
+ try:
|
|
73 |
+ project_cache = self._project_caches[project_name]
|
|
74 |
+ if cache_path in project_cache.elements:
|
|
75 |
+ return True
|
|
76 |
+ except KeyError:
|
|
77 |
+ pass
|
|
78 |
+ return False
|
|
79 |
+ |
|
80 |
+ # open():
|
|
81 |
+ #
|
|
82 |
+ # Return an instance of the YamlCache which writes to disk when it leaves scope.
|
|
83 |
+ #
|
|
84 |
+ # Args:
|
|
85 |
+ # context (Context): The context.
|
|
86 |
+ # cachefile (str): The path to the cache file.
|
|
87 |
+ #
|
|
88 |
+ # Returns:
|
|
89 |
+ # (YamlCache): A YamlCache.
|
|
90 |
+ @staticmethod
|
|
91 |
+ @contextmanager
|
|
92 |
+ def open(context, cachefile):
|
|
93 |
+ # Try to load from disk first
|
|
94 |
+ cache = None
|
|
95 |
+ if os.path.exists(cachefile):
|
|
96 |
+ try:
|
|
97 |
+ with open(cachefile, "rb") as f:
|
|
98 |
+ cache = BstUnpickler(f, context).load()
|
|
99 |
+ except pickle.UnpicklingError as e:
|
|
100 |
+ sys.stderr.write("Failed to load YamlCache, {}\n".format(e))
|
|
101 |
+ |
|
102 |
+ # Failed to load from disk, create a new one
|
|
103 |
+ if not cache:
|
|
104 |
+ cache = YamlCache(context)
|
|
105 |
+ |
|
106 |
+ yield cache
|
|
107 |
+ |
|
108 |
+ cache._write(cachefile)
|
|
109 |
+ |
|
110 |
+ # get_cache_file():
|
|
111 |
+ #
|
|
112 |
+ # Retrieves a path to the yaml cache file.
|
|
113 |
+ #
|
|
114 |
+    # Args:
+    #    top_dir (str): The project's top level directory
+    #
+    # Returns:
|
|
115 |
+ # (str): The path to the cache file
|
|
116 |
+ @staticmethod
|
|
117 |
+ def get_cache_file(top_dir):
|
|
118 |
+ return os.path.join(top_dir, ".bst", YAML_CACHE_FILENAME)
|
|
119 |
+ |
|
120 |
+ # get():
|
|
121 |
+ #
|
|
122 |
+ # Gets a parsed file from the cache.
|
|
123 |
+ #
|
|
124 |
+ # Args:
|
|
125 |
+ # project (Project) or None: The project this file is in, if it exists.
|
|
126 |
+ # filepath (str): The absolute path to the file.
|
|
127 |
+ # contents (str): The contents of the file to be cached
|
|
128 |
+ # copy_tree (bool): Whether the data should make a copy when it's being generated
|
|
129 |
+ # (i.e. exactly as when called in yaml)
|
|
130 |
+ #
|
|
131 |
+ # Returns:
|
|
132 |
+ # (decorated dict): The parsed yaml from the cache, or None if the file isn't in the cache.
|
|
133 |
+ # (str): The key used to look up the parsed yaml in the cache
|
|
134 |
+ def get(self, project, filepath, contents, copy_tree):
|
|
135 |
+ key = self._calculate_key(contents, copy_tree)
|
|
136 |
+ data = self._get(project, filepath, key)
|
|
137 |
+ return data, key
|
|
138 |
+ |
|
139 |
+ # put():
|
|
140 |
+ #
|
|
141 |
+ # Puts a parsed file into the cache.
|
|
142 |
+ #
|
|
143 |
+ # Args:
|
|
144 |
+ # project (Project): The project this file is in.
|
|
145 |
+ # filepath (str): The path to the file.
|
|
146 |
+ # contents (str): The contents of the file that has been cached
|
|
147 |
+ # copy_tree (bool): Whether the data should make a copy when it's being generated
|
|
148 |
+ # (i.e. exactly as when called in yaml)
|
|
149 |
+ # value (decorated dict): The data to put into the cache.
|
|
150 |
+ def put(self, project, filepath, contents, copy_tree, value):
|
|
151 |
+ key = self._calculate_key(contents, copy_tree)
|
|
152 |
+ self.put_from_key(project, filepath, key, value)
|
|
153 |
+ |
|
154 |
+ # put_from_key():
|
|
155 |
+ #
|
|
156 |
+ # Put a parsed file into the cache when given a key.
|
|
157 |
+ #
|
|
158 |
+ # Args:
|
|
159 |
+ # project (Project): The project this file is in.
|
|
160 |
+ # filepath (str): The path to the file.
|
|
161 |
+ # key (str): The key to the file within the cache. Typically, this is the
|
|
162 |
+ # value of `calculate_key()` with the file's unparsed contents
|
|
163 |
+ # and any relevant metadata passed in.
|
|
164 |
+ # value (decorated dict): The data to put into the cache.
|
|
165 |
+ def put_from_key(self, project, filepath, key, value):
|
|
166 |
+ cache_path = self._get_filepath(project, filepath)
|
|
167 |
+ project_name = project.name if project else ""
|
|
168 |
+ try:
|
|
169 |
+ project_cache = self._project_caches[project_name]
|
|
170 |
+ except KeyError:
|
|
171 |
+ project_cache = self._project_caches[project_name] = CachedProject({})
|
|
172 |
+ |
|
173 |
+ project_cache.elements[cache_path] = CachedYaml(key, value)
|
|
174 |
+ |
|
175 |
+ |
|
176 |
+ ###################
|
|
177 |
+ # Private Methods #
|
|
178 |
+ ###################
|
|
179 |
+ |
|
180 |
+ # Writes the yaml cache to the specified path.
|
|
181 |
+ #
|
|
182 |
+ # Args:
|
|
183 |
+ # path (str): The path to the cache file.
|
|
184 |
+ def _write(self, path):
|
|
185 |
+ parent_dir = os.path.dirname(path)
|
|
186 |
+ os.makedirs(parent_dir, exist_ok=True)
|
|
187 |
+ with open(path, "wb") as f:
|
|
188 |
+ BstPickler(f).dump(self)
|
|
189 |
+ |
|
190 |
+ # _get_filepath():
|
|
191 |
+ #
|
|
192 |
+ # Returns a file path relative to a project if passed, or the original path if
|
|
193 |
+ # the project is None
|
|
194 |
+ #
|
|
195 |
+ # Args:
|
|
196 |
+ # project (Project) or None: The project the filepath exists within
|
|
197 |
+ # full_path (str): The path that the returned path is based on
|
|
198 |
+ #
|
|
199 |
+ # Returns:
|
|
200 |
+ # (str): The path to the file, relative to a project if it exists
|
|
201 |
+ def _get_filepath(self, project, full_path):
|
|
202 |
+ if project:
|
|
203 |
+ assert full_path.startswith(project.directory)
|
|
204 |
+ filepath = os.path.relpath(full_path, project.directory)
|
|
205 |
+ else:
|
|
206 |
+ filepath = full_path
|
|
207 |
+        return filepath
|
|
208 |
+ |
|
209 |
+ # _calculate_key():
|
|
210 |
+ #
|
|
211 |
+ # Calculates a key for putting into the cache.
|
|
212 |
+ #
|
|
213 |
+ # Args:
|
|
214 |
+ # (basic object)... : Any number of strictly-ordered basic objects
|
|
215 |
+ #
|
|
216 |
+ # Returns:
|
|
217 |
+ # (str): A key made out of every arg passed in
|
|
218 |
+ @staticmethod
|
|
219 |
+ def _calculate_key(*args):
|
|
220 |
+ string = pickle.dumps(args)
|
|
221 |
+ return hashlib.sha1(string).hexdigest()
|
|
222 |
+ |
|
223 |
+ # _get():
|
|
224 |
+ #
|
|
225 |
+ # Gets a parsed file from the cache when given a key.
|
|
226 |
+ #
|
|
227 |
+ # Args:
|
|
228 |
+ # project (Project): The project this file is in.
|
|
229 |
+ # filepath (str): The path to the file.
|
|
230 |
+ # key (str): The key to the file within the cache. Typically, this is the
|
|
231 |
+ # value of `calculate_key()` with the file's unparsed contents
|
|
232 |
+ # and any relevant metadata passed in.
|
|
233 |
+ #
|
|
234 |
+ # Returns:
|
|
235 |
+ # (decorated dict): The parsed yaml from the cache, or None if the file isn't in the cache.
|
|
236 |
+ def _get(self, project, filepath, key):
|
|
237 |
+ cache_path = self._get_filepath(project, filepath)
|
|
238 |
+ project_name = project.name if project else ""
|
|
239 |
+ try:
|
|
240 |
+ project_cache = self._project_caches[project_name]
|
|
241 |
+ try:
|
|
242 |
+ cachedyaml = project_cache.elements[cache_path]
|
|
243 |
+ if cachedyaml._key == key:
|
|
244 |
+ # We've unpickled the YamlCache, but not the specific file
|
|
245 |
+ if cachedyaml._contents is None:
|
|
246 |
+ cachedyaml._contents = BstUnpickler.loads(cachedyaml._pickled_contents, self._context)
|
|
247 |
+ return cachedyaml._contents
|
|
248 |
+ except KeyError:
|
|
249 |
+ pass
|
|
250 |
+ except KeyError:
|
|
251 |
+ pass
|
|
252 |
+ return None
|
|
253 |
+ |
|
254 |
+ |
|
255 |
+CachedProject = namedtuple('CachedProject', ['elements'])
|
|
256 |
+ |
|
257 |
+ |
|
258 |
+class CachedYaml():
|
|
259 |
+ def __init__(self, key, contents):
|
|
260 |
+ self._key = key
|
|
261 |
+ self.set_contents(contents)
|
|
262 |
+ |
|
263 |
+ # Sets the contents of the CachedYaml.
|
|
264 |
+ #
|
|
265 |
+ # Args:
|
|
266 |
+ # contents (provenanced dict): The contents to put in the cache.
|
|
267 |
+ #
|
|
268 |
+ def set_contents(self, contents):
|
|
269 |
+ self._contents = contents
|
|
270 |
+ self._pickled_contents = BstPickler.dumps(contents)
|
|
271 |
+ |
|
272 |
+ # Pickling helper method, prevents 'contents' from being serialised
|
|
273 |
+ def __getstate__(self):
|
|
274 |
+ data = self.__dict__.copy()
|
|
275 |
+ data['_contents'] = None
|
|
276 |
+ return data
|
|
277 |
+ |
|
278 |
+ |
|
279 |
+# In _yaml.load, we have a ProvenanceFile that stores the project the file
|
|
280 |
+# came from. Projects can't be pickled, but it's always going to be the same
|
|
281 |
+# project between invocations (unless the entire project is moved but the
|
|
282 |
+# file stayed in the same place)
|
|
283 |
+class BstPickler(pickle.Pickler):
|
|
284 |
+ def persistent_id(self, obj):
|
|
285 |
+ if isinstance(obj, _yaml.ProvenanceFile):
|
|
286 |
+ if obj.project:
|
|
287 |
+ # ProvenanceFile's project object cannot be stored as it is.
|
|
288 |
+ project_tag = obj.project.name
|
|
289 |
+ # ProvenanceFile's filename must be stored relative to the
|
|
290 |
+ # project, as the project dir may move.
|
|
291 |
+ name = os.path.relpath(obj.name, obj.project.directory)
|
|
292 |
+ else:
|
|
293 |
+ project_tag = None
|
|
294 |
+ name = obj.name
|
|
295 |
+ return ("ProvenanceFile", name, obj.shortname, project_tag)
|
|
296 |
+ elif isinstance(obj, Context):
|
|
297 |
+ return ("Context",)
|
|
298 |
+ else:
|
|
299 |
+ return None
|
|
300 |
+ |
|
301 |
+ @staticmethod
|
|
302 |
+ def dumps(obj):
|
|
303 |
+ stream = io.BytesIO()
|
|
304 |
+ BstPickler(stream).dump(obj)
|
|
305 |
+ stream.seek(0)
|
|
306 |
+ return stream.read()
|
|
307 |
+ |
|
308 |
+ |
|
309 |
+class BstUnpickler(pickle.Unpickler):
|
|
310 |
+ def __init__(self, file, context):
|
|
311 |
+ super().__init__(file)
|
|
312 |
+ self._context = context
|
|
313 |
+ |
|
314 |
+ def persistent_load(self, pid):
|
|
315 |
+ if pid[0] == "ProvenanceFile":
|
|
316 |
+ _, tagged_name, shortname, project_tag = pid
|
|
317 |
+ |
|
318 |
+            if project_tag is not None:
+                project = None
|
|
319 |
+ for p in self._context.get_projects():
|
|
320 |
+ if project_tag == p.name:
|
|
321 |
+ project = p
|
|
322 |
+ break
|
|
323 |
+ |
|
324 |
+                if not project:
+                    projects = [p.name for p in self._context.get_projects()]
+                    raise pickle.UnpicklingError("No project with name {} found in {}"
+                                                 .format(project_tag, projects))
+
+                name = os.path.join(project.directory, tagged_name)
|
|
330 |
+ else:
|
|
331 |
+ project = None
|
|
332 |
+ name = tagged_name
|
|
333 |
+ |
|
334 |
+ return _yaml.ProvenanceFile(name, shortname, project)
|
|
335 |
+ elif pid[0] == "Context":
|
|
336 |
+ return self._context
|
|
337 |
+ else:
|
|
338 |
+ raise pickle.UnpicklingError("Unsupported persistent object, {}".format(pid))
|
|
339 |
+ |
|
340 |
+ @staticmethod
|
|
341 |
+ def loads(text, context):
|
|
342 |
+ stream = io.BytesIO()
|
|
343 |
+ stream.write(bytes(text))
|
|
344 |
+ stream.seek(0)
|
|
345 |
+ return BstUnpickler(stream, context).load()
|
... | ... | @@ -26,8 +26,13 @@ logdir: ${XDG_CACHE_HOME}/buildstream/logs |
26 | 26 |
# Cache
|
27 | 27 |
#
|
28 | 28 |
cache:
|
29 |
- # Size of the artifact cache - BuildStream will attempt to keep the
|
|
29 |
+ # Size of the artifact cache in bytes - BuildStream will attempt to keep the
|
|
30 | 30 |
# artifact cache within this size.
|
31 |
+ # If the value is suffixed with K, M, G or T, the specified memory size is
|
|
32 |
+ # parsed as Kilobytes, Megabytes, Gigabytes, or Terabytes (with the base
|
|
33 |
+ # 1024), respectively.
|
|
34 |
+ # Alternatively, a percentage value may be specified, which is taken relative
|
|
35 |
+  # to the size of the file system containing the cache.
|
|
31 | 36 |
quota: infinity
|
32 | 37 |
|
33 | 38 |
#
|
... | ... | @@ -212,7 +212,7 @@ class Element(Plugin): |
212 | 212 |
self.__staged_sources_directory = None # Location where Element.stage_sources() was called
|
213 | 213 |
self.__tainted = None # Whether the artifact is tainted and should not be shared
|
214 | 214 |
self.__required = False # Whether the artifact is required in the current session
|
215 |
- self.__build_result = None # The result of assembling this Element
|
|
215 |
+ self.__build_result = None # The result of assembling this Element (success, description, detail)
|
|
216 | 216 |
self._build_log_path = None # The path of the build log for this Element
|
217 | 217 |
|
218 | 218 |
# hash tables of loaded artifact metadata, hashed by key
|
... | ... | @@ -1316,7 +1316,8 @@ class Element(Plugin): |
1316 | 1316 |
#
|
1317 | 1317 |
@contextmanager
|
1318 | 1318 |
def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
|
1319 |
- with self.__sandbox(directory, config=self.__sandbox_config) as sandbox:
|
|
1319 |
+ # bst shell and bst checkout require a local sandbox.
|
|
1320 |
+ with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False) as sandbox:
|
|
1320 | 1321 |
|
1321 | 1322 |
# Configure always comes first, and we need it.
|
1322 | 1323 |
self.configure_sandbox(sandbox)
|
... | ... | @@ -1379,10 +1380,10 @@ class Element(Plugin): |
1379 | 1380 |
if not vdirectory.is_empty():
|
1380 | 1381 |
raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
|
1381 | 1382 |
|
1382 |
- # While mkdtemp is advertised as using the TMP environment variable, it
|
|
1383 |
- # doesn't, so this explicit extraction is necesasry.
|
|
1384 |
- tmp_prefix = os.environ.get("TMP", None)
|
|
1385 |
- temp_staging_directory = tempfile.mkdtemp(prefix=tmp_prefix)
|
|
1383 |
+ # It's advantageous to have this temporary directory on
|
|
1384 |
+        # the same file system as the rest of our cache.
|
|
1385 |
+ temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
|
|
1386 |
+ temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
|
|
1386 | 1387 |
|
1387 | 1388 |
try:
|
1388 | 1389 |
workspace = self._get_workspace()
|
... | ... | @@ -1479,11 +1480,13 @@ class Element(Plugin): |
1479 | 1480 |
|
1480 | 1481 |
self._update_state()
|
1481 | 1482 |
|
1482 |
- if self._get_workspace() and self._cached():
|
|
1483 |
+ if self._get_workspace() and self._cached_success():
|
|
1484 |
+ assert utils._is_main_process(), \
|
|
1485 |
+ "Attempted to save workspace configuration from child process"
|
|
1483 | 1486 |
#
|
1484 | 1487 |
# Note that this block can only happen in the
|
1485 |
- # main process, since `self._cached()` cannot
|
|
1486 |
- # be true when assembly is completed in the task.
|
|
1488 |
+ # main process, since `self._cached_success()` cannot
|
|
1489 |
+ # be true when assembly is successful in the task.
|
|
1487 | 1490 |
#
|
1488 | 1491 |
# For this reason, it is safe to update and
|
1489 | 1492 |
# save the workspaces configuration
|
... | ... | @@ -2147,17 +2150,18 @@ class Element(Plugin): |
2147 | 2150 |
# stdout (fileobject): The stream for stdout for the sandbox
|
2148 | 2151 |
# stderr (fileobject): The stream for stderr for the sandbox
|
2149 | 2152 |
# config (SandboxConfig): The SandboxConfig object
|
2153 |
+ # allow_remote (bool): Whether the sandbox is allowed to be remote
|
|
2150 | 2154 |
#
|
2151 | 2155 |
# Yields:
|
2152 | 2156 |
# (Sandbox): A usable sandbox
|
2153 | 2157 |
#
|
2154 | 2158 |
@contextmanager
|
2155 |
- def __sandbox(self, directory, stdout=None, stderr=None, config=None):
|
|
2159 |
+ def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True):
|
|
2156 | 2160 |
context = self._get_context()
|
2157 | 2161 |
project = self._get_project()
|
2158 | 2162 |
platform = Platform.get_platform()
|
2159 | 2163 |
|
2160 |
- if directory is not None and self.__use_remote_execution():
|
|
2164 |
+ if directory is not None and allow_remote and self.__use_remote_execution():
|
|
2161 | 2165 |
|
2162 | 2166 |
self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
|
2163 | 2167 |
|
... | ... | @@ -2171,7 +2175,7 @@ class Element(Plugin): |
2171 | 2175 |
yield sandbox
|
2172 | 2176 |
|
2173 | 2177 |
elif directory is not None and os.path.exists(directory):
|
2174 |
- if self.__remote_execution_url:
|
|
2178 |
+ if allow_remote and self.__remote_execution_url:
|
|
2175 | 2179 |
self.warn("Artifact {} is configured to use remote execution but element plugin does not support it."
|
2176 | 2180 |
.format(self.name), detail="Element plugin '{kind}' does not support virtual directories."
|
2177 | 2181 |
.format(kind=self.get_kind()), warning_token="remote-failure")
|
... | ... | @@ -2191,7 +2195,8 @@ class Element(Plugin): |
2191 | 2195 |
rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
|
2192 | 2196 |
|
2193 | 2197 |
# Recursive contextmanager...
|
2194 |
- with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config) as sandbox:
|
|
2198 |
+ with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
|
|
2199 |
+ allow_remote=allow_remote) as sandbox:
|
|
2195 | 2200 |
yield sandbox
|
2196 | 2201 |
|
2197 | 2202 |
# Cleanup the build dir
|
... | ... | @@ -184,10 +184,18 @@ class GitMirror(SourceFetcher): |
184 | 184 |
cwd=self.mirror)
|
185 | 185 |
|
186 | 186 |
def fetch(self, alias_override=None):
|
187 |
- self.ensure(alias_override)
|
|
188 |
- if not self.has_ref():
|
|
189 |
- self._fetch(alias_override)
|
|
190 |
- self.assert_ref()
|
|
187 |
+ # Resolve the URL for the message
|
|
188 |
+ resolved_url = self.source.translate_url(self.url,
|
|
189 |
+ alias_override=alias_override,
|
|
190 |
+ primary=self.primary)
|
|
191 |
+ |
|
192 |
+ with self.source.timed_activity("Fetching from {}"
|
|
193 |
+ .format(resolved_url),
|
|
194 |
+ silent_nested=True):
|
|
195 |
+ self.ensure(alias_override)
|
|
196 |
+ if not self.has_ref():
|
|
197 |
+ self._fetch(alias_override)
|
|
198 |
+ self.assert_ref()
|
|
191 | 199 |
|
192 | 200 |
def has_ref(self):
|
193 | 201 |
if not self.ref:
|
... | ... | @@ -23,6 +23,7 @@ from . import Sandbox |
23 | 23 |
class SandboxDummy(Sandbox):
|
24 | 24 |
def __init__(self, *args, **kwargs):
|
25 | 25 |
super().__init__(*args, **kwargs)
|
26 |
+ self._reason = kwargs.get("dummy_reason", "no reason given")
|
|
26 | 27 |
|
27 | 28 |
def run(self, command, flags, *, cwd=None, env=None):
|
28 | 29 |
|
... | ... | @@ -37,4 +38,4 @@ class SandboxDummy(Sandbox): |
37 | 38 |
"'{}'".format(command[0]),
|
38 | 39 |
reason='missing-command')
|
39 | 40 |
|
40 |
- raise SandboxError("This platform does not support local builds")
|
|
41 |
+ raise SandboxError("This platform does not support local builds: {}".format(self._reason))
|
... | ... | @@ -177,15 +177,11 @@ class SandboxRemote(Sandbox): |
177 | 177 |
if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
|
178 | 178 |
raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
|
179 | 179 |
|
180 |
- # Set up environment and working directory
|
|
181 |
- if cwd is None:
|
|
182 |
- cwd = self._get_work_directory()
|
|
183 |
- |
|
184 |
- if cwd is None:
|
|
185 |
- cwd = '/'
|
|
186 |
- |
|
187 |
- if env is None:
|
|
188 |
- env = self._get_environment()
|
|
180 |
+ # Fallback to the sandbox default settings for
|
|
181 |
+ # the cwd and env.
|
|
182 |
+ #
|
|
183 |
+ cwd = self._get_work_directory(cwd=cwd)
|
|
184 |
+ env = self._get_environment(cwd=cwd, env=env)
|
|
189 | 185 |
|
190 | 186 |
# We want command args as a list of strings
|
191 | 187 |
if isinstance(command, str):
|
... | ... | @@ -965,28 +965,48 @@ class Source(Plugin): |
965 | 965 |
# Tries to call fetch for every mirror, stopping once it succeeds
|
966 | 966 |
def __do_fetch(self, **kwargs):
|
967 | 967 |
project = self._get_project()
|
968 |
- source_fetchers = self.get_source_fetchers()
|
|
968 |
+ context = self._get_context()
|
|
969 |
+ |
|
970 |
+ # Silence the STATUS messages which might happen as a result
|
|
971 |
+ # of checking the source fetchers.
|
|
972 |
+ with context.silence():
|
|
973 |
+ source_fetchers = self.get_source_fetchers()
|
|
969 | 974 |
|
970 | 975 |
# Use the source fetchers if they are provided
|
971 | 976 |
#
|
972 | 977 |
if source_fetchers:
|
973 |
- for fetcher in source_fetchers:
|
|
974 |
- alias = fetcher._get_alias()
|
|
975 |
- for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
976 |
- try:
|
|
977 |
- fetcher.fetch(uri)
|
|
978 |
- # FIXME: Need to consider temporary vs. permanent failures,
|
|
979 |
- # and how this works with retries.
|
|
980 |
- except BstError as e:
|
|
981 |
- last_error = e
|
|
982 |
- continue
|
|
983 |
- |
|
984 |
- # No error, we're done with this fetcher
|
|
985 |
- break
|
|
986 | 978 |
|
987 |
- else:
|
|
988 |
- # No break occurred, raise the last detected error
|
|
989 |
- raise last_error
|
|
979 |
+ # Use a contorted loop here; this allows us to
|
|
980 |
+ # silence the messages which can result from consuming
|
|
981 |
+ # the items of source_fetchers, if it happens to be a generator.
|
|
982 |
+ #
|
|
983 |
+ source_fetchers = iter(source_fetchers)
|
|
984 |
+ try:
|
|
985 |
+ |
|
986 |
+ while True:
|
|
987 |
+ |
|
988 |
+ with context.silence():
|
|
989 |
+ fetcher = next(source_fetchers)
|
|
990 |
+ |
|
991 |
+ alias = fetcher._get_alias()
|
|
992 |
+ for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
993 |
+ try:
|
|
994 |
+ fetcher.fetch(uri)
|
|
995 |
+ # FIXME: Need to consider temporary vs. permanent failures,
|
|
996 |
+ # and how this works with retries.
|
|
997 |
+ except BstError as e:
|
|
998 |
+ last_error = e
|
|
999 |
+ continue
|
|
1000 |
+ |
|
1001 |
+ # No error, we're done with this fetcher
|
|
1002 |
+ break
|
|
1003 |
+ |
|
1004 |
+ else:
|
|
1005 |
+ # No break occurred, raise the last detected error
|
|
1006 |
+ raise last_error
|
|
1007 |
+ |
|
1008 |
+ except StopIteration:
|
|
1009 |
+ pass
|
|
990 | 1010 |
|
991 | 1011 |
# Default codepath is to reinstantiate the Source
|
992 | 1012 |
#
|
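The "contorted loop" above exists because a plain `for` statement gives no way to wrap only the generator's advancement in a context manager. A minimal sketch of the same pattern, with stand-ins for `context.silence()` and the fetchers generator:

    from contextlib import contextmanager

    @contextmanager
    def silence():
        # Stand-in for context.silence(); the real one suppresses STATUS messages
        yield

    def source_fetchers():
        # Stand-in generator; work before each yield may emit messages
        yield "fetcher-1"
        yield "fetcher-2"

    fetchers = iter(source_fetchers())
    try:
        while True:
            with silence():
                fetcher = next(fetchers)     # generator body runs silenced
            print("fetching with", fetcher)  # per-fetcher work, unsilenced
    except StopIteration:
        pass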
... | ... | @@ -502,7 +502,7 @@ def get_bst_version(): |
502 | 502 |
|
503 | 503 |
@contextmanager
|
504 | 504 |
def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
|
505 |
- errors=None, newline=None, closefd=True, opener=None):
|
|
505 |
+ errors=None, newline=None, closefd=True, opener=None, tempdir=None):
|
|
506 | 506 |
"""Save a file with a temporary name and rename it into place when ready.
|
507 | 507 |
|
508 | 508 |
This is a context manager which is meant for saving data to files.
|
... | ... | @@ -529,8 +529,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None, |
529 | 529 |
# https://bugs.python.org/issue8604
|
530 | 530 |
|
531 | 531 |
assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
|
532 |
- dirname = os.path.dirname(filename)
|
|
533 |
- fd, tempname = tempfile.mkstemp(dir=dirname)
|
|
532 |
+ if tempdir is None:
|
|
533 |
+ tempdir = os.path.dirname(filename)
|
|
534 |
+ fd, tempname = tempfile.mkstemp(dir=tempdir)
|
|
534 | 535 |
os.close(fd)
|
535 | 536 |
|
536 | 537 |
f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,
|
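A hedged usage sketch of the new `tempdir` keyword: placing the temporary file in a caller-chosen directory keeps the final rename on a single filesystem. The paths are illustrative, and the public `buildstream.utils` import path is assumed.

    import os
    import tempfile
    from buildstream import utils

    base = tempfile.mkdtemp()
    tmp = os.path.join(base, 'tmp')
    os.makedirs(tmp)
    target = os.path.join(base, 'result.txt')
    # The temporary file goes in `tmp`; the rename into place stays on one filesystem
    with utils.save_file_atomic(target, 'w', tempdir=tmp) as f:
        f.write('either fully written and renamed into place, or absent\n')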
... | ... | @@ -562,6 +563,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None, |
562 | 563 |
#
|
563 | 564 |
# Get the disk usage of a given directory in bytes.
|
564 | 565 |
#
|
566 |
+# This function assumes that files do not inadvertently
|
|
567 |
+# disappear while this function is running.
|
|
568 |
+#
|
|
565 | 569 |
# Arguments:
|
566 | 570 |
# (str) The path whose size to check.
|
567 | 571 |
#
|
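A minimal sketch of what a recursive disk-usage helper of this shape might look like, under the stated assumption that files do not disappear mid-walk; the name `get_dir_size` is illustrative rather than the actual helper.

    import os

    def get_dir_size(path):
        total = 0
        for dirpath, _, filenames in os.walk(path):
            for name in filenames:
                fullpath = os.path.join(dirpath, name)
                # Skip symlinks so linked-to files elsewhere are not counted
                if not os.path.islink(fullpath):
                    total += os.path.getsize(fullpath)
        return total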
... | ... | @@ -682,7 +686,7 @@ def _force_rmtree(rootpath, **kwargs): |
682 | 686 |
|
683 | 687 |
try:
|
684 | 688 |
shutil.rmtree(rootpath, **kwargs)
|
685 |
- except shutil.Error as e:
|
|
689 |
+ except OSError as e:
|
|
686 | 690 |
raise UtilError("Failed to remove cache directory '{}': {}"
|
687 | 691 |
.format(rootpath, e))
|
688 | 692 |
|
1 |
+#!/bin/bash
|
|
2 |
+#
|
|
3 |
+# Copyright 2018 Bloomberg Finance LP
|
|
4 |
+#
|
|
5 |
+# This program is free software; you can redistribute it and/or
|
|
6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
7 |
+# License as published by the Free Software Foundation; either
|
|
8 |
+# version 2 of the License, or (at your option) any later version.
|
|
9 |
+#
|
|
10 |
+# This library is distributed in the hope that it will be useful,
|
|
11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
13 |
+# Lesser General Public License for more details.
|
|
14 |
+#
|
|
15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
17 |
+#
|
|
18 |
+# Authors:
|
|
19 |
+# Chandan Singh <csingh43 bloomberg net>
|
|
20 |
+ |
|
21 |
+# This is a helper script to generate Docker images using checkouts of
|
|
22 |
+# BuildStream elements.
|
|
23 |
+ |
|
24 |
+usage() {
|
|
25 |
+ cat <<EOF
|
|
26 |
+ |
|
27 |
+USAGE: $(basename "$0") [-c BST_CMD] [-m MESSAGE] [-t TAG] [-h] ELEMENT
|
|
28 |
+ |
|
29 |
+Create a Docker image from a bst checkout of an element.
|
|
30 |
+ |
|
31 |
+OPTIONS:
|
|
32 |
+ -c BST_CMD Path to BuildStream command (default: bst).
|
|
33 |
+ -m MESSAGE Commit message for the imported image.
|
|
34 |
+ -t TAG Tag of the imported image.
|
|
35 |
+ -h Print this help text and exit.
|
|
36 |
+ |
|
37 |
+EXAMPLES:
|
|
38 |
+ |
|
39 |
+ # Import hello.bst as a Docker image with tag "bst-hello" and message "hello"
|
|
40 |
+ $(basename "$0") -m hello -t bst-hello hello.bst
|
|
41 |
+ |
|
42 |
+ # Import hello.bst as a Docker image with tag "bst-hello" using bst-here
|
|
43 |
+ $(basename "$0") -c bst-here -t bst-hello hello.bst
|
|
44 |
+ |
|
45 |
+EOF
|
|
46 |
+ exit "$1"
|
|
47 |
+}
|
|
48 |
+ |
|
49 |
+die() {
|
|
50 |
+ echo "FATAL: $1" >&2
|
|
51 |
+ exit 1
|
|
52 |
+}
|
|
53 |
+ |
|
54 |
+bst_cmd=bst
|
|
55 |
+docker_import_cmd=(docker import)
|
|
56 |
+docker_image_tag=
|
|
57 |
+ |
|
58 |
+while getopts c:m:t:h arg
|
|
59 |
+do
|
|
60 |
+ case $arg in
|
|
61 |
+ c)
|
|
62 |
+ bst_cmd="$OPTARG"
|
|
63 |
+ ;;
|
|
64 |
+ m)
|
|
65 |
+ docker_import_cmd+=('-m' "$OPTARG")
|
|
66 |
+ ;;
|
|
67 |
+ t)
|
|
68 |
+ docker_image_tag="$OPTARG"
|
|
69 |
+ ;;
|
|
70 |
+ h)
|
|
71 |
+ usage 0
|
|
72 |
+ ;;
|
|
73 |
+ \?)
|
|
74 |
+ usage 1
|
|
75 |
+ esac
|
|
76 |
+done
|
|
77 |
+ |
|
78 |
+shift $((OPTIND-1))
|
|
79 |
+if [[ "$#" != 1 ]]; then
|
|
80 |
+ echo "$0: No element specified" >&2
|
|
81 |
+ usage 1
|
|
82 |
+fi
|
|
83 |
+element="$1"
|
|
84 |
+ |
|
85 |
+# Dump to a temporary file in the current directory.
|
|
86 |
+# NOTE: We use current directory to try to ensure compatibility with scripts
|
|
87 |
+# like bst-here, assuming that the current working directory is mounted
|
|
88 |
+# inside the container.
|
|
89 |
+ |
|
90 |
+checkout_tar="bst-checkout-$(basename "$element")-$RANDOM.tar"
|
|
91 |
+ |
|
92 |
+echo "INFO: Checking out $element ..." >&2
|
|
93 |
+$bst_cmd checkout --tar "$element" "$checkout_tar" || die "Failed to checkout $element"
|
|
94 |
+echo "INFO: Successfully checked out $element" >&2
|
|
95 |
+ |
|
96 |
+echo "INFO: Importing Docker image ..."
|
|
97 |
+"${docker_import_cmd[@]}" "$checkout_tar" "$docker_image_tag" || die "Failed to import Docker image from tarball"
|
|
98 |
+echo "INFO: Successfully import Docker image $docker_image_tag"
|
|
99 |
+ |
|
100 |
+echo "INFO: Cleaning up ..."
|
|
101 |
+rm "$checkout_tar" || die "Failed to remove $checkout_tar"
|
|
102 |
+echo "INFO: Clean up finished"
|
... | ... | @@ -54,12 +54,13 @@ REQUIRED_BWRAP_MINOR = 1 |
54 | 54 |
REQUIRED_BWRAP_PATCH = 2
|
55 | 55 |
|
56 | 56 |
|
57 |
-def exit_bwrap(reason):
|
|
57 |
+def warn_bwrap(reason):
|
|
58 | 58 |
print(reason +
|
59 |
- "\nBuildStream requires Bubblewrap (bwrap) for"
|
|
60 |
- " sandboxing the build environment. Install it using your package manager"
|
|
61 |
- " (usually bwrap or bubblewrap)")
|
|
62 |
- sys.exit(1)
|
|
59 |
+ "\nBuildStream requires Bubblewrap (bwrap {}.{}.{} or better),"
|
|
60 |
+ " during local builds, for"
|
|
61 |
+ " sandboxing the build environment.\nInstall it using your package manager"
|
|
62 |
+ " (usually bwrap or bubblewrap) otherwise you will be limited to"
|
|
63 |
+ " remote builds only.".format(REQUIRED_BWRAP_MAJOR, REQUIRED_BWRAP_MINOR, REQUIRED_BWRAP_PATCH))
|
|
63 | 64 |
|
64 | 65 |
|
65 | 66 |
def bwrap_too_old(major, minor, patch):
|
... | ... | @@ -76,18 +77,19 @@ def bwrap_too_old(major, minor, patch): |
76 | 77 |
return False
|
77 | 78 |
|
78 | 79 |
|
79 |
-def assert_bwrap():
|
|
80 |
+def check_for_bwrap():
|
|
80 | 81 |
platform = os.environ.get('BST_FORCE_BACKEND', '') or sys.platform
|
81 | 82 |
if platform.startswith('linux'):
|
82 | 83 |
bwrap_path = shutil.which('bwrap')
|
83 | 84 |
if not bwrap_path:
|
84 |
- exit_bwrap("Bubblewrap not found")
|
|
85 |
+ warn_bwrap("Bubblewrap not found")
|
|
86 |
+ return
|
|
85 | 87 |
|
86 | 88 |
version_bytes = subprocess.check_output([bwrap_path, "--version"]).split()[1]
|
87 | 89 |
version_string = str(version_bytes, "utf-8")
|
88 | 90 |
major, minor, patch = map(int, version_string.split("."))
|
89 | 91 |
if bwrap_too_old(major, minor, patch):
|
90 |
- exit_bwrap("Bubblewrap too old")
|
|
92 |
+ warn_bwrap("Bubblewrap too old")
|
|
91 | 93 |
|
92 | 94 |
|
93 | 95 |
###########################################
|
... | ... | @@ -126,7 +128,7 @@ bst_install_entry_points = { |
126 | 128 |
}
|
127 | 129 |
|
128 | 130 |
if not os.environ.get('BST_ARTIFACTS_ONLY', ''):
|
129 |
- assert_bwrap()
|
|
131 |
+ check_for_bwrap()
|
|
130 | 132 |
bst_install_entry_points['console_scripts'] += [
|
131 | 133 |
'bst = buildstream._frontend:cli'
|
132 | 134 |
]
|
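The version gate above parses the `bwrap --version` output into integers; a tuple comparison is one minimal way to express the "too old" check. This sketch assumes a required version of 0.1.2 (the MAJOR constant sits outside this hunk) and is illustrative rather than the actual `bwrap_too_old` body.

    REQUIRED = (0, 1, 2)  # assumed (REQUIRED_BWRAP_MAJOR, REQUIRED_BWRAP_MINOR, REQUIRED_BWRAP_PATCH)

    def too_old(major, minor, patch):
        # Lexicographic tuple comparison matches version ordering here
        return (major, minor, patch) < REQUIRED

    version_string = "bubblewrap 0.3.1".split()[1]
    major, minor, patch = map(int, version_string.split("."))
    assert not too_old(major, minor, patch)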
... | ... | @@ -139,6 +139,82 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind): |
139 | 139 |
result.assert_success()
|
140 | 140 |
|
141 | 141 |
|
142 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
143 |
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
|
|
144 |
+@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
|
|
145 |
+def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
|
|
146 |
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
|
|
147 |
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
|
|
148 |
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
|
|
149 |
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
|
|
150 |
+ project_dir = os.path.join(str(tmpdir), 'project')
|
|
151 |
+ os.makedirs(project_dir)
|
|
152 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
153 |
+ |
|
154 |
+ # Create repo objects of the upstream and mirror
|
|
155 |
+ upstream_repo = create_repo('tar', upstream_repodir)
|
|
156 |
+ upstream_ref = upstream_repo.create(bin_files_path)
|
|
157 |
+ mirror_repo = upstream_repo.copy(mirror_repodir)
|
|
158 |
+ mirror_ref = upstream_ref
|
|
159 |
+ upstream_ref = upstream_repo.create(dev_files_path)
|
|
160 |
+ |
|
161 |
+ element = {
|
|
162 |
+ 'kind': 'import',
|
|
163 |
+ 'sources': [
|
|
164 |
+ upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
|
|
165 |
+ ]
|
|
166 |
+ }
|
|
167 |
+ element_name = 'test.bst'
|
|
168 |
+ element_path = os.path.join(element_dir, element_name)
|
|
169 |
+ full_repo = element['sources'][0]['url']
|
|
170 |
+ upstream_map, repo_name = os.path.split(full_repo)
|
|
171 |
+ alias = 'foo'
|
|
172 |
+ aliased_repo = alias + ':' + repo_name
|
|
173 |
+ element['sources'][0]['url'] = aliased_repo
|
|
174 |
+ full_mirror = mirror_repo.source_config()['url']
|
|
175 |
+ mirror_map, _ = os.path.split(full_mirror)
|
|
176 |
+ os.makedirs(element_dir)
|
|
177 |
+ _yaml.dump(element, element_path)
|
|
178 |
+ |
|
179 |
+ if ref_storage == 'project.refs':
|
|
180 |
+ # Manually set project.refs to avoid caching the repo prematurely
|
|
181 |
+ project_refs = {'projects': {
|
|
182 |
+ 'test': {
|
|
183 |
+ element_name: [
|
|
184 |
+ {'ref': upstream_ref}
|
|
185 |
+ ]
|
|
186 |
+ }
|
|
187 |
+ }}
|
|
188 |
+ project_refs_path = os.path.join(project_dir, 'project.refs')
|
|
189 |
+ _yaml.dump(project_refs, project_refs_path)
|
|
190 |
+ |
|
191 |
+ project = {
|
|
192 |
+ 'name': 'test',
|
|
193 |
+ 'element-path': 'elements',
|
|
194 |
+ 'aliases': {
|
|
195 |
+ alias: upstream_map + "/"
|
|
196 |
+ },
|
|
197 |
+ 'ref-storage': ref_storage
|
|
198 |
+ }
|
|
199 |
+ if mirror != 'no-mirror':
|
|
200 |
+ mirror_data = [{
|
|
201 |
+ 'name': 'middle-earth',
|
|
202 |
+ 'aliases': {alias: [mirror_map + '/']}
|
|
203 |
+ }]
|
|
204 |
+ if mirror == 'unrelated-mirror':
|
|
205 |
+ mirror_data.insert(0, {
|
|
206 |
+ 'name': 'narnia',
|
|
207 |
+ 'aliases': {'frob': ['http://www.example.com/repo']}
|
|
208 |
+ })
|
|
209 |
+ project['mirrors'] = mirror_data
|
|
210 |
+ |
|
211 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
212 |
+ _yaml.dump(project, project_file)
|
|
213 |
+ |
|
214 |
+ result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
215 |
+ result.assert_success()
|
|
216 |
+ |
|
217 |
+ |
|
142 | 218 |
@pytest.mark.datafiles(DATA_DIR)
|
143 | 219 |
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
144 | 220 |
def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
|
... | ... | @@ -43,7 +43,8 @@ DATA_DIR = os.path.join( |
43 | 43 |
)
|
44 | 44 |
|
45 | 45 |
|
46 |
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None, project_path=None):
|
|
46 |
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
|
47 |
+ project_path=None, element_attrs=None):
|
|
47 | 48 |
if not workspace_dir:
|
48 | 49 |
workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
49 | 50 |
if not project_path:
|
... | ... | @@ -69,6 +70,8 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir |
69 | 70 |
repo.source_config(ref=ref)
|
70 | 71 |
]
|
71 | 72 |
}
|
73 |
+ if element_attrs:
|
|
74 |
+ element = {**element, **element_attrs}
|
|
72 | 75 |
_yaml.dump(element,
|
73 | 76 |
os.path.join(element_path,
|
74 | 77 |
element_name))
|
... | ... | @@ -854,3 +857,22 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict): |
854 | 857 |
|
855 | 858 |
# Check that the original /usr/bin/hello is not in the checkout
|
856 | 859 |
assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
|
860 |
+ |
|
861 |
+ |
|
862 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
863 |
+def test_multiple_failed_builds(cli, tmpdir, datafiles):
|
|
864 |
+ element_config = {
|
|
865 |
+ "kind": "manual",
|
|
866 |
+ "config": {
|
|
867 |
+ "configure-commands": [
|
|
868 |
+ "unknown_command_that_will_fail"
|
|
869 |
+ ]
|
|
870 |
+ }
|
|
871 |
+ }
|
|
872 |
+ element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
|
|
873 |
+ "git", False, element_attrs=element_config)
|
|
874 |
+ |
|
875 |
+ for _ in range(2):
|
|
876 |
+ result = cli.run(project=project, args=["build", element_name])
|
|
877 |
+ assert "BUG" not in result.stderr
|
|
878 |
+ assert cli.get_element_state(project, element_name) != "cached"
|
1 |
+import os
|
|
2 |
+import pytest
|
|
3 |
+import hashlib
|
|
4 |
+import tempfile
|
|
5 |
+from ruamel import yaml
|
|
6 |
+ |
|
7 |
+from tests.testutils import cli, generate_junction, create_element_size, create_repo
|
|
8 |
+from buildstream import _yaml
|
|
9 |
+from buildstream._yamlcache import YamlCache
|
|
10 |
+from buildstream._project import Project
|
|
11 |
+from buildstream._context import Context
|
|
12 |
+from contextlib import contextmanager
|
|
13 |
+ |
|
14 |
+ |
|
15 |
+def generate_project(tmpdir, ref_storage, with_junction, name="test"):
|
|
16 |
+ if with_junction == 'junction':
|
|
17 |
+ subproject_dir = generate_project(
|
|
18 |
+ tmpdir, ref_storage,
|
|
19 |
+ 'no-junction', name='test-subproject'
|
|
20 |
+ )
|
|
21 |
+ |
|
22 |
+ project_dir = os.path.join(tmpdir, name)
|
|
23 |
+ os.makedirs(project_dir)
|
|
24 |
+ # project.conf
|
|
25 |
+ project_conf_path = os.path.join(project_dir, 'project.conf')
|
|
26 |
+ elements_path = 'elements'
|
|
27 |
+ project_conf = {
|
|
28 |
+ 'name': name,
|
|
29 |
+ 'element-path': elements_path,
|
|
30 |
+ 'ref-storage': ref_storage,
|
|
31 |
+ }
|
|
32 |
+ _yaml.dump(project_conf, project_conf_path)
|
|
33 |
+ |
|
34 |
+ # elements
|
|
35 |
+ if with_junction == 'junction':
|
|
36 |
+ junction_name = 'junction.bst'
|
|
37 |
+ junction_dir = os.path.join(project_dir, elements_path)
|
|
38 |
+ junction_path = os.path.join(project_dir, elements_path, junction_name)
|
|
39 |
+ os.makedirs(junction_dir)
|
|
40 |
+ generate_junction(tmpdir, subproject_dir, junction_path)
|
|
41 |
+ element_depends = [{'junction': junction_name, 'filename': 'test.bst'}]
|
|
42 |
+ else:
|
|
43 |
+ element_depends = []
|
|
44 |
+ |
|
45 |
+ element_name = 'test.bst'
|
|
46 |
+ create_element_size(element_name, project_dir, elements_path, element_depends, 1)
|
|
47 |
+ |
|
48 |
+ return project_dir
|
|
49 |
+ |
|
50 |
+ |
|
51 |
+@contextmanager
|
|
52 |
+def with_yamlcache(project_dir):
|
|
53 |
+ context = Context()
|
|
54 |
+ project = Project(project_dir, context)
|
|
55 |
+ cache_file = YamlCache.get_cache_file(project_dir)
|
|
56 |
+ with YamlCache.open(context, cache_file) as yamlcache:
|
|
57 |
+ yield yamlcache, project
|
|
58 |
+ |
|
59 |
+ |
|
60 |
+def yamlcache_key(yamlcache, in_file, copy_tree=False):
|
|
61 |
+ with open(in_file) as f:
|
|
62 |
+ key = yamlcache._calculate_key(f.read(), copy_tree)
|
|
63 |
+ return key
|
|
64 |
+ |
|
65 |
+ |
|
66 |
+def modified_file(input_file, tmpdir):
|
|
67 |
+ with open(input_file) as f:
|
|
68 |
+ data = f.read()
|
|
69 |
+ assert 'variables' not in data
|
|
70 |
+ data += '\nvariables: {modified: True}\n'
|
|
71 |
+ _, temppath = tempfile.mkstemp(dir=tmpdir, text=True)
|
|
72 |
+ with open(temppath, 'w') as f:
|
|
73 |
+ f.write(data)
|
|
74 |
+ |
|
75 |
+ return temppath
|
|
76 |
+ |
|
77 |
+ |
|
78 |
+@pytest.mark.parametrize('ref_storage', ['inline', 'project.refs'])
|
|
79 |
+@pytest.mark.parametrize('with_junction', ['no-junction', 'junction'])
|
|
80 |
+@pytest.mark.parametrize('move_project', ['move', 'no-move'])
|
|
81 |
+def test_yamlcache_used(cli, tmpdir, ref_storage, with_junction, move_project):
|
|
82 |
+ # Generate the project
|
|
83 |
+ project = generate_project(str(tmpdir), ref_storage, with_junction)
|
|
84 |
+ if with_junction == 'junction':
|
|
85 |
+ result = cli.run(project=project, args=['fetch', '--track', 'junction.bst'])
|
|
86 |
+ result.assert_success()
|
|
87 |
+ |
|
88 |
+ # bst show to put it in the cache
|
|
89 |
+ result = cli.run(project=project, args=['show', 'test.bst'])
|
|
90 |
+ result.assert_success()
|
|
91 |
+ |
|
92 |
+ element_path = os.path.join(project, 'elements', 'test.bst')
|
|
93 |
+ with with_yamlcache(project) as (yc, prj):
|
|
94 |
+ # Check that it's in the cache
|
|
95 |
+ assert yc.is_cached(prj, element_path)
|
|
96 |
+ |
|
97 |
+ # *Absolutely* horrible cache corruption to check it's being used
|
|
98 |
+ # Modifying the data from the cache is fraught with danger,
|
|
99 |
+ # so instead I'll load a modified version of the original file
|
|
100 |
+ temppath = modified_file(element_path, str(tmpdir))
|
|
101 |
+ contents = _yaml.load(temppath, copy_tree=False, project=prj)
|
|
102 |
+ key = yamlcache_key(yc, element_path)
|
|
103 |
+ yc.put_from_key(prj, element_path, key, contents)
|
|
104 |
+ |
|
105 |
+ # Show that a variable has been added
|
|
106 |
+ result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
|
|
107 |
+ result.assert_success()
|
|
108 |
+ data = yaml.safe_load(result.output)
|
|
109 |
+ assert 'modified' in data
|
|
110 |
+ assert data['modified'] == 'True'
|
|
111 |
+ |
|
112 |
+ |
|
113 |
+@pytest.mark.parametrize('ref_storage', ['inline', 'project.refs'])
|
|
114 |
+@pytest.mark.parametrize('with_junction', ['junction', 'no-junction'])
|
|
115 |
+@pytest.mark.parametrize('move_project', ['move', 'no-move'])
|
|
116 |
+def test_yamlcache_changed_file(cli, ref_storage, with_junction, move_project):
|
|
117 |
+ # i.e. a file is cached, the file is changed, loading the file (with cache) returns new data
|
|
118 |
+ # inline and junction can only be changed by opening a workspace
|
|
119 |
+ pass
|
1 | 1 |
import os
|
2 | 2 |
import pytest
|
3 |
+import tempfile
|
|
3 | 4 |
from collections import Mapping
|
4 | 5 |
|
5 | 6 |
from buildstream import _yaml
|
6 | 7 |
from buildstream._exceptions import LoadError, LoadErrorReason
|
8 |
+from buildstream._context import Context
|
|
9 |
+from buildstream._yamlcache import YamlCache
|
|
7 | 10 |
|
8 | 11 |
DATA_DIR = os.path.join(
|
9 | 12 |
os.path.dirname(os.path.realpath(__file__)),
|
... | ... | @@ -150,6 +153,21 @@ def test_composite_preserve_originals(datafiles): |
150 | 153 |
assert(_yaml.node_get(orig_extra, str, 'old') == 'new')
|
151 | 154 |
|
152 | 155 |
|
156 |
+def load_yaml_file(filename, *, cache_path, shortname=None, from_cache='raw'):
|
|
157 |
+ |
|
158 |
+ _, temppath = tempfile.mkstemp(dir=str(cache_path), text=True)
|
|
159 |
+ context = Context()
|
|
160 |
+ |
|
161 |
+ with YamlCache.open(context, str(temppath)) as yc:
|
|
162 |
+ if from_cache == 'raw':
|
|
163 |
+ return _yaml.load(filename, shortname)
|
|
164 |
+ elif from_cache == 'cached':
|
|
165 |
+ _yaml.load(filename, shortname, yaml_cache=yc)
|
|
166 |
+ return _yaml.load(filename, shortname, yaml_cache=yc)
|
|
167 |
+ else:
|
|
168 |
+ assert False
|
|
169 |
+ |
|
170 |
+ |
|
153 | 171 |
# Tests for list composition
|
154 | 172 |
#
|
155 | 173 |
# Each test composits a filename on top of basics.yaml, and tests
|
... | ... | @@ -165,6 +183,7 @@ def test_composite_preserve_originals(datafiles): |
165 | 183 |
# prov_col: The expected provenance column of "mood"
|
166 | 184 |
#
|
167 | 185 |
@pytest.mark.datafiles(os.path.join(DATA_DIR))
|
186 |
+@pytest.mark.parametrize('caching', [('raw'), ('cached')])
|
|
168 | 187 |
@pytest.mark.parametrize("filename,index,length,mood,prov_file,prov_line,prov_col", [
|
169 | 188 |
|
170 | 189 |
# Test results of compositing with the (<) prepend directive
|
... | ... | @@ -195,14 +214,15 @@ def test_composite_preserve_originals(datafiles): |
195 | 214 |
('implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8),
|
196 | 215 |
('implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8),
|
197 | 216 |
])
|
198 |
-def test_list_composition(datafiles, filename,
|
|
217 |
+def test_list_composition(datafiles, filename, tmpdir,
|
|
199 | 218 |
index, length, mood,
|
200 |
- prov_file, prov_line, prov_col):
|
|
201 |
- base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
|
|
202 |
- overlay = os.path.join(datafiles.dirname, datafiles.basename, filename)
|
|
219 |
+ prov_file, prov_line, prov_col, caching):
|
|
220 |
+ base_file = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
|
|
221 |
+ overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
|
|
222 |
+ |
|
223 |
+ base = load_yaml_file(base_file, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
|
|
224 |
+ overlay = load_yaml_file(overlay_file, cache_path=tmpdir, shortname=filename, from_cache=caching)
|
|
203 | 225 |
|
204 |
- base = _yaml.load(base, shortname='basics.yaml')
|
|
205 |
- overlay = _yaml.load(overlay, shortname=filename)
|
|
206 | 226 |
_yaml.composite_dict(base, overlay)
|
207 | 227 |
|
208 | 228 |
children = _yaml.node_get(base, list, 'children')
|
... | ... | @@ -254,6 +274,7 @@ def test_list_deletion(datafiles): |
254 | 274 |
# prov_col: The expected provenance column of "mood"
|
255 | 275 |
#
|
256 | 276 |
@pytest.mark.datafiles(os.path.join(DATA_DIR))
|
277 |
+@pytest.mark.parametrize('caching', [('raw'), ('cached')])
|
|
257 | 278 |
@pytest.mark.parametrize("filename1,filename2,index,length,mood,prov_file,prov_line,prov_col", [
|
258 | 279 |
|
259 | 280 |
# Test results of compositing literal list with (>) and then (<)
|
... | ... | @@ -310,9 +331,9 @@ def test_list_deletion(datafiles): |
310 | 331 |
('listoverwrite.yaml', 'listprepend.yaml', 2, 4, 'overwrite1', 'listoverwrite.yaml', 5, 10),
|
311 | 332 |
('listoverwrite.yaml', 'listprepend.yaml', 3, 4, 'overwrite2', 'listoverwrite.yaml', 7, 10),
|
312 | 333 |
])
|
313 |
-def test_list_composition_twice(datafiles, filename1, filename2,
|
|
334 |
+def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
|
|
314 | 335 |
index, length, mood,
|
315 |
- prov_file, prov_line, prov_col):
|
|
336 |
+ prov_file, prov_line, prov_col, caching):
|
|
316 | 337 |
file_base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
|
317 | 338 |
file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1)
|
318 | 339 |
file2 = os.path.join(datafiles.dirname, datafiles.basename, filename2)
|
... | ... | @@ -320,9 +341,9 @@ def test_list_composition_twice(datafiles, filename1, filename2, |
320 | 341 |
#####################
|
321 | 342 |
# Round 1 - Fight !
|
322 | 343 |
#####################
|
323 |
- base = _yaml.load(file_base, shortname='basics.yaml')
|
|
324 |
- overlay1 = _yaml.load(file1, shortname=filename1)
|
|
325 |
- overlay2 = _yaml.load(file2, shortname=filename2)
|
|
344 |
+ base = load_yaml_file(file_base, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
|
|
345 |
+ overlay1 = load_yaml_file(file1, cache_path=tmpdir, shortname=filename1, from_cache=caching)
|
|
346 |
+ overlay2 = load_yaml_file(file2, cache_path=tmpdir, shortname=filename2, from_cache=caching)
|
|
326 | 347 |
|
327 | 348 |
_yaml.composite_dict(base, overlay1)
|
328 | 349 |
_yaml.composite_dict(base, overlay2)
|
... | ... | @@ -337,9 +358,9 @@ def test_list_composition_twice(datafiles, filename1, filename2, |
337 | 358 |
#####################
|
338 | 359 |
# Round 2 - Fight !
|
339 | 360 |
#####################
|
340 |
- base = _yaml.load(file_base, shortname='basics.yaml')
|
|
341 |
- overlay1 = _yaml.load(file1, shortname=filename1)
|
|
342 |
- overlay2 = _yaml.load(file2, shortname=filename2)
|
|
361 |
+ base = load_yaml_file(file_base, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
|
|
362 |
+ overlay1 = load_yaml_file(file1, cache_path=tmpdir, shortname=filename1, from_cache=caching)
|
|
363 |
+ overlay2 = load_yaml_file(file2, cache_path=tmpdir, shortname=filename2, from_cache=caching)
|
|
343 | 364 |
|
344 | 365 |
_yaml.composite_dict(overlay1, overlay2)
|
345 | 366 |
_yaml.composite_dict(base, overlay1)
|