Tiago Gomes pushed to branch tiagogomes/issue-287-backport at BuildStream / buildstream
Commits:
- 95630260 by Tristan Van Berkom at 2018-09-14T12:11:30Z
- abc5e375 by Tristan Van Berkom at 2018-09-14T12:11:31Z
- fe98e5cb by Tristan Van Berkom at 2018-09-14T12:11:31Z
- 354efc50 by Tristan Van Berkom at 2018-09-14T12:11:31Z
- 12296c84 by Tristan Van Berkom at 2018-09-14T12:11:31Z
- 673f2372 by Tristan Van Berkom at 2018-09-14T12:11:31Z
- ef846ced by Tristan Van Berkom at 2018-09-14T12:44:45Z
- 4aa0dd0b by Tiago Gomes at 2018-09-14T15:43:47Z
- 06001fef by Tiago Gomes at 2018-09-14T15:43:47Z
- 5e3587cd by Tiago Gomes at 2018-09-14T15:43:47Z
- 3cbcec43 by Tiago Gomes at 2018-09-14T15:43:47Z
14 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_exceptions.py
- buildstream/_stream.py
- buildstream/data/projectconfig.yaml
- buildstream/element.py
- doc/source/format_declaring.rst
- tests/artifactcache/expiry.py
- tests/integration/manual.py
- + tests/loader/variables.py
- + tests/loader/variables/simple/foo.txt
- + tests/loader/variables/simple/project.conf
- tests/testutils/__init__.py
- tests/testutils/element_generators.py
- tests/testutils/repo/git.py
Changes:
... | ... | @@ -87,7 +87,7 @@ class ArtifactCache(): |
87 | 87 |
self.global_remote_specs = []
|
88 | 88 |
self.project_remote_specs = {}
|
89 | 89 |
|
90 |
- self._required_artifacts = set() # The artifacts required for this session
|
|
90 |
+ self._required_elements = set() # The elements required for this session
|
|
91 | 91 |
self._cache_size = None # The current cache size, sometimes it's an estimate
|
92 | 92 |
self._cache_quota = None # The cache quota
|
93 | 93 |
self._cache_lower_threshold = None # The target cache size for a cleanup
|
... | ... | @@ -189,33 +189,40 @@ class ArtifactCache(): |
189 | 189 |
(str(provenance)))
|
190 | 190 |
return cache_specs
|
191 | 191 |
|
192 |
- # append_required_artifacts():
|
|
192 |
+ # mark_required_elements():
|
|
193 | 193 |
#
|
194 |
- # Append to the list of elements whose artifacts are required for
|
|
195 |
- # the current run. Artifacts whose elements are in this list will
|
|
196 |
- # be locked by the artifact cache and not touched for the duration
|
|
197 |
- # of the current pipeline.
|
|
194 |
+ # Mark elements whose artifacts are required for the current run.
|
|
195 |
+ #
|
|
196 |
+ # Artifacts whose elements are in this list will be locked by the artifact
|
|
197 |
+ # cache and not touched for the duration of the current pipeline.
|
|
198 | 198 |
#
|
199 | 199 |
# Args:
|
200 | 200 |
# elements (iterable): A set of elements to mark as required
|
201 | 201 |
#
|
202 |
- def append_required_artifacts(self, elements):
|
|
203 |
- # We lock both strong and weak keys - deleting one but not the
|
|
204 |
- # other won't save space in most cases anyway, but would be a
|
|
205 |
- # user inconvenience.
|
|
202 |
+ def mark_required_elements(self, elements):
|
|
203 |
+ |
|
204 |
+ # We risk calling this function with a generator, so we
|
|
205 |
+ # better consume it first.
|
|
206 |
+ #
|
|
207 |
+ elements = list(elements)
|
|
208 |
+ |
|
209 |
+ # Mark the elements as required. We may not know their
|
|
210 |
+ # cache keys yet, so we only check those later, when deleting.
|
|
211 |
+ #
|
|
212 |
+ self._required_elements.update(elements)
|
|
206 | 213 |
|
214 |
+ # For the cache keys which have been resolved so far, we bump
|
|
215 |
+ # their atime.
|
|
216 |
+ #
|
|
217 |
+ # This is done in case there are concurrent instances of
|
|
218 |
+ # BuildStream running with the same artifact cache; it reduces
|
|
219 |
+ # the likelihood of one instance deleting artifacts
|
|
220 |
+ # which are required by the other.
|
|
207 | 221 |
for element in elements:
|
208 | 222 |
strong_key = element._get_cache_key(strength=_KeyStrength.STRONG)
|
209 | 223 |
weak_key = element._get_cache_key(strength=_KeyStrength.WEAK)
|
210 |
- |
|
211 | 224 |
for key in (strong_key, weak_key):
|
212 |
- if key and key not in self._required_artifacts:
|
|
213 |
- self._required_artifacts.add(key)
|
|
214 |
- |
|
215 |
- # We also update the usage times of any artifacts
|
|
216 |
- # we will be using, which helps preventing a
|
|
217 |
- # buildstream process that runs in parallel with
|
|
218 |
- # this one from removing artifacts in-use.
|
|
225 |
+ if key:
|
|
219 | 226 |
try:
|
220 | 227 |
self.update_atime(key)
|
221 | 228 |
except ArtifactError:
|
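
For illustration, a minimal sketch (not BuildStream code, paths are hypothetical) of why bumping access times protects required artifacts: a cleaner that removes least-recently-used entries first only reaches freshly touched artifacts last, so a concurrent instance is much less likely to evict them.

    import os
    import time

    def touch_atime(path):
        # Update only the access time; keep the modification time as-is.
        st = os.stat(path)
        os.utime(path, (time.time(), st.st_mtime))

    def deletion_candidates(cache_dir):
        # Least-recently-used first: recently bumped artifacts sort last.
        entries = (os.path.join(cache_dir, e) for e in os.listdir(cache_dir))
        return sorted(entries, key=lambda p: os.stat(p).st_atime)
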
... | ... | @@ -231,6 +238,18 @@ class ArtifactCache(): |
231 | 238 |
def clean(self):
|
232 | 239 |
artifacts = self.list_artifacts()
|
233 | 240 |
|
241 |
+ # Build a set of the cache keys which are required
|
|
242 |
+ # based on the required elements at cleanup time
|
|
243 |
+ #
|
|
244 |
+ # We lock both strong and weak keys - deleting one but not the
|
|
245 |
+ # other won't save space, but would be a user inconvenience.
|
|
246 |
+ required_artifacts = set()
|
|
247 |
+ for element in self._required_elements:
|
|
248 |
+ required_artifacts.update([
|
|
249 |
+ element._get_cache_key(strength=_KeyStrength.STRONG),
|
|
250 |
+ element._get_cache_key(strength=_KeyStrength.WEAK)
|
|
251 |
+ ])
|
|
252 |
+ |
|
234 | 253 |
# Do a real computation of the cache size once, just in case
|
235 | 254 |
self.compute_cache_size()
|
236 | 255 |
|
... | ... | @@ -256,7 +275,7 @@ class ArtifactCache(): |
256 | 275 |
break
|
257 | 276 |
|
258 | 277 |
key = to_remove.rpartition('/')[2]
|
259 |
- if key not in self._required_artifacts:
|
|
278 |
+ if key not in required_artifacts:
|
|
260 | 279 |
|
261 | 280 |
# Remove the actual artifact, if it's not required.
|
262 | 281 |
size = self.remove(to_remove)
|
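
A rough sketch of the cleanup flow after this change. The cache object and its helpers (required_elements, cache_keys(), compute_size(), list_artifacts(), remove()) are assumed interfaces for illustration, not the real ArtifactCache API:

    def clean(cache):
        # Collect the strong and weak keys of every element marked as
        # required; these artifacts must never be deleted in this session.
        required = set()
        for element in cache.required_elements:
            required.update(k for k in element.cache_keys() if k)

        size = cache.compute_size()
        for artifact in cache.list_artifacts():     # least recently used first
            if size <= cache.lower_threshold:
                break
            key = artifact.rpartition('/')[2]
            if key not in required:
                size -= cache.remove(artifact)      # remove() returns freed bytes
        return size
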
... | ... | @@ -219,6 +219,9 @@ class LoadErrorReason(Enum): |
219 | 219 |
# A recursive variable has been encountered
|
220 | 220 |
RECURSIVE_VARIABLE = 22
|
221 | 221 |
|
222 |
+ # An attempt to set the value of a protected variable
|
|
223 |
+ PROTECTED_VARIABLE_REDEFINED = 23
|
|
224 |
+ |
|
222 | 225 |
|
223 | 226 |
# LoadError
|
224 | 227 |
#
|
... | ... | @@ -937,13 +937,10 @@ class Stream(): |
937 | 937 |
# Set the "required" artifacts that should not be removed
|
938 | 938 |
# while this pipeline is active
|
939 | 939 |
#
|
940 |
- # FIXME: The set of required artifacts is only really needed
|
|
941 |
- # for build and pull tasks.
|
|
940 |
+ # It must include all the artifacts which are required by the
|
|
941 |
+ # final product. Note that this is a superset of the build plan.
|
|
942 | 942 |
#
|
943 |
- # It must include all the artifacts which are required by the
|
|
944 |
- # final product. Note that this is a superset of the build plan.
|
|
945 |
- #
|
|
946 |
- self._artifacts.append_required_artifacts((e for e in self._pipeline.dependencies(elements, Scope.ALL)))
|
|
943 |
+ self._artifacts.mark_required_elements(self._pipeline.dependencies(elements, Scope.ALL))
|
|
947 | 944 |
|
948 | 945 |
if selection == PipelineSelection.PLAN and dynamic_plan:
|
949 | 946 |
# We use a dynamic build plan, only request artifacts of top-level targets,
|
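
A toy illustration (hypothetical element names) of why the required set is a superset of the build plan: dependencies that are already cached are not scheduled for building, but they must still be marked as required so a cleanup during the session cannot evict them.

    all_deps = ['base.bst', 'lib.bst', 'app.bst']   # Scope.ALL closure of the targets
    cached = {'base.bst'}                           # nothing left to build for these

    build_plan = [e for e in all_deps if e not in cached]
    required = set(all_deps)                        # mark everything, not just the plan

    assert set(build_plan) < required
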
... | ... | @@ -20,21 +20,7 @@ fail-on-overlap: False |
20 | 20 |
# Variable Configuration
|
21 | 21 |
#
|
22 | 22 |
variables:
|
23 |
- |
|
24 |
- # Maximum number of parallel build processes within a given
|
|
25 |
- # build, support for this is conditional on the element type
|
|
26 |
- # and the build system used (any element using 'make' can
|
|
27 |
- # implement this).
|
|
28 |
- #
|
|
29 |
- # Note: this value defaults to the number of cores available
|
|
30 |
- max-jobs: 4
|
|
31 |
- |
|
32 |
- # Note: These variables are defined later on in element.py and _project.py
|
|
33 |
- element-name: ""
|
|
34 |
- project-name: ""
|
|
35 |
- |
|
36 | 23 |
# Path configuration, to be used in build instructions.
|
37 |
- #
|
|
38 | 24 |
prefix: "/usr"
|
39 | 25 |
exec_prefix: "%{prefix}"
|
40 | 26 |
bindir: "%{exec_prefix}/bin"
|
... | ... | @@ -93,7 +79,6 @@ variables: |
93 | 79 |
find "%{install-root}" -name '*.pyc' -exec \
|
94 | 80 |
dd if=/dev/zero of={} bs=1 count=4 seek=4 conv=notrunc ';'
|
95 | 81 |
|
96 |
- |
|
97 | 82 |
# Base sandbox environment, can be overridden by plugins
|
98 | 83 |
environment:
|
99 | 84 |
PATH: /usr/bin:/bin:/usr/sbin:/sbin
|
... | ... | @@ -1434,15 +1434,20 @@ class Element(Plugin): |
1434 | 1434 |
workspace.clear_running_files()
|
1435 | 1435 |
self._get_context().get_workspaces().save_config()
|
1436 | 1436 |
|
1437 |
- # We also need to update the required artifacts, since
|
|
1438 |
- # workspaced dependencies do not have a fixed cache key
|
|
1439 |
- # when the build starts.
|
|
1437 |
+ # This element will have already been marked as
|
|
1438 |
+ # required, but we bump the atime again, in case
|
|
1439 |
+ # we did not know the cache key until now.
|
|
1440 | 1440 |
#
|
1441 |
- # This does *not* cause a race condition, because
|
|
1442 |
- # _assemble_done is called before a cleanup job may be
|
|
1443 |
- # launched.
|
|
1441 |
+ # FIXME: This is not exactly correct, we should be
|
|
1442 |
+ # doing this at the time which we have discovered
|
|
1443 |
+ # a new cache key, this just happens to be the
|
|
1444 |
+ # last place where that can happen.
|
|
1444 | 1445 |
#
|
1445 |
- self.__artifacts.append_required_artifacts([self])
|
|
1446 |
+ # Ultimately, we should be refactoring
|
|
1447 |
+ # Element._update_state() such that we know
|
|
1448 |
+ # when a cache key is actually discovered.
|
|
1449 |
+ #
|
|
1450 |
+ self.__artifacts.mark_required_elements([self])
|
|
1446 | 1451 |
|
1447 | 1452 |
# _assemble():
|
1448 | 1453 |
#
|
... | ... | @@ -2161,7 +2166,8 @@ class Element(Plugin): |
2161 | 2166 |
# substituting command strings to be run in the sandbox
|
2162 | 2167 |
#
|
2163 | 2168 |
def __extract_variables(self, meta):
|
2164 |
- default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
|
|
2169 |
+ default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables',
|
|
2170 |
+ default_value={})
|
|
2165 | 2171 |
|
2166 | 2172 |
project = self._get_project()
|
2167 | 2173 |
if self.__is_junction:
|
... | ... | @@ -2174,6 +2180,13 @@ class Element(Plugin): |
2174 | 2180 |
_yaml.composite(variables, meta.variables)
|
2175 | 2181 |
_yaml.node_final_assertions(variables)
|
2176 | 2182 |
|
2183 |
+ for var in ('project-name', 'element-name', 'max-jobs'):
|
|
2184 |
+ provenance = _yaml.node_get_provenance(variables, var)
|
|
2185 |
+ if provenance and provenance.filename != '':
|
|
2186 |
+ raise LoadError(LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
|
|
2187 |
+ "{}: invalid redefinition of protected variable '{}'"
|
|
2188 |
+ .format(provenance, var))
|
|
2189 |
+ |
|
2177 | 2190 |
return variables
|
2178 | 2191 |
|
2179 | 2192 |
# This will resolve the final configuration to be handed
|
... | ... | @@ -420,3 +420,25 @@ dependency and that all referenced variables are declared, the following is fine |
420 | 420 |
install-commands:
|
421 | 421 |
- |
|
422 | 422 |
%{make-install} RELEASE_TEXT="%{release-text}"
|
423 |
+ |
|
424 |
+ |
|
425 |
+Variables declared by BuildStream
|
|
426 |
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
427 |
+BuildStream declares a set of :ref:`builtin <project_builtin_defaults>`
|
|
428 |
+variables that may be overridden. In addition, the following
|
|
429 |
+read-only variables are also dynamically declared by BuildStream:
|
|
430 |
+ |
|
431 |
+* ``element-name``
|
|
432 |
+ |
|
433 |
+ The name of the element being processed (e.g. base/alpine.bst).
|
|
434 |
+ |
|
435 |
+* ``project-name``
|
|
436 |
+ |
|
437 |
+ The name of the project where BuildStream is being used.
|
|
438 |
+ |
|
439 |
+* ``max-jobs``
|
|
440 |
+ |
|
441 |
+ Maximum number of parallel build processes within a given
|
|
442 |
+ build; support for this is conditional on the element type
|
|
443 |
+ and the build system used (any element using 'make' can
|
|
444 |
+ implement this).
|
... | ... | @@ -5,7 +5,7 @@ import pytest |
5 | 5 |
from buildstream import _yaml
|
6 | 6 |
from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
7 | 7 |
|
8 |
-from tests.testutils import cli, create_element_size
|
|
8 |
+from tests.testutils import cli, create_element_size, update_element_size
|
|
9 | 9 |
|
10 | 10 |
|
11 | 11 |
DATA_DIR = os.path.join(
|
... | ... | @@ -74,6 +74,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir, size): |
74 | 74 |
create_element_size('target.bst', project, element_path, [], size)
|
75 | 75 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
76 | 76 |
res.assert_main_error(ErrorDomain.STREAM, None)
|
77 |
+ res.assert_task_error(ErrorDomain.ARTIFACT, 'cache-too-full')
|
|
77 | 78 |
|
78 | 79 |
|
79 | 80 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -175,24 +176,8 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
175 | 176 |
|
176 | 177 |
|
177 | 178 |
# Assert that we never delete a dependency required for a build tree
|
178 |
-#
|
|
179 |
-# NOTE: This test expects that a build will fail if it attempts to
|
|
180 |
-# put more artifacts in the cache than the quota can hold,
|
|
181 |
-# and expects that the last two elements which don't fit into
|
|
182 |
-# the quota wont even be built.
|
|
183 |
-#
|
|
184 |
-# In real life, this will not be the case, since once we reach
|
|
185 |
-# the estimated quota we launch a cache size calculation job and
|
|
186 |
-# only launch a cleanup job when the size is calculated; and
|
|
187 |
-# other build tasks will be scheduled while the cache size job
|
|
188 |
-# is running.
|
|
189 |
-#
|
|
190 |
-# This test only passes because we configure `builders` to 1,
|
|
191 |
-# ensuring that the cache size job runs exclusively since it
|
|
192 |
-# also requires a compute resource (a "builder").
|
|
193 |
-#
|
|
194 | 179 |
@pytest.mark.datafiles(DATA_DIR)
|
195 |
-def test_never_delete_dependencies(cli, datafiles, tmpdir):
|
|
180 |
+def test_never_delete_required(cli, datafiles, tmpdir):
|
|
196 | 181 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
197 | 182 |
element_path = 'elements'
|
198 | 183 |
|
... | ... | @@ -205,37 +190,94 @@ def test_never_delete_dependencies(cli, datafiles, tmpdir): |
205 | 190 |
}
|
206 | 191 |
})
|
207 | 192 |
|
208 |
- # Create a build tree
|
|
209 |
- create_element_size('dependency.bst', project,
|
|
210 |
- element_path, [], 8000000)
|
|
211 |
- create_element_size('related.bst', project,
|
|
212 |
- element_path, ['dependency.bst'], 8000000)
|
|
213 |
- create_element_size('target.bst', project,
|
|
214 |
- element_path, ['related.bst'], 8000000)
|
|
215 |
- create_element_size('target2.bst', project,
|
|
216 |
- element_path, ['target.bst'], 8000000)
|
|
193 |
+ # Create a linear build tree
|
|
194 |
+ create_element_size('dep1.bst', project, element_path, [], 8000000)
|
|
195 |
+ create_element_size('dep2.bst', project, element_path, ['dep1.bst'], 8000000)
|
|
196 |
+ create_element_size('dep3.bst', project, element_path, ['dep2.bst'], 8000000)
|
|
197 |
+ create_element_size('target.bst', project, element_path, ['dep3.bst'], 8000000)
|
|
217 | 198 |
|
218 | 199 |
# We try to build this pipeline, but it's too big for the
|
219 | 200 |
# cache. Since all elements are required, the build should fail.
|
220 |
- res = cli.run(project=project, args=['build', 'target2.bst'])
|
|
201 |
+ res = cli.run(project=project, args=['build', 'target.bst'])
|
|
221 | 202 |
res.assert_main_error(ErrorDomain.STREAM, None)
|
203 |
+ res.assert_task_error(ErrorDomain.ARTIFACT, 'cache-too-full')
|
|
222 | 204 |
|
223 |
- assert cli.get_element_state(project, 'dependency.bst') == 'cached'
|
|
205 |
+ # Only the first artifact fits in the cache, but we expect
|
|
206 |
+ # that the first *two* artifacts will be cached.
|
|
207 |
+ #
|
|
208 |
+ # This is because after caching the first artifact we must
|
|
209 |
+ # proceed to build the next artifact, and we cannot really
|
|
210 |
+ # know how large an artifact will be until we try to cache it.
|
|
211 |
+ #
|
|
212 |
+ # In this case, we deem it more acceptable to not delete an
|
|
213 |
+ # artifact which caused the cache to outgrow the quota.
|
|
214 |
+ #
|
|
215 |
+ # Note that this test only works because we have forced
|
|
216 |
+ # the configuration to build one element at a time; in real
|
|
217 |
+ # life there may potentially be as many cached artifacts exceeding
|
|
218 |
+ # the quota as there are builders.
|
|
219 |
+ #
|
|
220 |
+ assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
221 |
+ assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
222 |
+ |
|
223 |
+ assert cli.get_element_state(project, 'dep3.bst') != 'cached'
|
|
224 |
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
|
|
225 |
+ |
|
226 |
+ |
|
227 |
+# Assert that we never delete a dependency required for a build tree,
|
|
228 |
+# even when the artifact cache was previously populated with
|
|
229 |
+# artifacts we do not require, and the new build is run with dynamic tracking.
|
|
230 |
+#
|
|
231 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
232 |
+def test_never_delete_required_track(cli, datafiles, tmpdir):
|
|
233 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
234 |
+ element_path = 'elements'
|
|
235 |
+ |
|
236 |
+ cli.configure({
|
|
237 |
+ 'cache': {
|
|
238 |
+ 'quota': 10000000
|
|
239 |
+ },
|
|
240 |
+ 'scheduler': {
|
|
241 |
+ 'builders': 1
|
|
242 |
+ }
|
|
243 |
+ })
|
|
224 | 244 |
|
225 |
- # This is *technically* above the cache limit. BuildStream accepts
|
|
226 |
- # some fuzziness, since it's hard to assert that we don't create
|
|
227 |
- # an artifact larger than the cache quota. We would have to remove
|
|
228 |
- # the artifact after-the-fact, but since it is required for the
|
|
229 |
- # current build and nothing broke yet, it's nicer to keep it
|
|
230 |
- # around.
|
|
245 |
+ # Create a linear build tree
|
|
246 |
+ repo_dep1 = create_element_size('dep1.bst', project, element_path, [], 2000000)
|
|
247 |
+ repo_dep2 = create_element_size('dep2.bst', project, element_path, ['dep1.bst'], 2000000)
|
|
248 |
+ repo_dep3 = create_element_size('dep3.bst', project, element_path, ['dep2.bst'], 2000000)
|
|
249 |
+ repo_target = create_element_size('target.bst', project, element_path, ['dep3.bst'], 2000000)
|
|
250 |
+ |
|
251 |
+ # This should all fit into the artifact cache
|
|
252 |
+ res = cli.run(project=project, args=['build', 'target.bst'])
|
|
253 |
+ res.assert_success()
|
|
254 |
+ |
|
255 |
+ # They should all be cached
|
|
256 |
+ assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
257 |
+ assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
258 |
+ assert cli.get_element_state(project, 'dep3.bst') == 'cached'
|
|
259 |
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
|
|
260 |
+ |
|
261 |
+ # Now increase the size of all the elements
|
|
231 | 262 |
#
|
232 |
- # This scenario is quite unlikely, and the cache overflow will be
|
|
233 |
- # resolved if the user does something about it anyway.
|
|
263 |
+ update_element_size('dep1.bst', project, repo_dep1, 8000000)
|
|
264 |
+ update_element_size('dep2.bst', project, repo_dep2, 8000000)
|
|
265 |
+ update_element_size('dep3.bst', project, repo_dep3, 8000000)
|
|
266 |
+ update_element_size('target.bst', project, repo_target, 8000000)
|
|
267 |
+ |
|
268 |
+ # Now repeat the same test we did in test_never_delete_required(),
|
|
269 |
+ # except this time let's add dynamic tracking
|
|
234 | 270 |
#
|
235 |
- assert cli.get_element_state(project, 'related.bst') == 'cached'
|
|
271 |
+ res = cli.run(project=project, args=['build', '--track-all', 'target.bst'])
|
|
272 |
+ res.assert_main_error(ErrorDomain.STREAM, None)
|
|
273 |
+ res.assert_task_error(ErrorDomain.ARTIFACT, 'cache-too-full')
|
|
236 | 274 |
|
275 |
+ # Expect the same result as in test_never_delete_required()
|
|
276 |
+ #
|
|
277 |
+ assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
278 |
+ assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
279 |
+ assert cli.get_element_state(project, 'dep3.bst') != 'cached'
|
|
237 | 280 |
assert cli.get_element_state(project, 'target.bst') != 'cached'
|
238 |
- assert cli.get_element_state(project, 'target2.bst') != 'cached'
|
|
239 | 281 |
|
240 | 282 |
|
241 | 283 |
# Ensure that only valid cache quotas make it through the loading
|
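
A toy simulation (not BuildStream code) of the behaviour these tests assert: with a 10MB quota and four required 8MB artifacts built one at a time, an artifact's size is only known once it has been cached, so the artifact that pushes the cache over quota is kept; since everything cached is required, the next build job fails with 'cache-too-full'.

    QUOTA = 10000000
    sizes = [('dep1.bst', 8000000), ('dep2.bst', 8000000),
             ('dep3.bst', 8000000), ('target.bst', 8000000)]

    cached, total = [], 0
    for name, size in sizes:
        if total > QUOTA:
            # Already over quota and nothing required may be evicted:
            # building this element fails instead of evicting artifacts.
            break
        cached.append(name)     # the size is only accounted once it is cached
        total += size

    print(cached)   # ['dep1.bst', 'dep2.bst']
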
... | ... | @@ -64,7 +64,7 @@ strip |
64 | 64 |
|
65 | 65 |
|
66 | 66 |
@pytest.mark.datafiles(DATA_DIR)
|
67 |
-def test_manual_element_noparallel(cli, tmpdir, datafiles):
|
|
67 |
+def test_manual_element_environment(cli, tmpdir, datafiles):
|
|
68 | 68 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
69 | 69 |
checkout = os.path.join(cli.directory, 'checkout')
|
70 | 70 |
element_path = os.path.join(project, 'elements')
|
... | ... | @@ -72,15 +72,11 @@ def test_manual_element_noparallel(cli, tmpdir, datafiles): |
72 | 72 |
|
73 | 73 |
create_manual_element(element_name, element_path, {
|
74 | 74 |
'install-commands': [
|
75 |
- "echo $MAKEFLAGS >> test",
|
|
76 | 75 |
"echo $V >> test",
|
77 | 76 |
"cp test %{install-root}"
|
78 | 77 |
]
|
79 | 78 |
}, {
|
80 |
- 'max-jobs': 2,
|
|
81 |
- 'notparallel': True
|
|
82 | 79 |
}, {
|
83 |
- 'MAKEFLAGS': '-j%{max-jobs} -Wall',
|
|
84 | 80 |
'V': 2
|
85 | 81 |
})
|
86 | 82 |
|
... | ... | @@ -93,13 +89,11 @@ def test_manual_element_noparallel(cli, tmpdir, datafiles): |
93 | 89 |
with open(os.path.join(checkout, 'test')) as f:
|
94 | 90 |
text = f.read()
|
95 | 91 |
|
96 |
- assert text == """-j1 -Wall
|
|
97 |
-2
|
|
98 |
-"""
|
|
92 |
+ assert text == "2\n"
|
|
99 | 93 |
|
100 | 94 |
|
101 | 95 |
@pytest.mark.datafiles(DATA_DIR)
|
102 |
-def test_manual_element_environment(cli, tmpdir, datafiles):
|
|
96 |
+def test_manual_element_noparallel(cli, tmpdir, datafiles):
|
|
103 | 97 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
104 | 98 |
checkout = os.path.join(cli.directory, 'checkout')
|
105 | 99 |
element_path = os.path.join(project, 'elements')
|
... | ... | @@ -112,7 +106,7 @@ def test_manual_element_environment(cli, tmpdir, datafiles): |
112 | 106 |
"cp test %{install-root}"
|
113 | 107 |
]
|
114 | 108 |
}, {
|
115 |
- 'max-jobs': 2
|
|
109 |
+ 'notparallel': True
|
|
116 | 110 |
}, {
|
117 | 111 |
'MAKEFLAGS': '-j%{max-jobs} -Wall',
|
118 | 112 |
'V': 2
|
... | ... | @@ -127,6 +121,6 @@ def test_manual_element_environment(cli, tmpdir, datafiles): |
127 | 121 |
with open(os.path.join(checkout, 'test')) as f:
|
128 | 122 |
text = f.read()
|
129 | 123 |
|
130 |
- assert text == """-j2 -Wall
|
|
124 |
+ assert text == """-j1 -Wall
|
|
131 | 125 |
2
|
132 | 126 |
"""
|
1 |
+import os
|
|
2 |
+import pytest
|
|
3 |
+ |
|
4 |
+from buildstream import _yaml
|
|
5 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
6 |
+from tests.testutils import cli
|
|
7 |
+ |
|
8 |
+DATA_DIR = os.path.join(
|
|
9 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
10 |
+ 'variables',
|
|
11 |
+)
|
|
12 |
+ |
|
13 |
+PROTECTED_VARIABLES = [('project-name'), ('element-name'), ('max-jobs')]
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
|
|
17 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
18 |
+def test_use_of_protected_var_project_conf(cli, tmpdir, datafiles, protected_var):
|
|
19 |
+ project = os.path.join(str(datafiles), 'simple')
|
|
20 |
+ |
|
21 |
+ conf = {
|
|
22 |
+ 'name': 'test',
|
|
23 |
+ 'variables': {
|
|
24 |
+ protected_var: 'some-value'
|
|
25 |
+ }
|
|
26 |
+ }
|
|
27 |
+ _yaml.dump(conf, os.path.join(project, 'project.conf'))
|
|
28 |
+ |
|
29 |
+ element = {
|
|
30 |
+ 'kind': 'import',
|
|
31 |
+ 'sources': [
|
|
32 |
+ {
|
|
33 |
+ 'kind': 'local',
|
|
34 |
+ 'path': 'foo.txt'
|
|
35 |
+ }
|
|
36 |
+ ],
|
|
37 |
+ }
|
|
38 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
39 |
+ |
|
40 |
+ result = cli.run(project=project, args=['build', 'target.bst'])
|
|
41 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
42 |
+ LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
|
|
43 |
+ |
|
44 |
+ |
|
45 |
+@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
|
|
46 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
47 |
+def test_use_of_protected_var_element_overrides(cli, tmpdir, datafiles, protected_var):
|
|
48 |
+ project = os.path.join(str(datafiles), 'simple')
|
|
49 |
+ |
|
50 |
+ conf = {
|
|
51 |
+ 'name': 'test',
|
|
52 |
+ 'elements': {
|
|
53 |
+ 'manual': {
|
|
54 |
+ 'variables': {
|
|
55 |
+ protected_var: 'some-value'
|
|
56 |
+ }
|
|
57 |
+ }
|
|
58 |
+ }
|
|
59 |
+ }
|
|
60 |
+ _yaml.dump(conf, os.path.join(project, 'project.conf'))
|
|
61 |
+ |
|
62 |
+ element = {
|
|
63 |
+ 'kind': 'manual',
|
|
64 |
+ 'sources': [
|
|
65 |
+ {
|
|
66 |
+ 'kind': 'local',
|
|
67 |
+ 'path': 'foo.txt'
|
|
68 |
+ }
|
|
69 |
+ ],
|
|
70 |
+ }
|
|
71 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
72 |
+ |
|
73 |
+ result = cli.run(project=project, args=['build', 'target.bst'])
|
|
74 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
75 |
+ LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
|
|
76 |
+ |
|
77 |
+ |
|
78 |
+@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
|
|
79 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
80 |
+def test_use_of_protected_var_in_element(cli, tmpdir, datafiles, protected_var):
|
|
81 |
+ project = os.path.join(str(datafiles), 'simple')
|
|
82 |
+ |
|
83 |
+ element = {
|
|
84 |
+ 'kind': 'import',
|
|
85 |
+ 'sources': [
|
|
86 |
+ {
|
|
87 |
+ 'kind': 'local',
|
|
88 |
+ 'path': 'foo.txt'
|
|
89 |
+ }
|
|
90 |
+ ],
|
|
91 |
+ 'variables': {
|
|
92 |
+ protected_var: 'some-value'
|
|
93 |
+ }
|
|
94 |
+ }
|
|
95 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
96 |
+ |
|
97 |
+ result = cli.run(project=project, args=['build', 'target.bst'])
|
|
98 |
+ result.assert_main_error(ErrorDomain.LOAD,
|
|
99 |
+ LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
|
1 |
+foo
|
1 |
+name: foo
|
1 | 1 |
from .runcli import cli, cli_integration
|
2 | 2 |
from .repo import create_repo, ALL_REPO_KINDS
|
3 | 3 |
from .artifactshare import create_artifact_share
|
4 |
-from .element_generators import create_element_size
|
|
4 |
+from .element_generators import create_element_size, update_element_size
|
|
5 | 5 |
from .junction import generate_junction
|
1 | 1 |
import os
|
2 | 2 |
|
3 | 3 |
from buildstream import _yaml
|
4 |
+from buildstream import utils
|
|
5 |
+ |
|
6 |
+from . import create_repo
|
|
4 | 7 |
|
5 | 8 |
|
6 | 9 |
# create_element_size()
|
7 | 10 |
#
|
8 |
-# This will open a "<name>_data" file for writing and write
|
|
9 |
-# <size> MB of urandom (/dev/urandom) "stuff" into the file.
|
|
10 |
-# A bst import element file is then created: <name>.bst
|
|
11 |
+# Creates an import element with a git repo, using random
|
|
12 |
+# data to create a file in that repo of the specified size,
|
|
13 |
+# such that building it will add an artifact of the specified
|
|
14 |
+# size to the artifact cache.
|
|
11 | 15 |
#
|
12 | 16 |
# Args:
|
13 |
-# name: (str) of the element name (e.g. target.bst)
|
|
14 |
-# path: (str) pathway to the project/elements directory
|
|
15 |
-# dependencies: A list of strings (can also be an empty list)
|
|
16 |
-# size: (int) size of the element in bytes
|
|
17 |
+ # name (str): The element name (e.g. target.bst)
|
|
18 |
+# project_dir (str): The path to the project
|
|
19 |
+# element_path (str): The element path within the project
|
|
20 |
+# dependencies: A list of strings (can also be an empty list)
|
|
21 |
+# size: (int) size of the element in bytes
|
|
17 | 22 |
#
|
18 | 23 |
# Returns:
|
19 |
-# Nothing (creates a .bst file of specified size)
|
|
24 |
+# (Repo): A git repo which can be used to introduce trackable changes
|
|
25 |
+# by using the update_element_size() function below.
|
|
20 | 26 |
#
|
21 | 27 |
def create_element_size(name, project_dir, elements_path, dependencies, size):
|
22 | 28 |
full_elements_path = os.path.join(project_dir, elements_path)
|
23 | 29 |
os.makedirs(full_elements_path, exist_ok=True)
|
24 | 30 |
|
25 |
- # Create a file to be included in this element's artifact
|
|
26 |
- with open(os.path.join(project_dir, name + '_data'), 'wb+') as f:
|
|
27 |
- f.write(os.urandom(size))
|
|
31 |
+ # Create a git repo
|
|
32 |
+ repodir = os.path.join(project_dir, 'repos')
|
|
33 |
+ repo = create_repo('git', repodir, subdir=name)
|
|
34 |
+ |
|
35 |
+ with utils._tempdir(dir=project_dir) as tmp:
|
|
36 |
+ |
|
37 |
+ # We use a data/ subdir in the git repo we create,
|
|
38 |
+ # and we set the import element to only extract that
|
|
39 |
+ # part; this ensures we never include a .git/ directory
|
|
40 |
+ # in the cached artifacts for these sized elements.
|
|
41 |
+ #
|
|
42 |
+ datadir = os.path.join(tmp, 'data')
|
|
43 |
+ os.makedirs(datadir)
|
|
44 |
+ |
|
45 |
+ # Use /dev/urandom to create the sized file in the datadir
|
|
46 |
+ with open(os.path.join(datadir, name), 'wb+') as f:
|
|
47 |
+ f.write(os.urandom(size))
|
|
48 |
+ |
|
49 |
+ # Create the git repo from the temp directory
|
|
50 |
+ ref = repo.create(tmp)
|
|
28 | 51 |
|
29 |
- # Simplest case: We want this file (of specified size) to just
|
|
30 |
- # be an import element.
|
|
31 | 52 |
element = {
|
32 | 53 |
'kind': 'import',
|
33 | 54 |
'sources': [
|
34 |
- {
|
|
35 |
- 'kind': 'local',
|
|
36 |
- 'path': name + '_data'
|
|
37 |
- }
|
|
55 |
+ repo.source_config(ref=ref)
|
|
38 | 56 |
],
|
57 |
+ 'config': {
|
|
58 |
+ # Extract only the data directory
|
|
59 |
+ 'source': 'data'
|
|
60 |
+ },
|
|
39 | 61 |
'depends': dependencies
|
40 | 62 |
}
|
41 | 63 |
_yaml.dump(element, os.path.join(project_dir, elements_path, name))
|
64 |
+ |
|
65 |
+ # Return the repo, so that it can later be used to add commits
|
|
66 |
+ return repo
|
|
67 |
+ |
|
68 |
+ |
|
69 |
+# update_element_size()
|
|
70 |
+#
|
|
71 |
+ # Updates a repo returned by create_element_size() by adding a
|
|
72 |
+ # new commit which completely replaces the sized file with one of
|
|
73 |
+ # the newly specified size.
|
|
74 |
+#
|
|
75 |
+# The name and project_dir arguments must match the arguments
|
|
76 |
+# previously given to create_element_size()
|
|
77 |
+#
|
|
78 |
+# Args:
|
|
79 |
+ # name (str): The element name (e.g. target.bst)
|
|
80 |
+# project_dir (str): The path to the project
|
|
81 |
+# repo: (Repo) The Repo returned by create_element_size()
|
|
82 |
+# size: (int) The new size which the element generates, in bytes
|
|
83 |
+#
|
|
84 |
+# Returns:
|
|
85 |
+ # (None): Nothing is returned; the given repo is updated in place
|
|
86 |
+ # with a new commit of the new size.
|
|
87 |
+#
|
|
88 |
+def update_element_size(name, project_dir, repo, size):
|
|
89 |
+ |
|
90 |
+ with utils._tempdir(dir=project_dir) as tmp:
|
|
91 |
+ |
|
92 |
+ new_file = os.path.join(tmp, name)
|
|
93 |
+ |
|
94 |
+ # Use /dev/urandom to create the sized file in the datadir
|
|
95 |
+ with open(new_file, 'wb+') as f:
|
|
96 |
+ f.write(os.urandom(size))
|
|
97 |
+ |
|
98 |
+ # Modify the git repo with a new commit to the same path,
|
|
99 |
+ # replacing the original file with a new one.
|
|
100 |
+ repo.modify_file(new_file, os.path.join('data', name))
|
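
A hedged usage sketch of these two helpers together, mirroring the updated expiry tests above ('project', 'element_path' and the cli fixture are assumed to be provided by the test):

    repo = create_element_size('dep1.bst', project, element_path, [], 2000000)
    res = cli.run(project=project, args=['build', 'dep1.bst'])
    res.assert_success()

    # Grow the element; the repo gains a new commit, so tracking picks it up.
    update_element_size('dep1.bst', project, repo, 8000000)
    res = cli.run(project=project, args=['build', '--track-all', 'dep1.bst'])
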
... | ... | @@ -46,6 +46,13 @@ class Git(Repo): |
46 | 46 |
], env=GIT_ENV, cwd=self.repo)
|
47 | 47 |
return self.latest_commit()
|
48 | 48 |
|
49 |
+ def modify_file(self, new_file, path):
|
|
50 |
+ shutil.copy(new_file, os.path.join(self.repo, path))
|
|
51 |
+ subprocess.call([
|
|
52 |
+ 'git', 'commit', path, '-m', 'Modified {}'.format(os.path.basename(path))
|
|
53 |
+ ], env=GIT_ENV, cwd=self.repo)
|
|
54 |
+ return self.latest_commit()
|
|
55 |
+ |
|
49 | 56 |
def add_submodule(self, subdir, url=None, checkout=None):
|
50 | 57 |
submodule = {}
|
51 | 58 |
if checkout is not None:
|