Jim MacArthur pushed to branch jmac/stop-caching-vdirs at BuildStream / buildstream
Commits:
- 461a0588 by Jim MacArthur at 2018-09-21T10:53:11Z
- aa9caaac by Jim MacArthur at 2018-09-21T10:53:11Z
- 2aae68c7 by Jim MacArthur at 2018-09-21T10:53:11Z
- ca1bb72c by Jim MacArthur at 2018-09-21T10:53:11Z
- 55c93a82 by Jim MacArthur at 2018-09-21T11:26:55Z
- e209beb0 by Chandan Singh at 2018-09-21T13:10:08Z
- 0b000518 by Chandan Singh at 2018-09-21T13:56:55Z
- ef26043a by Chandan Singh at 2018-09-21T17:14:16Z
- 1b2aed40 by Chandan Singh at 2018-09-21T17:40:11Z
- da7e038b by Jürg Billeter at 2018-09-24T13:58:55Z
- 86ea1173 by Jürg Billeter at 2018-09-24T13:59:59Z
- a76339de by Jürg Billeter at 2018-09-24T16:02:38Z
- 31b3f5be by Jim MacArthur at 2018-09-24T17:02:29Z
- 82cf1d69 by Jim MacArthur at 2018-09-24T17:02:29Z
12 changed files:
- buildstream/_artifactcache/cascache.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/sandbox/sandbox.py
- setup.py
- tests/artifactcache/push.py
- + tests/frontend/project/elements/rebuild-target.bst
- + tests/frontend/project/elements/source-bundle/source-bundle-hello.bst
- + tests/frontend/project/files/source-bundle/llamas.txt
- + tests/frontend/rebuild.py
- + tests/frontend/source_bundle.py
Changes:
buildstream/_artifactcache/cascache.py
@@ -348,19 +348,29 @@ class CASCache(ArtifactCache):
         return pushed

     def push_directory(self, project, directory):
+        """ Push the given virtual directory to all remotes.
+
+        Args:
+            project (Project): The current project
+            directory (Directory): A virtual directory object to push.
+
+        Raises: ArtifactError if no push remotes are configured.
+        """

         push_remotes = [r for r in self._remotes[project] if r.spec.push]

+        if not push_remotes:
+            raise ArtifactError("CASCache: push_directory was called, but no remote artifact " +
+                                "servers are configured as push remotes.")
+
         if directory.ref is None:
-            return None
+            return

         for remote in push_remotes:
             remote.init()

             self._send_directory(remote, directory.ref)

-        return directory.ref
-
     def push_message(self, project, message):

         push_remotes = [r for r in self._remotes[project] if r.spec.push]
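Note on the new push_directory() contract above: it no longer returns a digest and instead raises ArtifactError when no push remotes are configured, so callers read the digest from the directory object they passed in. A minimal sketch of the resulting calling pattern, assuming `cas`, `project` and `directory` are the usual CASCache, Project and CasBasedDirectory objects provided by the surrounding BuildStream code, and that ArtifactError is importable from buildstream._exceptions as it is inside this module; the error-handling helper is purely illustrative:

    from buildstream._exceptions import ArtifactError

    try:
        cas.push_directory(project, directory)   # returns nothing now
    except ArtifactError as e:
        report_configuration_problem(e)          # hypothetical helper
    else:
        # The digest is read from the pushed directory itself
        if not cas.verify_digest_pushed(project, directory.ref):
            raise RuntimeError("push could not be verified")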
buildstream/_stream.py
@@ -703,6 +703,7 @@ class Stream():

         # Create a temporary directory to build the source tree in
         builddir = self._context.builddir
+        os.makedirs(builddir, exist_ok=True)
         prefix = "{}-".format(target.normal_name)

         with TemporaryDirectory(prefix=prefix, dir=builddir) as tempdir:
@@ -1085,6 +1086,7 @@ class Stream():
         for element in elements:
             source_dir = os.path.join(directory, "source")
             element_source_dir = os.path.join(source_dir, element.normal_name)
+            os.makedirs(element_source_dir)

             element._stage_sources_at(element_source_dir)

buildstream/element.py
@@ -1532,8 +1532,6 @@ class Element(Plugin):
         with _signals.terminator(cleanup_rootdir), \
             self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox:  # nopep8

-            sandbox_vroot = sandbox.get_virtual_directory()
-
             # By default, the dynamic public data is the same as the static public data.
             # The plugin's assemble() method may modify this, though.
             self.__dynamic_public = _yaml.node_copy(self.__public)
@@ -1581,7 +1579,6 @@ class Element(Plugin):
             finally:
                 if collect is not None:
                     try:
-                        # Sandbox will probably have replaced its virtual directory, so get it again
                         sandbox_vroot = sandbox.get_virtual_directory()
                         collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
                     except VirtualDirectoryError:
@@ -1606,6 +1603,7 @@ class Element(Plugin):
                     collectvdir.export_files(filesdir, can_link=True)

                 try:
+                    sandbox_vroot = sandbox.get_virtual_directory()
                     sandbox_build_dir = sandbox_vroot.descend(
                         self.get_variable('build-root').lstrip(os.sep).split(os.sep))
                     # Hard link files from build-root dir to buildtreedir directory
@@ -2084,7 +2082,7 @@ class Element(Plugin):
     #
     # Raises an error if the artifact is not cached.
     #
-    def __assert_cached(self, keystrength=_KeyStrength.STRONG):
+    def __assert_cached(self, keystrength=None):
         assert self.__is_cached(keystrength=keystrength), "{}: Missing artifact {}".format(
             self, self._get_brief_display_key())

@@ -2137,14 +2135,11 @@ class Element(Plugin):
         project = self._get_project()
         platform = Platform.get_platform()

-        if self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY:
-            if not self.__artifacts.has_push_remotes(element=self):
-                # Give an early warning if remote execution will not work
-                raise ElementError("Artifact {} is configured to use remote execution but has no push remotes. "
-                                   .format(self.name) +
-                                   "The remote artifact server(s) may not be correctly configured or contactable.")
+        if (directory is not None and
+                self.__remote_execution_url and
+                self.BST_VIRTUAL_DIRECTORY):

-            self.info("Using a remote sandbox for artifact {}".format(self.name))
+            self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))

             sandbox = SandboxRemote(context, project,
                                     directory,
buildstream/sandbox/_sandboxremote.py
@@ -173,8 +173,8 @@ class SandboxRemote(Sandbox):
         platform = Platform.get_platform()
         cascache = platform.artifactcache
         # Now, push that key (without necessarily needing a ref) to the remote.
-        vdir_digest = cascache.push_directory(self._get_project(), upload_vdir)
-        if not vdir_digest or not cascache.verify_digest_pushed(self._get_project(), vdir_digest):
+        cascache.push_directory(self._get_project(), upload_vdir)
+        if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
             raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")

         # Set up environment and working directory
buildstream/sandbox/sandbox.py
@@ -110,6 +110,10 @@ class Sandbox():
            os.makedirs(directory_, exist_ok=True)
         self._vdir = None

+        # This is set if anyone requests access to the underlying
+        # directory via get_directory.
+        self._never_cache_vdirs = False
+
     def get_directory(self):
         """Fetches the sandbox root directory

@@ -122,24 +126,28 @@ class Sandbox():

         """
         if self.__allow_real_directory:
+            self._never_cache_vdirs = True
            return self._root
         else:
            raise BstError("You can't use get_directory")

     def get_virtual_directory(self):
-        """Fetches the sandbox root directory
+        """Fetches the sandbox root directory as a virtual Directory.

         The root directory is where artifacts for the base
-        runtime environment should be staged. Only works if
-        BST_VIRTUAL_DIRECTORY is not set.
+        runtime environment should be staged.
+
+        Use caution if you use get_directory and
+        get_virtual_directory. If you alter the contents of the
+        directory returned by get_directory, all objects returned by
+        get_virtual_directory or derived from them are invalid and you
+        must call get_virtual_directory again to get a new copy.

         Returns:
-            (str): The sandbox root directory
+            (Directory): The sandbox root directory

         """
-        if not self._vdir:
-            # BST_CAS_DIRECTORIES is a deliberately hidden environment variable which
-            # can be used to switch on CAS-based directories for testing.
+        if self._vdir is None or self._never_cache_vdirs:
            if 'BST_CAS_DIRECTORIES' in os.environ:
                self._vdir = CasBasedDirectory(self.__context, ref=None)
            else:
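The sandbox.py change above stops caching the virtual directory once get_directory() has been used, because raw filesystem access can invalidate any previously constructed Directory objects. A small self-contained sketch, illustrative only and not BuildStream code, of the same invalidation rule:

    import os

    class ToySandbox:
        # Mirrors the caching rule introduced above, on a plain directory.
        def __init__(self, root):
            self._root = root
            self._vdir = None
            self._never_cache_vdirs = False

        def get_directory(self):
            # Raw access: the caller may now change files behind our back,
            # so any cached virtual view can no longer be trusted.
            self._never_cache_vdirs = True
            return self._root

        def get_virtual_directory(self):
            if self._vdir is None or self._never_cache_vdirs:
                # Recompute the (toy) view instead of reusing a stale one.
                self._vdir = sorted(os.listdir(self._root))
            return self._vdir

Rebuilding the view on every call is more expensive, but it is the only safe option once a caller can mutate the underlying tree directly.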
setup.py
@@ -264,8 +264,9 @@ setup(name='BuildStream',
       license='LGPL',
       long_description=long_description,
       long_description_content_type='text/x-rst; charset=UTF-8',
-      url='https://gitlab.com/BuildStream/buildstream',
+      url='https://buildstream.build',
       project_urls={
+          'Source': 'https://gitlab.com/BuildStream/buildstream',
           'Documentation': 'https://buildstream.gitlab.io/buildstream/',
           'Tracker': 'https://gitlab.com/BuildStream/buildstream/issues',
           'Mailing List': 'https://mail.gnome.org/mailman/listinfo/buildstream-list'
tests/artifactcache/push.py
@@ -228,9 +228,9 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d
         directory = CasBasedDirectory(context, ref=artifact_digest)

         # Push the CasBasedDirectory object
-        directory_digest = cas.push_directory(project, directory)
+        cas.push_directory(project, directory)

-        queue.put(directory_digest.hash)
+        queue.put(directory.ref.hash)
     else:
         queue.put("No remote configured")

tests/frontend/project/elements/rebuild-target.bst (new file)
+kind: compose
+
+build-depends:
+- target.bst
tests/frontend/project/elements/source-bundle/source-bundle-hello.bst (new file)
+kind: import
+description: the kind of this element must implement generate_script() method
+
+sources:
+- kind: local
+  path: files/source-bundle
tests/frontend/project/files/source-bundle/llamas.txt (new file)
+llamas
tests/frontend/rebuild.py (new file)
+import os
+import pytest
+from tests.testutils import cli
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    "project",
+)
+
+
+def strict_args(args, strict):
+    if strict != "strict":
+        return ['--no-strict'] + args
+    return args
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("strict", ["strict", "non-strict"])
+def test_rebuild(datafiles, cli, strict):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'checkout')
+
+    # First build intermediate target.bst
+    result = cli.run(project=project, args=strict_args(['build', 'target.bst'], strict))
+    result.assert_success()
+
+    # Modify base import
+    with open(os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'new.h'), "w") as f:
+        f.write("#define NEW")
+
+    # Rebuild base import and build top-level rebuild-target.bst
+    # In non-strict mode, this does not rebuild intermediate target.bst,
+    # which means that a weakly cached target.bst will be staged as dependency.
+    result = cli.run(project=project, args=strict_args(['build', 'rebuild-target.bst'], strict))
+    result.assert_success()
tests/frontend/source_bundle.py (new file)
+#
+# Copyright (C) 2018 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors: Chandan Singh <csingh43 bloomberg net>
+#
+
+import os
+import tarfile
+
+import pytest
+
+from tests.testutils import cli
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    "project",
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_bundle(cli, tmpdir, datafiles):
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'source-bundle/source-bundle-hello.bst'
+    normal_name = 'source-bundle-source-bundle-hello'
+
+    # Verify that we can correctly produce a source-bundle
+    args = ['source-bundle', element_name, '--directory', str(tmpdir)]
+    result = cli.run(project=project_path, args=args)
+    result.assert_success()
+
+    # Verify that the source-bundle contains our sources and a build script
+    with tarfile.open(os.path.join(str(tmpdir), '{}.tar.gz'.format(normal_name))) as bundle:
+        assert os.path.join(normal_name, 'source', normal_name, 'llamas.txt') in bundle.getnames()
+        assert os.path.join(normal_name, 'build.sh') in bundle.getnames()
