richardmaw-codethink pushed to branch richardmaw/centos-oldgit-test-fixes at BuildStream / buildstream
Commits:
-
f874295f
by Tristan Van Berkom at 2019-01-18T15:59:28Z
-
42a2fe3c
by Tristan Van Berkom at 2019-01-18T15:59:28Z
-
a8713ed2
by Tristan Van Berkom at 2019-01-18T15:59:28Z
-
a895cb2a
by Tristan Van Berkom at 2019-01-18T15:59:28Z
-
4236bcc7
by Tristan Van Berkom at 2019-01-18T16:35:23Z
-
951a8df1
by Angelos Evripiotis at 2019-01-18T18:02:22Z
-
9911023f
by Angelos Evripiotis at 2019-01-18T18:56:21Z
-
c9ce89d2
by Tristan Van Berkom at 2019-01-18T19:36:26Z
-
c536ab6a
by Tristan Van Berkom at 2019-01-18T19:36:26Z
-
99699ffc
by Tristan Van Berkom at 2019-01-18T19:36:26Z
-
8ce483d4
by Tristan Van Berkom at 2019-01-18T19:36:26Z
-
73c7252d
by Tristan Van Berkom at 2019-01-18T20:57:42Z
-
adfb9291
by Tristan Van Berkom at 2019-01-18T21:23:05Z
-
d114a6bd
by Tristan Van Berkom at 2019-01-18T22:07:22Z
-
bf591ade
by Chandan Singh at 2019-01-18T22:09:04Z
-
2233a532
by Chandan Singh at 2019-01-18T22:46:41Z
-
aa3411f9
by Tristan Maat at 2019-01-18T22:47:46Z
-
fff882fe
by Tristan Maat at 2019-01-18T22:47:46Z
-
e230dedb
by Tristan Van Berkom at 2019-01-18T23:23:35Z
-
43797617
by Richard Maw at 2019-01-21T10:41:37Z
-
ce8dab0f
by Richard Maw at 2019-01-21T10:41:37Z
-
50165081
by Richard Maw at 2019-01-21T10:41:37Z
-
8677a256
by Benjamin Schubert at 2019-01-21T10:41:37Z
16 changed files:
- .gitlab-ci.yml
- buildstream/_cas/cascache.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/widget.py
- buildstream/_gitsourcebase.py
- buildstream/plugins/elements/import.py
- buildstream/plugins/sources/bzr.py
- buildstream/utils.py
- tests/artifactcache/expiry.py
- tests/frontend/track.py
- tests/frontend/workspace.py
- + tests/integration/messages.py
- tests/sources/git.py
- tests/testutils/python_repo.py
- tests/testutils/runcli.py
- tests/testutils/site.py
Changes:
| ... | ... | @@ -31,6 +31,7 @@ variables: |
| 31 | 31 |
- df -h
|
| 32 | 32 |
|
| 33 | 33 |
script:
|
| 34 |
+ - mkdir -p "${INTEGRATION_CACHE}"
|
|
| 34 | 35 |
- useradd -Um buildstream
|
| 35 | 36 |
- chown -R buildstream:buildstream .
|
| 36 | 37 |
|
| ... | ... | @@ -70,6 +71,10 @@ tests-python-3.7-stretch: |
| 70 | 71 |
# some of our base dependencies declare it as their runtime dependency.
|
| 71 | 72 |
TOXENV: py37
|
| 72 | 73 |
|
| 74 |
+tests-centos-7.6:
|
|
| 75 |
+ <<: *tests
|
|
| 76 |
+ image: buildstream/testsuite-centos:7.6-5da27168-32c47d1c
|
|
| 77 |
+ |
|
| 73 | 78 |
overnight-fedora-28-aarch64:
|
| 74 | 79 |
image: buildstream/testsuite-fedora:aarch64-28-5da27168-32c47d1c
|
| 75 | 80 |
tags:
|
| ... | ... | @@ -21,7 +21,7 @@ import hashlib |
| 21 | 21 |
import itertools
|
| 22 | 22 |
import os
|
| 23 | 23 |
import stat
|
| 24 |
-import tempfile
|
|
| 24 |
+import errno
|
|
| 25 | 25 |
import uuid
|
| 26 | 26 |
import contextlib
|
| 27 | 27 |
|
| ... | ... | @@ -129,7 +129,7 @@ class CASCache(): |
| 129 | 129 |
else:
|
| 130 | 130 |
return dest
|
| 131 | 131 |
|
| 132 |
- with tempfile.TemporaryDirectory(prefix='tmp', dir=self.tmpdir) as tmpdir:
|
|
| 132 |
+ with utils._tempdir(prefix='tmp', dir=self.tmpdir) as tmpdir:
|
|
| 133 | 133 |
checkoutdir = os.path.join(tmpdir, ref)
|
| 134 | 134 |
self._checkout(checkoutdir, tree)
|
| 135 | 135 |
|
| ... | ... | @@ -374,7 +374,7 @@ class CASCache(): |
| 374 | 374 |
for chunk in iter(lambda: tmp.read(4096), b""):
|
| 375 | 375 |
h.update(chunk)
|
| 376 | 376 |
else:
|
| 377 |
- tmp = stack.enter_context(tempfile.NamedTemporaryFile(dir=self.tmpdir))
|
|
| 377 |
+ tmp = stack.enter_context(utils._tempnamedfile(dir=self.tmpdir))
|
|
| 378 | 378 |
# Set mode bits to 0644
|
| 379 | 379 |
os.chmod(tmp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
|
| 380 | 380 |
|
| ... | ... | @@ -545,11 +545,7 @@ class CASCache(): |
| 545 | 545 |
def remove(self, ref, *, defer_prune=False):
|
| 546 | 546 |
|
| 547 | 547 |
# Remove cache ref
|
| 548 |
- refpath = self._refpath(ref)
|
|
| 549 |
- if not os.path.exists(refpath):
|
|
| 550 |
- raise CASCacheError("Could not find ref '{}'".format(ref))
|
|
| 551 |
- |
|
| 552 |
- os.unlink(refpath)
|
|
| 548 |
+ self._remove_ref(ref)
|
|
| 553 | 549 |
|
| 554 | 550 |
if not defer_prune:
|
| 555 | 551 |
pruned = self.prune()
|
| ... | ... | @@ -626,6 +622,55 @@ class CASCache(): |
| 626 | 622 |
def _refpath(self, ref):
|
| 627 | 623 |
return os.path.join(self.casdir, 'refs', 'heads', ref)
|
| 628 | 624 |
|
| 625 |
+ # _remove_ref()
|
|
| 626 |
+ #
|
|
| 627 |
+ # Removes a ref.
|
|
| 628 |
+ #
|
|
| 629 |
+ # This also takes care of pruning away directories which can
|
|
| 630 |
+ # be removed after having removed the given ref.
|
|
| 631 |
+ #
|
|
| 632 |
+ # Args:
|
|
| 633 |
+ # ref (str): The ref to remove
|
|
| 634 |
+ #
|
|
| 635 |
+ # Raises:
|
|
| 636 |
+ # (CASCacheError): If the ref didn't exist, or a system error
|
|
| 637 |
+ # occurred while removing it
|
|
| 638 |
+ #
|
|
| 639 |
+ def _remove_ref(self, ref):
|
|
| 640 |
+ |
|
| 641 |
+ # Remove the ref itself
|
|
| 642 |
+ refpath = self._refpath(ref)
|
|
| 643 |
+ try:
|
|
| 644 |
+ os.unlink(refpath)
|
|
| 645 |
+ except FileNotFoundError as e:
|
|
| 646 |
+ raise CASCacheError("Could not find ref '{}'".format(ref)) from e
|
|
| 647 |
+ |
|
| 648 |
+ # Now remove any leading directories
|
|
| 649 |
+ basedir = os.path.join(self.casdir, 'refs', 'heads')
|
|
| 650 |
+ components = list(os.path.split(ref))
|
|
| 651 |
+ while components:
|
|
| 652 |
+ components.pop()
|
|
| 653 |
+ refdir = os.path.join(basedir, *components)
|
|
| 654 |
+ |
|
| 655 |
+ # Break out once we reach the base
|
|
| 656 |
+ if refdir == basedir:
|
|
| 657 |
+ break
|
|
| 658 |
+ |
|
| 659 |
+ try:
|
|
| 660 |
+ os.rmdir(refdir)
|
|
| 661 |
+ except FileNotFoundError:
|
|
| 662 |
+ # The parent directory did not exist, but its
|
|
| 663 |
+ # parent directory might still be ready to prune
|
|
| 664 |
+ pass
|
|
| 665 |
+ except OSError as e:
|
|
| 666 |
+ if e.errno == errno.ENOTEMPTY:
|
|
| 667 |
+ # The parent directory was not empty, so we
|
|
| 668 |
+ # cannot prune directories beyond this point
|
|
| 669 |
+ break
|
|
| 670 |
+ |
|
| 671 |
+ # Something went wrong here
|
|
| 672 |
+ raise CASCacheError("System error while removing ref '{}': {}".format(ref, e)) from e
|
|
| 673 |
+ |
|
| 629 | 674 |
# _commit_directory():
|
| 630 | 675 |
#
|
| 631 | 676 |
# Adds local directory to content addressable store.
|
| ... | ... | @@ -797,7 +842,7 @@ class CASCache(): |
| 797 | 842 |
# already in local repository
|
| 798 | 843 |
return objpath
|
| 799 | 844 |
|
| 800 |
- with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
|
|
| 845 |
+ with utils._tempnamedfile(dir=self.tmpdir) as f:
|
|
| 801 | 846 |
remote._fetch_blob(digest, f)
|
| 802 | 847 |
|
| 803 | 848 |
added_digest = self.add_object(path=f.name, link_directly=True)
|
| ... | ... | @@ -807,7 +852,7 @@ class CASCache(): |
| 807 | 852 |
|
| 808 | 853 |
def _batch_download_complete(self, batch):
|
| 809 | 854 |
for digest, data in batch.send():
|
| 810 |
- with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
|
|
| 855 |
+ with utils._tempnamedfile(dir=self.tmpdir) as f:
|
|
| 811 | 856 |
f.write(data)
|
| 812 | 857 |
f.flush()
|
| 813 | 858 |
|
| ... | ... | @@ -904,7 +949,7 @@ class CASCache(): |
| 904 | 949 |
|
| 905 | 950 |
def _fetch_tree(self, remote, digest):
|
| 906 | 951 |
# download but do not store the Tree object
|
| 907 |
- with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
|
|
| 952 |
+ with utils._tempnamedfile(dir=self.tmpdir) as out:
|
|
| 908 | 953 |
remote._fetch_blob(digest, out)
|
| 909 | 954 |
|
| 910 | 955 |
tree = remote_execution_pb2.Tree()
|
| ... | ... | @@ -554,6 +554,12 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command) |
| 554 | 554 |
element, assuming it has already been built and all required
|
| 555 | 555 |
artifacts are in the local cache.
|
| 556 | 556 |
|
| 557 |
+ Use '--' to separate a command from the options to bst,
|
|
| 558 |
+ otherwise bst may respond to them instead. e.g.
|
|
| 559 |
+ |
|
| 560 |
+ \b
|
|
| 561 |
+ bst shell example.bst -- df -h
|
|
| 562 |
+ |
|
| 557 | 563 |
Use the --build option to create a temporary sysroot for
|
| 558 | 564 |
building the element instead.
|
| 559 | 565 |
|
| ... | ... | @@ -647,8 +647,9 @@ class LogLine(Widget): |
| 647 | 647 |
abbrev = False
|
| 648 | 648 |
if message.message_type not in ERROR_MESSAGES \
|
| 649 | 649 |
and not frontend_message and n_lines > self._message_lines:
|
| 650 |
- abbrev = True
|
|
| 651 | 650 |
lines = lines[0:self._message_lines]
|
| 651 |
+ if self._message_lines > 0:
|
|
| 652 |
+ abbrev = True
|
|
| 652 | 653 |
else:
|
| 653 | 654 |
lines[n_lines - 1] = lines[n_lines - 1].rstrip('\n')
|
| 654 | 655 |
|
| ... | ... | @@ -674,7 +675,7 @@ class LogLine(Widget): |
| 674 | 675 |
if self.context is not None and not self.context.log_verbose:
|
| 675 | 676 |
text += self._indent + self._err_profile.fmt("Log file: ")
|
| 676 | 677 |
text += self._indent + self._logfile_widget.render(message) + '\n'
|
| 677 |
- else:
|
|
| 678 |
+ elif self._log_lines > 0:
|
|
| 678 | 679 |
text += self._indent + self._err_profile.fmt("Printing the last {} lines from log file:"
|
| 679 | 680 |
.format(self._log_lines)) + '\n'
|
| 680 | 681 |
text += self._indent + self._logfile_widget.render(message, abbrev=False) + '\n'
|
| ... | ... | @@ -112,7 +112,8 @@ class GitMirror(SourceFetcher): |
| 112 | 112 |
else:
|
| 113 | 113 |
remote_name = "origin"
|
| 114 | 114 |
|
| 115 |
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
| 115 |
+ self.source.call([self.source.host_git, 'fetch', remote_name, '--prune',
|
|
| 116 |
+ '+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*'],
|
|
| 116 | 117 |
fail="Failed to fetch from remote git repository: {}".format(url),
|
| 117 | 118 |
fail_temporarily=True,
|
| 118 | 119 |
cwd=self.mirror)
|
| ... | ... | @@ -28,17 +28,14 @@ some configuration data. |
| 28 | 28 |
The empty configuration is as such:
|
| 29 | 29 |
.. literalinclude:: ../../../buildstream/plugins/elements/import.yaml
|
| 30 | 30 |
:language: yaml
|
| 31 |
- |
|
| 32 |
-See :ref:`built-in functionality documentation <core_buildelement_builtins>` for
|
|
| 33 |
-details on common configuration options for build elements.
|
|
| 34 | 31 |
"""
|
| 35 | 32 |
|
| 36 | 33 |
import os
|
| 37 |
-from buildstream import Element, BuildElement, ElementError
|
|
| 34 |
+from buildstream import Element, ElementError
|
|
| 38 | 35 |
|
| 39 | 36 |
|
| 40 | 37 |
# Element implementation for the 'import' kind.
|
| 41 |
-class ImportElement(BuildElement):
|
|
| 38 |
+class ImportElement(Element):
|
|
| 42 | 39 |
# pylint: disable=attribute-defined-outside-init
|
| 43 | 40 |
|
| 44 | 41 |
# This plugin has been modified to avoid the use of Sandbox.get_directory
|
| ... | ... | @@ -93,10 +90,6 @@ class ImportElement(BuildElement): |
| 93 | 90 |
# And we're done
|
| 94 | 91 |
return '/output'
|
| 95 | 92 |
|
| 96 |
- def prepare(self, sandbox):
|
|
| 97 |
- # We inherit a non-default prepare from BuildElement.
|
|
| 98 |
- Element.prepare(self, sandbox)
|
|
| 99 |
- |
|
| 100 | 93 |
def generate_script(self):
|
| 101 | 94 |
build_root = self.get_variable('build-root')
|
| 102 | 95 |
install_root = self.get_variable('install-root')
|
| ... | ... | @@ -56,6 +56,7 @@ details on common configuration options for sources. |
| 56 | 56 |
|
| 57 | 57 |
import os
|
| 58 | 58 |
import shutil
|
| 59 |
+import fcntl
|
|
| 59 | 60 |
from contextlib import contextmanager
|
| 60 | 61 |
|
| 61 | 62 |
from buildstream import Source, SourceError, Consistency
|
| ... | ... | @@ -84,10 +85,12 @@ class BzrSource(Source): |
| 84 | 85 |
if self.ref is None or self.tracking is None:
|
| 85 | 86 |
return Consistency.INCONSISTENT
|
| 86 | 87 |
|
| 87 |
- if self._check_ref():
|
|
| 88 |
- return Consistency.CACHED
|
|
| 89 |
- else:
|
|
| 90 |
- return Consistency.RESOLVED
|
|
| 88 |
+ # Lock for the _check_ref()
|
|
| 89 |
+ with self._locked():
|
|
| 90 |
+ if self._check_ref():
|
|
| 91 |
+ return Consistency.CACHED
|
|
| 92 |
+ else:
|
|
| 93 |
+ return Consistency.RESOLVED
|
|
| 91 | 94 |
|
| 92 | 95 |
def load_ref(self, node):
|
| 93 | 96 |
self.ref = self.node_get_member(node, str, 'ref', None)
|
| ... | ... | @@ -100,7 +103,7 @@ class BzrSource(Source): |
| 100 | 103 |
|
| 101 | 104 |
def track(self):
|
| 102 | 105 |
with self.timed_activity("Tracking {}".format(self.url),
|
| 103 |
- silent_nested=True):
|
|
| 106 |
+ silent_nested=True), self._locked():
|
|
| 104 | 107 |
self._ensure_mirror(skip_ref_check=True)
|
| 105 | 108 |
ret, out = self.check_output([self.host_bzr, "version-info",
|
| 106 | 109 |
"--custom", "--template={revno}",
|
| ... | ... | @@ -114,7 +117,7 @@ class BzrSource(Source): |
| 114 | 117 |
|
| 115 | 118 |
def fetch(self):
|
| 116 | 119 |
with self.timed_activity("Fetching {}".format(self.url),
|
| 117 |
- silent_nested=True):
|
|
| 120 |
+ silent_nested=True), self._locked():
|
|
| 118 | 121 |
self._ensure_mirror()
|
| 119 | 122 |
|
| 120 | 123 |
def stage(self, directory):
|
| ... | ... | @@ -141,6 +144,26 @@ class BzrSource(Source): |
| 141 | 144 |
"--directory={}".format(directory), url],
|
| 142 | 145 |
fail="Failed to switch workspace's parent branch to {}".format(url))
|
| 143 | 146 |
|
| 147 |
+ # _locked()
|
|
| 148 |
+ #
|
|
| 149 |
+ # This context manager ensures exclusive access to the
|
|
| 150 |
+ # bzr repository.
|
|
| 151 |
+ #
|
|
| 152 |
+ @contextmanager
|
|
| 153 |
+ def _locked(self):
|
|
| 154 |
+ lockdir = os.path.join(self.get_mirror_directory(), 'locks')
|
|
| 155 |
+ lockfile = os.path.join(
|
|
| 156 |
+ lockdir,
|
|
| 157 |
+ utils.url_directory_name(self.original_url) + '.lock'
|
|
| 158 |
+ )
|
|
| 159 |
+ os.makedirs(lockdir, exist_ok=True)
|
|
| 160 |
+ with open(lockfile, 'w') as lock:
|
|
| 161 |
+ fcntl.flock(lock, fcntl.LOCK_EX)
|
|
| 162 |
+ try:
|
|
| 163 |
+ yield
|
|
| 164 |
+ finally:
|
|
| 165 |
+ fcntl.flock(lock, fcntl.LOCK_UN)
|
|
| 166 |
+ |
|
| 144 | 167 |
def _check_ref(self):
|
| 145 | 168 |
# If the mirror doesn't exist yet, then we don't have the ref
|
| 146 | 169 |
if not os.path.exists(self._get_branch_dir()):
|
| ... | ... | @@ -157,83 +180,27 @@ class BzrSource(Source): |
| 157 | 180 |
return os.path.join(self.get_mirror_directory(),
|
| 158 | 181 |
utils.url_directory_name(self.original_url))
|
| 159 | 182 |
|
| 160 |
- def _atomic_replace_mirrordir(self, srcdir):
|
|
| 161 |
- """Helper function to safely replace the mirror dir"""
|
|
| 183 |
+ def _ensure_mirror(self, skip_ref_check=False):
|
|
| 184 |
+ mirror_dir = self._get_mirror_dir()
|
|
| 185 |
+ bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
|
|
| 186 |
+ if not os.path.exists(bzr_metadata_dir):
|
|
| 187 |
+ self.call([self.host_bzr, "init-repo", "--no-trees", mirror_dir],
|
|
| 188 |
+ fail="Failed to initialize bzr repository")
|
|
| 189 |
+ |
|
| 190 |
+ branch_dir = os.path.join(mirror_dir, self.tracking)
|
|
| 191 |
+ branch_url = self.url + "/" + self.tracking
|
|
| 192 |
+ if not os.path.exists(branch_dir):
|
|
| 193 |
+ # `bzr branch` the branch if it doesn't exist
|
|
| 194 |
+ # to get the upstream code
|
|
| 195 |
+ self.call([self.host_bzr, "branch", branch_url, branch_dir],
|
|
| 196 |
+ fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
|
|
| 162 | 197 |
|
| 163 |
- if not os.path.exists(self._get_mirror_dir()):
|
|
| 164 |
- # Just move the srcdir to the mirror dir
|
|
| 165 |
- try:
|
|
| 166 |
- os.rename(srcdir, self._get_mirror_dir())
|
|
| 167 |
- except OSError as e:
|
|
| 168 |
- raise SourceError("{}: Failed to move srcdir '{}' to mirror dir '{}'"
|
|
| 169 |
- .format(str(self), srcdir, self._get_mirror_dir())) from e
|
|
| 170 | 198 |
else:
|
| 171 |
- # Atomically swap the backup dir.
|
|
| 172 |
- backupdir = self._get_mirror_dir() + ".bak"
|
|
| 173 |
- try:
|
|
| 174 |
- os.rename(self._get_mirror_dir(), backupdir)
|
|
| 175 |
- except OSError as e:
|
|
| 176 |
- raise SourceError("{}: Failed to move mirrordir '{}' to backup dir '{}'"
|
|
| 177 |
- .format(str(self), self._get_mirror_dir(), backupdir)) from e
|
|
| 199 |
+ # `bzr pull` the branch if it does exist
|
|
| 200 |
+ # to get any changes to the upstream code
|
|
| 201 |
+ self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
|
|
| 202 |
+ fail="Failed to pull new changes for {}".format(branch_dir))
|
|
| 178 | 203 |
|
| 179 |
- try:
|
|
| 180 |
- os.rename(srcdir, self._get_mirror_dir())
|
|
| 181 |
- except OSError as e:
|
|
| 182 |
- # Attempt to put the backup back!
|
|
| 183 |
- os.rename(backupdir, self._get_mirror_dir())
|
|
| 184 |
- raise SourceError("{}: Failed to replace bzr repo '{}' with '{}"
|
|
| 185 |
- .format(str(self), srcdir, self._get_mirror_dir())) from e
|
|
| 186 |
- finally:
|
|
| 187 |
- if os.path.exists(backupdir):
|
|
| 188 |
- shutil.rmtree(backupdir)
|
|
| 189 |
- |
|
| 190 |
- @contextmanager
|
|
| 191 |
- def _atomic_repodir(self):
|
|
| 192 |
- """Context manager for working in a copy of the bzr repository
|
|
| 193 |
- |
|
| 194 |
- Yields:
|
|
| 195 |
- (str): A path to the copy of the bzr repo
|
|
| 196 |
- |
|
| 197 |
- This should be used because bzr does not give any guarantees of
|
|
| 198 |
- atomicity, and aborting an operation at the wrong time (or
|
|
| 199 |
- accidentally running multiple concurrent operations) can leave the
|
|
| 200 |
- repo in an inconsistent state.
|
|
| 201 |
- """
|
|
| 202 |
- with self.tempdir() as repodir:
|
|
| 203 |
- mirror_dir = self._get_mirror_dir()
|
|
| 204 |
- if os.path.exists(mirror_dir):
|
|
| 205 |
- try:
|
|
| 206 |
- # shutil.copytree doesn't like it if destination exists
|
|
| 207 |
- shutil.rmtree(repodir)
|
|
| 208 |
- shutil.copytree(mirror_dir, repodir)
|
|
| 209 |
- except (shutil.Error, OSError) as e:
|
|
| 210 |
- raise SourceError("{}: Failed to copy bzr repo from '{}' to '{}'"
|
|
| 211 |
- .format(str(self), mirror_dir, repodir)) from e
|
|
| 212 |
- |
|
| 213 |
- yield repodir
|
|
| 214 |
- self._atomic_replace_mirrordir(repodir)
|
|
| 215 |
- |
|
| 216 |
- def _ensure_mirror(self, skip_ref_check=False):
|
|
| 217 |
- with self._atomic_repodir() as repodir:
|
|
| 218 |
- # Initialize repo if no metadata
|
|
| 219 |
- bzr_metadata_dir = os.path.join(repodir, ".bzr")
|
|
| 220 |
- if not os.path.exists(bzr_metadata_dir):
|
|
| 221 |
- self.call([self.host_bzr, "init-repo", "--no-trees", repodir],
|
|
| 222 |
- fail="Failed to initialize bzr repository")
|
|
| 223 |
- |
|
| 224 |
- branch_dir = os.path.join(repodir, self.tracking)
|
|
| 225 |
- branch_url = self.url + "/" + self.tracking
|
|
| 226 |
- if not os.path.exists(branch_dir):
|
|
| 227 |
- # `bzr branch` the branch if it doesn't exist
|
|
| 228 |
- # to get the upstream code
|
|
| 229 |
- self.call([self.host_bzr, "branch", branch_url, branch_dir],
|
|
| 230 |
- fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
|
|
| 231 |
- |
|
| 232 |
- else:
|
|
| 233 |
- # `bzr pull` the branch if it does exist
|
|
| 234 |
- # to get any changes to the upstream code
|
|
| 235 |
- self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
|
|
| 236 |
- fail="Failed to pull new changes for {}".format(branch_dir))
|
|
| 237 | 204 |
if not skip_ref_check and not self._check_ref():
|
| 238 | 205 |
raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
|
| 239 | 206 |
reason="ref-not-mirrored")
|
| ... | ... | @@ -1032,6 +1032,36 @@ def _tempdir(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-bu |
| 1032 | 1032 |
cleanup_tempdir()
|
| 1033 | 1033 |
|
| 1034 | 1034 |
|
| 1035 |
+# _tempnamedfile()
|
|
| 1036 |
+#
|
|
| 1037 |
+# A context manager for doing work on an open temporary file
|
|
| 1038 |
+# which is guaranteed to be named and have an entry in the filesystem.
|
|
| 1039 |
+#
|
|
| 1040 |
+# Args:
|
|
| 1041 |
+# dir (str): A path to a parent directory for the temporary file
|
|
| 1042 |
+# suffix (str): A suffix for the temporary file name
|
|
| 1043 |
+# prefix (str): A prefix for the temporary file name
|
|
| 1044 |
+#
|
|
| 1045 |
+# Yields:
|
|
| 1046 |
+# (str): The temporary file handle
|
|
| 1047 |
+#
|
|
| 1048 |
+# Do not use tempfile.NamedTemporaryFile() directly, as this will
|
|
| 1049 |
+# leak files on the filesystem when BuildStream exits a process
|
|
| 1050 |
+# on SIGTERM.
|
|
| 1051 |
+#
|
|
| 1052 |
+@contextmanager
|
|
| 1053 |
+def _tempnamedfile(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-builtin
|
|
| 1054 |
+ temp = None
|
|
| 1055 |
+ |
|
| 1056 |
+ def close_tempfile():
|
|
| 1057 |
+ if temp is not None:
|
|
| 1058 |
+ temp.close()
|
|
| 1059 |
+ |
|
| 1060 |
+ with _signals.terminator(close_tempfile), \
|
|
| 1061 |
+ tempfile.NamedTemporaryFile(suffix=suffix, prefix=prefix, dir=dir) as temp:
|
|
| 1062 |
+ yield temp
|
|
| 1063 |
+ |
|
| 1064 |
+ |
|
| 1035 | 1065 |
# _kill_process_tree()
|
| 1036 | 1066 |
#
|
| 1037 | 1067 |
# Brutally murder a process and all of its children
|
| ... | ... | @@ -382,6 +382,7 @@ def test_extract_expiry(cli, datafiles, tmpdir): |
| 382 | 382 |
res = cli.run(project=project, args=['checkout', 'target.bst', os.path.join(str(tmpdir), 'checkout')])
|
| 383 | 383 |
res.assert_success()
|
| 384 | 384 |
|
| 385 |
+ # Get a snapshot of the extracts in advance
|
|
| 385 | 386 |
extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
|
| 386 | 387 |
extracts = os.listdir(extractdir)
|
| 387 | 388 |
assert(len(extracts) == 1)
|
| ... | ... | @@ -395,3 +396,16 @@ def test_extract_expiry(cli, datafiles, tmpdir): |
| 395 | 396 |
|
| 396 | 397 |
# Now the extract should be removed.
|
| 397 | 398 |
assert not os.path.exists(extract)
|
| 399 |
+ |
|
| 400 |
+ # As an added bonus, let's ensure that no directories have been left behind
|
|
| 401 |
+ #
|
|
| 402 |
+ # Now we should have a directory for the cached target2.bst, which
|
|
| 403 |
+ # replaced target.bst in the cache, we should not have a directory
|
|
| 404 |
+ # for the target.bst
|
|
| 405 |
+ refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
|
|
| 406 |
+ refsdirtest = os.path.join(refsdir, 'test')
|
|
| 407 |
+ refsdirtarget = os.path.join(refsdirtest, 'target')
|
|
| 408 |
+ refsdirtarget2 = os.path.join(refsdirtest, 'target2')
|
|
| 409 |
+ |
|
| 410 |
+ assert os.path.isdir(refsdirtarget2)
|
|
| 411 |
+ assert not os.path.exists(refsdirtarget)
|
| ... | ... | @@ -73,14 +73,36 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind): |
| 73 | 73 |
assert not os.path.exists(os.path.join(project, 'project.refs'))
|
| 74 | 74 |
|
| 75 | 75 |
|
| 76 |
+# NOTE:
|
|
| 77 |
+#
|
|
| 78 |
+# This test checks that recursive tracking works by observing
|
|
| 79 |
+# element states after running a recursive tracking operation.
|
|
| 80 |
+#
|
|
| 81 |
+# However, this test is ALSO valuable as it stresses the source
|
|
| 82 |
+# plugins in a situation where many source plugins are operating
|
|
| 83 |
+# at once on the same backing repository.
|
|
| 84 |
+#
|
|
| 85 |
+# Do not change this test to use a separate 'Repo' per element
|
|
| 86 |
+# as that would defeat the purpose of the stress test, otherwise
|
|
| 87 |
+# please refactor that aspect into another test.
|
|
| 88 |
+#
|
|
| 76 | 89 |
@pytest.mark.datafiles(DATA_DIR)
|
| 90 |
+@pytest.mark.parametrize("amount", [(1), (10)])
|
|
| 77 | 91 |
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
| 78 |
-def test_track_recurse(cli, tmpdir, datafiles, kind):
|
|
| 92 |
+def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
|
|
| 79 | 93 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 80 | 94 |
dev_files_path = os.path.join(project, 'files', 'dev-files')
|
| 81 | 95 |
element_path = os.path.join(project, 'elements')
|
| 82 |
- element_dep_name = 'track-test-dep-{}.bst'.format(kind)
|
|
| 83 |
- element_target_name = 'track-test-target-{}.bst'.format(kind)
|
|
| 96 |
+ |
|
| 97 |
+ # Try to actually launch as many fetch jobs as possible at the same time
|
|
| 98 |
+ #
|
|
| 99 |
+ # This stresses the Source plugins and helps to ensure that
|
|
| 100 |
+ # they handle concurrent access to the store correctly.
|
|
| 101 |
+ cli.configure({
|
|
| 102 |
+ 'scheduler': {
|
|
| 103 |
+ 'fetchers': amount,
|
|
| 104 |
+ }
|
|
| 105 |
+ })
|
|
| 84 | 106 |
|
| 85 | 107 |
# Create our repo object of the given source type with
|
| 86 | 108 |
# the dev files, and then collect the initial ref.
|
| ... | ... | @@ -89,18 +111,26 @@ def test_track_recurse(cli, tmpdir, datafiles, kind): |
| 89 | 111 |
ref = repo.create(dev_files_path)
|
| 90 | 112 |
|
| 91 | 113 |
# Write out our test targets
|
| 92 |
- generate_element(repo, os.path.join(element_path, element_dep_name))
|
|
| 93 |
- generate_element(repo, os.path.join(element_path, element_target_name),
|
|
| 94 |
- dep_name=element_dep_name)
|
|
| 114 |
+ element_names = []
|
|
| 115 |
+ last_element_name = None
|
|
| 116 |
+ for i in range(amount + 1):
|
|
| 117 |
+ element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
|
|
| 118 |
+ filename = os.path.join(element_path, element_name)
|
|
| 119 |
+ |
|
| 120 |
+ element_names.append(element_name)
|
|
| 121 |
+ |
|
| 122 |
+ generate_element(repo, filename, dep_name=last_element_name)
|
|
| 123 |
+ last_element_name = element_name
|
|
| 95 | 124 |
|
| 96 | 125 |
# Assert that a fetch is needed
|
| 97 |
- assert cli.get_element_state(project, element_dep_name) == 'no reference'
|
|
| 98 |
- assert cli.get_element_state(project, element_target_name) == 'no reference'
|
|
| 126 |
+ states = cli.get_element_states(project, last_element_name)
|
|
| 127 |
+ for element_name in element_names:
|
|
| 128 |
+ assert states[element_name] == 'no reference'
|
|
| 99 | 129 |
|
| 100 | 130 |
# Now first try to track it
|
| 101 | 131 |
result = cli.run(project=project, args=[
|
| 102 | 132 |
'source', 'track', '--deps', 'all',
|
| 103 |
- element_target_name])
|
|
| 133 |
+ last_element_name])
|
|
| 104 | 134 |
result.assert_success()
|
| 105 | 135 |
|
| 106 | 136 |
# And now fetch it: The Source has probably already cached the
|
| ... | ... | @@ -109,12 +139,16 @@ def test_track_recurse(cli, tmpdir, datafiles, kind): |
| 109 | 139 |
# is the job of fetch.
|
| 110 | 140 |
result = cli.run(project=project, args=[
|
| 111 | 141 |
'source', 'fetch', '--deps', 'all',
|
| 112 |
- element_target_name])
|
|
| 142 |
+ last_element_name])
|
|
| 113 | 143 |
result.assert_success()
|
| 114 | 144 |
|
| 115 |
- # Assert that the dependency is buildable and the target is waiting
|
|
| 116 |
- assert cli.get_element_state(project, element_dep_name) == 'buildable'
|
|
| 117 |
- assert cli.get_element_state(project, element_target_name) == 'waiting'
|
|
| 145 |
+ # Assert that the base is buildable and the rest are waiting
|
|
| 146 |
+ states = cli.get_element_states(project, last_element_name)
|
|
| 147 |
+ for element_name in element_names:
|
|
| 148 |
+ if element_name == element_names[0]:
|
|
| 149 |
+ assert states[element_name] == 'buildable'
|
|
| 150 |
+ else:
|
|
| 151 |
+ assert states[element_name] == 'waiting'
|
|
| 118 | 152 |
|
| 119 | 153 |
|
| 120 | 154 |
@pytest.mark.datafiles(DATA_DIR)
|
| ... | ... | @@ -204,6 +204,7 @@ def test_open_multi(cli, tmpdir, datafiles): |
| 204 | 204 |
assert not ('.bzr' in workspace_lsdir)
|
| 205 | 205 |
|
| 206 | 206 |
|
| 207 |
+@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
|
|
| 207 | 208 |
@pytest.mark.datafiles(DATA_DIR)
|
| 208 | 209 |
def test_open_multi_unwritable(cli, tmpdir, datafiles):
|
| 209 | 210 |
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
| 1 |
+#
|
|
| 2 |
+# Copyright (C) 2018 Codethink Limited
|
|
| 3 |
+#
|
|
| 4 |
+# This program is free software; you can redistribute it and/or
|
|
| 5 |
+# modify it under the terms of the GNU Lesser General Public
|
|
| 6 |
+# License as published by the Free Software Foundation; either
|
|
| 7 |
+# version 2 of the License, or (at your option) any later version.
|
|
| 8 |
+#
|
|
| 9 |
+# This library is distributed in the hope that it will be useful,
|
|
| 10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
| 11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
| 12 |
+# Lesser General Public License for more details.
|
|
| 13 |
+#
|
|
| 14 |
+# You should have received a copy of the GNU Lesser General Public
|
|
| 15 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
| 16 |
+#
|
|
| 17 |
+# Authors: Tristan Maat <tristan maat codethink co uk>
|
|
| 18 |
+#
|
|
| 19 |
+ |
|
| 20 |
+import os
|
|
| 21 |
+import pytest
|
|
| 22 |
+ |
|
| 23 |
+from buildstream import _yaml
|
|
| 24 |
+from buildstream._exceptions import ErrorDomain
|
|
| 25 |
+ |
|
| 26 |
+from tests.testutils import cli_integration as cli
|
|
| 27 |
+from tests.testutils.site import HAVE_BWRAP, IS_LINUX
|
|
| 28 |
+ |
|
| 29 |
+ |
|
| 30 |
+pytestmark = pytest.mark.integration
|
|
| 31 |
+ |
|
| 32 |
+ |
|
| 33 |
+# Project directory
|
|
| 34 |
+DATA_DIR = os.path.join(
|
|
| 35 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 36 |
+ "project",
|
|
| 37 |
+)
|
|
| 38 |
+ |
|
| 39 |
+ |
|
| 40 |
+@pytest.mark.integration
|
|
| 41 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 42 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 43 |
+def test_disable_message_lines(cli, tmpdir, datafiles):
|
|
| 44 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 45 |
+ element_path = os.path.join(project, 'elements')
|
|
| 46 |
+ element_name = 'message.bst'
|
|
| 47 |
+ |
|
| 48 |
+ element = {
|
|
| 49 |
+ 'kind': 'manual',
|
|
| 50 |
+ 'depends': [{
|
|
| 51 |
+ 'filename': 'base.bst'
|
|
| 52 |
+ }],
|
|
| 53 |
+ 'config': {
|
|
| 54 |
+ 'build-commands':
|
|
| 55 |
+ ['echo "Silly message"'],
|
|
| 56 |
+ 'strip-commands': []
|
|
| 57 |
+ }
|
|
| 58 |
+ }
|
|
| 59 |
+ |
|
| 60 |
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
|
|
| 61 |
+ _yaml.dump(element, os.path.join(element_path, element_name))
|
|
| 62 |
+ |
|
| 63 |
+ # First we check that we get the "Silly message"
|
|
| 64 |
+ result = cli.run(project=project, args=["build", element_name])
|
|
| 65 |
+ result.assert_success()
|
|
| 66 |
+ assert 'echo "Silly message"' in result.stderr
|
|
| 67 |
+ |
|
| 68 |
+ # Let's now build it again, but with --message-lines 0
|
|
| 69 |
+ cli.remove_artifact_from_cache(project, element_name)
|
|
| 70 |
+ result = cli.run(project=project, args=["--message-lines", "0",
|
|
| 71 |
+ "build", element_name])
|
|
| 72 |
+ result.assert_success()
|
|
| 73 |
+ assert "Message contains " not in result.stderr
|
|
| 74 |
+ |
|
| 75 |
+ |
|
| 76 |
+@pytest.mark.integration
|
|
| 77 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 78 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 79 |
+def test_disable_error_lines(cli, tmpdir, datafiles):
|
|
| 80 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 81 |
+ element_path = os.path.join(project, 'elements')
|
|
| 82 |
+ element_name = 'message.bst'
|
|
| 83 |
+ |
|
| 84 |
+ element = {
|
|
| 85 |
+ 'kind': 'manual',
|
|
| 86 |
+ 'depends': [{
|
|
| 87 |
+ 'filename': 'base.bst'
|
|
| 88 |
+ }],
|
|
| 89 |
+ 'config': {
|
|
| 90 |
+ 'build-commands':
|
|
| 91 |
+ ['This is a syntax error > >'],
|
|
| 92 |
+ 'strip-commands': []
|
|
| 93 |
+ }
|
|
| 94 |
+ }
|
|
| 95 |
+ |
|
| 96 |
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
|
|
| 97 |
+ _yaml.dump(element, os.path.join(element_path, element_name))
|
|
| 98 |
+ |
|
| 99 |
+ # First we check that we get the syntax error
|
|
| 100 |
+ result = cli.run(project=project, args=["--error-lines", "0",
|
|
| 101 |
+ "build", element_name])
|
|
| 102 |
+ result.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 103 |
+ assert "This is a syntax error" in result.stderr
|
|
| 104 |
+ |
|
| 105 |
+ # Let's now build it again, but with --error-lines 0
|
|
| 106 |
+ cli.remove_artifact_from_cache(project, element_name)
|
|
| 107 |
+ result = cli.run(project=project, args=["--error-lines", "0",
|
|
| 108 |
+ "build", element_name])
|
|
| 109 |
+ result.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 110 |
+ assert "Printing the last" not in result.stderr
|
| ... | ... | @@ -30,7 +30,7 @@ from buildstream import _yaml |
| 30 | 30 |
from buildstream.plugin import CoreWarnings
|
| 31 | 31 |
|
| 32 | 32 |
from tests.testutils import cli, create_repo
|
| 33 |
-from tests.testutils.site import HAVE_GIT
|
|
| 33 |
+from tests.testutils.site import HAVE_GIT, HAVE_OLD_GIT
|
|
| 34 | 34 |
|
| 35 | 35 |
DATA_DIR = os.path.join(
|
| 36 | 36 |
os.path.dirname(os.path.realpath(__file__)),
|
| ... | ... | @@ -664,6 +664,7 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail): |
| 664 | 664 |
|
| 665 | 665 |
|
| 666 | 666 |
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
| 667 |
+@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git rm does not update .gitmodules")
|
|
| 667 | 668 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
| 668 | 669 |
@pytest.mark.parametrize("fail", ['warn', 'error'])
|
| 669 | 670 |
def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
|
| ... | ... | @@ -772,6 +773,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit): |
| 772 | 773 |
|
| 773 | 774 |
|
| 774 | 775 |
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
| 776 |
+@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git describe lacks --first-parent")
|
|
| 775 | 777 |
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
| 776 | 778 |
@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
|
| 777 | 779 |
@pytest.mark.parametrize("tag_type", [('annotated'), ('lightweight')])
|
| 1 |
-from setuptools.sandbox import run_setup
|
|
| 2 | 1 |
import os
|
| 3 | 2 |
import pytest
|
| 4 | 3 |
import re
|
| 5 | 4 |
import shutil
|
| 5 |
+import subprocess
|
|
| 6 | 6 |
|
| 7 | 7 |
|
| 8 | 8 |
SETUP_TEMPLATE = '''\
|
| ... | ... | @@ -88,7 +88,9 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1'): |
| 88 | 88 |
f.write(INIT_TEMPLATE.format(name=name))
|
| 89 | 89 |
os.chmod(main_file, 0o644)
|
| 90 | 90 |
|
| 91 |
- run_setup(setup_file, ['sdist'])
|
|
| 91 |
+ # Run sdist with a fresh process
|
|
| 92 |
+ p = subprocess.run(['python3', 'setup.py', 'sdist'], cwd=tmpdir)
|
|
| 93 |
+ assert p.returncode == 0
|
|
| 92 | 94 |
|
| 93 | 95 |
# create directory for this package in pypi resulting in a directory
|
| 94 | 96 |
# tree resembling the following structure:
|
| ... | ... | @@ -245,8 +245,14 @@ class Cli(): |
| 245 | 245 |
|
| 246 | 246 |
def remove_artifact_from_cache(self, project, element_name,
|
| 247 | 247 |
*, cache_dir=None):
|
| 248 |
+ # Read configuration to figure out where artifacts are stored
|
|
| 248 | 249 |
if not cache_dir:
|
| 249 |
- cache_dir = os.path.join(project, 'cache', 'artifacts')
|
|
| 250 |
+ default = os.path.join(project, 'cache', 'artifacts')
|
|
| 251 |
+ |
|
| 252 |
+ if self.config is not None:
|
|
| 253 |
+ cache_dir = self.config.get('artifactdir', default)
|
|
| 254 |
+ else:
|
|
| 255 |
+ cache_dir = default
|
|
| 250 | 256 |
|
| 251 | 257 |
cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
|
| 252 | 258 |
|
| ... | ... | @@ -375,6 +381,9 @@ class Cli(): |
| 375 | 381 |
# Fetch an element state by name by
|
| 376 | 382 |
# invoking bst show on the project with the CLI
|
| 377 | 383 |
#
|
| 384 |
+ # If you need to get the states of multiple elements,
|
|
| 385 |
+ # then use get_element_states(s) instead.
|
|
| 386 |
+ #
|
|
| 378 | 387 |
def get_element_state(self, project, element_name):
|
| 379 | 388 |
result = self.run(project=project, silent=True, args=[
|
| 380 | 389 |
'show',
|
| ... | ... | @@ -385,6 +394,25 @@ class Cli(): |
| 385 | 394 |
result.assert_success()
|
| 386 | 395 |
return result.output.strip()
|
| 387 | 396 |
|
| 397 |
+ # Fetch the states of elements for a given target / deps
|
|
| 398 |
+ #
|
|
| 399 |
+ # Returns a dictionary with the element names as keys
|
|
| 400 |
+ #
|
|
| 401 |
+ def get_element_states(self, project, target, deps='all'):
|
|
| 402 |
+ result = self.run(project=project, silent=True, args=[
|
|
| 403 |
+ 'show',
|
|
| 404 |
+ '--deps', deps,
|
|
| 405 |
+ '--format', '%{name}||%{state}',
|
|
| 406 |
+ target
|
|
| 407 |
+ ])
|
|
| 408 |
+ result.assert_success()
|
|
| 409 |
+ lines = result.output.splitlines()
|
|
| 410 |
+ states = {}
|
|
| 411 |
+ for line in lines:
|
|
| 412 |
+ split = line.split(sep='||')
|
|
| 413 |
+ states[split[0]] = split[1]
|
|
| 414 |
+ return states
|
|
| 415 |
+ |
|
| 388 | 416 |
# Fetch an element's cache key by invoking bst show
|
| 389 | 417 |
# on the project with the CLI
|
| 390 | 418 |
#
|
| ... | ... | @@ -2,6 +2,7 @@ |
| 2 | 2 |
# so we dont have to repeat this everywhere
|
| 3 | 3 |
#
|
| 4 | 4 |
import os
|
| 5 |
+import subprocess
|
|
| 5 | 6 |
import sys
|
| 6 | 7 |
|
| 7 | 8 |
from buildstream import _site, utils, ProgramNotFoundError
|
| ... | ... | @@ -16,8 +17,12 @@ except ProgramNotFoundError: |
| 16 | 17 |
try:
|
| 17 | 18 |
utils.get_host_tool('git')
|
| 18 | 19 |
HAVE_GIT = True
|
| 20 |
+ out = str(subprocess.check_output(['git', '--version']), "utf-8")
|
|
| 21 |
+ version = tuple(int(x) for x in out.split(' ', 2)[2].split('.'))
|
|
| 22 |
+ HAVE_OLD_GIT = version < (1, 8, 5)
|
|
| 19 | 23 |
except ProgramNotFoundError:
|
| 20 | 24 |
HAVE_GIT = False
|
| 25 |
+ HAVE_OLD_GIT = False
|
|
| 21 | 26 |
|
| 22 | 27 |
try:
|
| 23 | 28 |
utils.get_host_tool('ostree')
|
