Tiago Gomes pushed to branch tiagogomes/issue-520 at BuildStream / buildstream
Commits:

- 6d349610 by Javier Jardón at 2018-08-07T10:53:23Z
- 493d19d2 by James Ennis at 2018-08-07T11:08:36Z
- fc9869e2 by James Ennis at 2018-08-07T11:56:21Z
- 42aa3999 by William Salmon at 2018-08-07T13:41:02Z
- 2ceb5dec by Will Salmon at 2018-08-07T14:46:36Z
- eee4b674 by Jürg Billeter at 2018-08-07T15:36:35Z
- ea27e389 by Jürg Billeter at 2018-08-07T15:36:35Z
- fa5a59f0 by Jürg Billeter at 2018-08-07T16:48:21Z
- ed653fbc by Chandan Singh at 2018-08-08T00:06:28Z
- 65f382f1 by Chandan Singh at 2018-08-08T10:52:32Z
- e9cedc01 by Tiago Gomes at 2018-08-08T14:42:29Z
9 changed files:
- .gitlab-ci.yml
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_pipeline.py
- buildstream/plugins/sources/git.py
- doc/source/install_linux_distro.rst
- setup.py
- tests/sources/git.py
Changes:
.gitlab-ci.yml
@@ -26,15 +26,6 @@ source_dist:
   - tar -ztf dist/*
   - tarball=$(cd dist && echo $(ls *))
 
-  # Create an installer script
-  - |
-    cat > dist/install.sh << EOF
-    #!/bin/sh
-    tar -zxf ${tarball}
-    cd ${tarball%.tar.gz}
-    pip3 install --no-index .
-    EOF
-
   # unpack tarball as `dist/buildstream` directory
   - |
     cat > dist/unpack.sh << EOF
@@ -44,7 +35,6 @@ source_dist:
     EOF
 
   # Make our helpers executable
-  - chmod +x dist/install.sh
   - chmod +x dist/unpack.sh
   artifacts:
     paths:
buildstream/_artifactcache/artifactcache.py
@@ -80,6 +80,8 @@ class ArtifactCache():
         self.context = context
         self.required_artifacts = set()
         self.extractdir = os.path.join(context.artifactdir, 'extract')
+        self.tmpdir = os.path.join(context.artifactdir, 'tmp')
+
         self.max_size = context.cache_quota
         self.estimated_size = None
 
@@ -89,7 +91,8 @@ class ArtifactCache():
         self._local = False
         self.cache_size = None
 
-        os.makedirs(context.artifactdir, exist_ok=True)
+        os.makedirs(self.extractdir, exist_ok=True)
+        os.makedirs(self.tmpdir, exist_ok=True)
 
     ################################################
     # Methods implemented on the abstract class    #
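
For context: the new tmpdir lives under the same artifactdir as the rest of the cache, which presumably keeps temporary files on the same filesystem as the object store, so finished files can be linked or renamed into place rather than copied across devices. A minimal standalone sketch of that pattern, assuming an illustrative directory layout (the paths and the final rename are placeholders, not the actual CASCache code):

    import hashlib
    import os
    import tempfile

    # Illustrative layout mirroring ArtifactCache.__init__() after this change
    artifactdir = '/tmp/example-artifactdir'            # placeholder path
    extractdir = os.path.join(artifactdir, 'extract')
    tmpdir = os.path.join(artifactdir, 'tmp')
    os.makedirs(extractdir, exist_ok=True)
    os.makedirs(tmpdir, exist_ok=True)

    # Stage data in tmpdir, then move it into the store: because tmpdir shares
    # a filesystem with artifactdir, the final rename is atomic and never a
    # cross-device copy.
    data = b'example blob contents'
    with tempfile.NamedTemporaryFile(dir=tmpdir, delete=False) as out:
        out.write(data)
    digest = hashlib.sha256(data).hexdigest()
    os.rename(out.name, os.path.join(artifactdir, digest))  # hypothetical final step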
buildstream/_artifactcache/cascache.py
@@ -56,7 +56,7 @@ class CASCache(ArtifactCache):
         super().__init__(context)
 
         self.casdir = os.path.join(context.artifactdir, 'cas')
-        os.makedirs(os.path.join(self.casdir, 'tmp'), exist_ok=True)
+        os.makedirs(self.casdir, exist_ok=True)
 
         self._enable_push = enable_push
 
@@ -85,8 +85,6 @@ class CASCache(ArtifactCache):
             # artifact has already been extracted
             return dest
 
-        os.makedirs(self.extractdir, exist_ok=True)
-
         with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
             checkoutdir = os.path.join(tmpdir, ref)
             self._checkout(checkoutdir, tree)
@@ -394,7 +392,7 @@ class CASCache(ArtifactCache):
         try:
             h = hashlib.sha256()
             # Always write out new file to avoid corruption if input file is modified
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
                 # Set mode bits to 0644
                 os.chmod(out.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
 
@@ -764,7 +762,7 @@ class CASCache(ArtifactCache):
             # already in local cache
             return
 
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
             self._fetch_blob(remote, tree, out)
 
             directory = remote_execution_pb2.Directory()
@@ -778,7 +776,7 @@ class CASCache(ArtifactCache):
                 # already in local cache
                 continue
 
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as f:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
                 self._fetch_blob(remote, filenode.digest, f)
 
                 digest = self.add_object(path=f.name)
@@ -846,6 +844,9 @@ class _CASRemote():
 
 
 def _grouper(iterable, n):
-    # pylint: disable=stop-iteration-return
     while True:
-        yield itertools.chain([next(iterable)], itertools.islice(iterable, n - 1))
+        try:
+            current = next(iterable)
+        except StopIteration:
+            return
+        yield itertools.chain([current], itertools.islice(iterable, n - 1))
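
Background on the _grouper rewrite (and, presumably, the removed pylint "stop-iteration-return" suppression): from Python 3.7, PEP 479 turns a StopIteration that escapes a generator into a RuntimeError, so letting next(iterable) raise to terminate the generator no longer works; catching StopIteration and returning does. A small standalone sketch of the fixed helper and how it is consumed (the example data is made up, and each chunk must be consumed before advancing because the chunks share one underlying iterator):

    import itertools

    def _grouper(iterable, n):
        # Yield successive chunks of up to n items from an iterator.
        while True:
            try:
                current = next(iterable)
            except StopIteration:
                # A bare StopIteration escaping a generator would become a
                # RuntimeError under PEP 479, so terminate explicitly instead.
                return
            yield itertools.chain([current], itertools.islice(iterable, n - 1))

    it = iter(range(7))
    for chunk in _grouper(it, 3):
        print(list(chunk))
    # [0, 1, 2]
    # [3, 4, 5]
    # [6]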
buildstream/_artifactcache/casserver.py
@@ -161,7 +161,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
         offset = 0
         finished = False
         resource_name = None
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.cas.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
             for request in request_iterator:
                 assert not finished
                 assert request.write_offset == offset
buildstream/_pipeline.py
@@ -358,10 +358,24 @@ class Pipeline():
                 inconsistent.append(element)
 
         if inconsistent:
-            detail = "Exact versions are missing for the following elements\n" + \
-                     "Try tracking these elements first with `bst track`\n\n"
+            detail = "Exact versions are missing for the following elements:\n\n"
+
+            missingTrack = 0
             for element in inconsistent:
-                detail += "  " + element._get_full_name() + "\n"
+                detail += "  " + element._get_full_name()
+                for source in element.sources():
+                    if not source._get_consistency() and not source.get_ref():
+                        if hasattr(source, 'tracking') and source.tracking is None:
+                            detail += ": Source {} is missing ref and track. ".format(source._get_full_name()) + \
+                                      "Please specify a ref or branch/tag to track."
+                            missingTrack = 1
+
+                detail += "\n"
+
+            if missingTrack:
+                detail += "\nThen track these elements with `bst track`\n"
+            else:
+                detail += "\nTry tracking these elements first with `bst track`\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
 
     #############################################################
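
To illustrate the intent of the new error detail (the element and source names below are placeholders, not taken from the diff), the message now distinguishes elements that merely need tracking from sources that have neither a ref nor a track:

    # Rough mock-up of the detail string the new loop builds when one element
    # only needs tracking and another has a source with neither ref nor track.
    detail = "Exact versions are missing for the following elements:\n\n"
    detail += "  base/alpine.bst\n"
    detail += ("  app/hello.bst: Source <source name> is missing ref and track. "
               "Please specify a ref or branch/tag to track.\n")
    detail += "\nThen track these elements with `bst track`\n"
    print(detail)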
buildstream/plugins/sources/git.py
@@ -363,6 +363,12 @@ class GitSource(Source):
 
         # If self.tracking is not specified it's not an error, just silently return
         if not self.tracking:
+            # Is there a better way to check if a ref is given.
+            if self.mirror.ref is None:
+                detail = 'Without a tracking branch ref can not be updated. Please ' + \
+                         'provide a ref or a track.'
+                raise SourceError("{}: No track or ref".format(self),
+                                  detail=detail, reason="track-attempt-no-track")
             return None
 
         with self.timed_activity("Tracking {} from {}"
doc/source/install_linux_distro.rst
@@ -57,9 +57,20 @@ Install the dependencies with::
 For the default plugins::
 
   sudo pacman -S \
-  bzr git lzip ostree patch python-arpy python-gobject
+  bzr git lzip ostree patch python-gobject
 
 
+The package *python-arpy* is required by the deb source plugin. This is not
+obtainable via `pacman`, you must get *python-arpy* from AUR:
+https://aur.archlinux.org/packages/python-arpy/
+
+To install::
+
+  wget https://aur.archlinux.org/cgit/aur.git/snapshot/python-arpy.tar.gz
+  tar -xvf python-arpy.tar.gz
+  cd python-arpy
+  makepkg -si
+
 Debian
 ++++++
 Install the dependencies with::
@@ -218,14 +229,16 @@ Arch Linux
 ~~~~~~~~~~
 Packages for Arch exist in `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
 Two different package versions are available:
+
 * Latest release: `buildstream <https://aur.archlinux.org/packages/buildstream>`_
 * Latest development snapshot: `buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
 
+
 Fedora
 ~~~~~~
 
 BuildStream is not yet in the official Fedora repositories, but you can
-install it from a Copr:
+install it from a Copr::
 
   sudo dnf copr enable bochecha/buildstream
   sudo dnf install buildstream
setup.py
@@ -272,6 +272,5 @@ setup(name='BuildStream',
           'pytest-cov >= 2.5.0',
           # Provide option to run tests in parallel, less reliable
           'pytest-xdist',
-          'pytest >= 3.1.0',
-          'pylint >= 1.8 , < 2'],
+          'pytest >= 3.1.0'],
       zip_safe=False)
tests/sources/git.py
@@ -359,3 +359,45 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
 
     # Assert that we are just fine without it, and emit a warning to the user.
     assert "Ignoring inconsistent submodule" in result.stderr
+
+
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    # Create the repo from 'repofiles' subdir
+    repo = create_repo('git', str(tmpdir))
+    ref = repo.create(os.path.join(project, 'repofiles'))
+
+    # Write out our test target
+    gitsource = repo.source_config(ref=None)
+    gitsource.pop('track')
+    element = {
+        'kind': 'import',
+        'sources': [
+            gitsource
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(project, 'target.bst'))
+
+    # Track will encounter an inconsistent submodule without any ref
+    result = cli.run(project=project, args=['track', 'target.bst'])
+    result.assert_main_error(ErrorDomain.STREAM, None)
+    result.assert_task_error(ErrorDomain.SOURCE, 'track-attempt-no-track')
+
+    # Assert that we are just fine without it, and emit a warning to the user.
+    assert "FAILURE git source at" in result.stderr
+    assert "Without a tracking branch ref can not be updated. Please " + \
+        "provide a ref or a track." in result.stderr
+
+    # Track will encounter an inconsistent submodule without any ref
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_main_error(ErrorDomain.PIPELINE, 'inconsistent-pipeline')
+    result.assert_task_error(None, None)
+
+    # Assert that we are just fine without it, and emit a warning to the user.
+    assert "Exact versions are missing for the following elements" in result.stderr
+    assert "is missing ref and track." in result.stderr
+    assert "Then track these elements with `bst track`" in result.stderr