Benjamin Schubert pushed to branch bschubert/remove-pytest-runner at BuildStream / buildstream
Commits:
- 1c24979a by Angelos Evripiotis at 2018-10-19T21:17:01Z
- 43a2eee3 by Angelos Evripiotis at 2018-10-19T21:48:59Z
- 12719f0d by Jürg Billeter at 2018-10-22T17:05:41Z
- a7a28d14 by Jürg Billeter at 2018-10-22T17:05:41Z
- be29e0f5 by Jürg Billeter at 2018-10-22T17:53:26Z
- b74aca1a by Jürg Billeter at 2018-10-23T09:22:19Z
- c7dda150 by Jürg Billeter at 2018-10-23T09:48:00Z
- 74c115b9 by Angelos Evripiotis at 2018-10-23T10:07:31Z
- ecb58b42 by Phil Dawson at 2018-10-23T10:33:47Z
- aa0cbf5d by Martin Blanchard at 2018-10-23T10:54:40Z
- 552f5fc6 by Jim MacArthur at 2018-10-23T11:19:48Z
- f2f81870 by Benjamin Schubert at 2018-10-25T08:54:07Z
18 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_yaml.py
- buildstream/buildelement.py
- buildstream/plugin.py
- buildstream/plugins/elements/autotools.yaml
- buildstream/plugins/elements/cmake.yaml
- buildstream/plugins/elements/make.yaml
- buildstream/plugins/elements/manual.yaml
- buildstream/plugins/elements/meson.yaml
- buildstream/plugins/elements/qmake.yaml
- buildstream/plugins/sources/deb.py
- buildstream/plugins/sources/git.py
- buildstream/plugins/sources/tar.py
- buildstream/sandbox/_sandboxremote.py
- setup.py
- tests/testutils/artifactshare.py
Changes:
=====================================
buildstream/_artifactcache/artifactcache.py
=====================================
@@ -228,7 +228,7 @@ class ArtifactCache():
         self._required_elements.update(elements)
 
         # For the cache keys which were resolved so far, we bump
-        # the atime of them.
+        # the mtime of them.
         #
         # This is just in case we have concurrent instances of
         # BuildStream running with the same artifact cache, it will
@@ -240,7 +240,7 @@ class ArtifactCache():
         for key in (strong_key, weak_key):
             if key:
                 try:
-                    self.update_atime(key)
+                    self.update_mtime(element, key)
                 except ArtifactError:
                     pass
 
@@ -391,15 +391,16 @@ class ArtifactCache():
     def preflight(self):
         pass
 
-    # update_atime()
+    # update_mtime()
     #
-    # Update the atime of an artifact.
+    # Update the mtime of an artifact.
     #
     # Args:
+    #     element (Element): The Element to update
     #     key (str): The key of the artifact.
     #
-    def update_atime(self, key):
-        raise ImplError("Cache '{kind}' does not implement contains()"
+    def update_mtime(self, element, key):
+        raise ImplError("Cache '{kind}' does not implement update_mtime()"
                         .format(kind=type(self).__name__))
 
     # initialize_remotes():
=====================================
buildstream/_artifactcache/cascache.py
=====================================
@@ -538,8 +538,9 @@ class CASCache(ArtifactCache):
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
 
-    def update_atime(self, ref):
+    def update_mtime(self, element, key):
         try:
+            ref = self.get_artifact_fullname(element, key)
             os.utime(self._refpath(ref))
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
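Both hunks above hinge on plain os.utime(): called without a times argument it sets the file's access and modification times to "now", and mtime, unlike atime (which noatime/relatime mounts update lazily or not at all), is dependable for expiry decisions; that is the likely motivation for the rename. A minimal sketch of the mechanism, with a temporary file standing in for the artifact ref (not BuildStream code):

import os
import tempfile
import time

with tempfile.NamedTemporaryFile(delete=False) as f:
    ref_path = f.name                # stand-in for self._refpath(ref)

before = os.stat(ref_path).st_mtime
time.sleep(1)
os.utime(ref_path)                   # bumps both st_atime and st_mtime to now
assert os.stat(ref_path).st_mtime > before
os.unlink(ref_path)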
=====================================
buildstream/_scheduler/queues/queue.py
=====================================
@@ -208,7 +208,7 @@ class Queue():
         # This will have different results for elements depending
         # on the Queue.status() implementation.
         #
-        #   o Elements which are QueueStatus.WAIT will not be effected
+        #   o Elements which are QueueStatus.WAIT will not be affected
         #
         #   o Elements which are QueueStatus.SKIP will move directly
         #     to the dequeue pool
=====================================
buildstream/_yaml.py
=====================================
@@ -972,7 +972,7 @@ def node_validate(node, valid_keys):
 #
 # The purpose of this is to create a virtual copy-on-write
 # copy of a dictionary, so that mutating it in any way does
-# not effect the underlying dictionaries.
+# not affect the underlying dictionaries.
 #
 # collections.ChainMap covers this already mostly, but fails
 # to record internal state so as to hide keys which have been
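For context on the comment being corrected: collections.ChainMap already provides most of the copy-on-write behaviour, because writes only ever land in the front map. What it cannot do, as the comment goes on to say, is remember deletions of keys that live in an underlying dictionary. An illustration with a plain ChainMap (not BuildStream's subclass):

from collections import ChainMap

underlying = {'kind': 'autotools', 'variant': 'default'}
view = ChainMap({}, underlying)

view['variant'] = 'debug'                  # write lands in the empty front map
assert underlying['variant'] == 'default'  # underlying dict is untouched

del view['variant']                        # only removes the front-map override
assert view['variant'] == 'default'        # the inherited key shows through;
                                           # ChainMap keeps no record of deletion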
=====================================
buildstream/buildelement.py
=====================================
@@ -176,7 +176,7 @@ class BuildElement(Element):
 
         # Specifying notparallel for a given element effects the
         # cache key, while having the side effect of setting max-jobs to 1,
-        # which is normally automatically resolved and does not effect
+        # which is normally automatically resolved and does not affect
         # the cache key.
         if self.get_variable('notparallel'):
             dictionary['notparallel'] = True
=====================================
buildstream/plugin.py
=====================================
@@ -266,7 +266,7 @@ class Plugin():
         such as an sha256 sum of a tarball content.
 
         Elements and Sources should implement this by collecting any configurations
-        which could possibly effect the output and return a dictionary of these settings.
+        which could possibly affect the output and return a dictionary of these settings.
 
         For Sources, this is guaranteed to only be called if
         :func:`Source.get_consistency() <buildstream.source.Source.get_consistency>`
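To make the corrected docstring concrete, a hypothetical Source.get_unique_key() in the spirit it describes (the attribute names are illustrative, not from a real plugin):

def get_unique_key(self):
    # Collect only the settings that can affect what gets staged;
    # omit things like fetch timeouts or mirror choice, which cannot.
    return {
        'url': self.original_url,   # hypothetical attributes
        'ref': self.ref,
    }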
=====================================
buildstream/plugins/elements/autotools.yaml
=====================================
@@ -123,7 +123,7 @@ environment:
   V: 1
 
 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/cmake.yaml
=====================================
@@ -66,7 +66,7 @@ environment:
   V: 1
 
 # And dont consider JOBS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - JOBS
 - V
=====================================
buildstream/plugins/elements/make.yaml
=====================================
@@ -36,7 +36,7 @@ environment:
   V: 1
 
 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/manual.yaml
=====================================
@@ -35,7 +35,7 @@ environment:
   V: 1
 
 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/meson.yaml
=====================================
@@ -74,6 +74,6 @@ environment:
     %{max-jobs}
 
 # And dont consider NINJAJOBS as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - NINJAJOBS
=====================================
buildstream/plugins/elements/qmake.yaml
=====================================
@@ -44,7 +44,7 @@ environment:
   V: 1
 
 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/sources/deb.py
=====================================
@@ -50,7 +50,7 @@ deb - stage files from .deb packages
 """
 
 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 import arpy  # pylint: disable=import-error
 
 from .tar import TarSource
@@ -69,8 +69,7 @@ class DebSource(TarSource):
 
     @contextmanager
     def _get_tar(self):
-        with ExitStack() as context:
-            deb_file = context.enter_context(open(self._get_mirror_file(), 'rb'))
+        with open(self._get_mirror_file(), 'rb') as deb_file:
             arpy_archive = arpy.Archive(fileobj=deb_file)
             arpy_archive.read_all_headers()
             data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
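This hunk, like the tar.py hunk further down, drops an ExitStack that managed exactly one context. ExitStack pays off when the set of context managers is only known at runtime; for a single one, a plain with statement is equivalent and one level flatter. A generic before/after sketch (not BuildStream code):

from contextlib import ExitStack

path = '/etc/hostname'   # any readable file

# Before: one context, entered indirectly through an ExitStack
with ExitStack() as stack:
    f = stack.enter_context(open(path, 'rb'))
    data_before = f.read()

# After: the same semantics, stated directly
with open(path, 'rb') as f:
    data_after = f.read()

assert data_before == data_after   # both close the file on exit, even on error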
=====================================
buildstream/plugins/sources/git.py
=====================================
@@ -415,7 +415,7 @@ class GitSource(Source):
     def get_unique_key(self):
         # Here we want to encode the local name of the repository and
         # the ref, if the user changes the alias to fetch the same sources
-        # from another location, it should not effect the cache key.
+        # from another location, it should not affect the cache key.
         key = [self.original_url, self.mirror.ref]
 
         # Only modify the cache key with checkout_submodules if it's something
=====================================
buildstream/plugins/sources/tar.py
=====================================
@@ -57,7 +57,7 @@ tar - stage files from tar archives
 
 import os
 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 from tempfile import TemporaryFile
 
 from buildstream import SourceError
@@ -88,8 +88,7 @@ class TarSource(DownloadableFileSource):
     def _run_lzip(self):
         assert self.host_lzip
         with TemporaryFile() as lzip_stdout:
-            with ExitStack() as context:
-                lzip_file = context.enter_context(open(self._get_mirror_file(), 'r'))
+            with open(self._get_mirror_file(), 'r') as lzip_file:
                 self.call([self.host_lzip, '-d'],
                           stdin=lzip_file,
                           stdout=lzip_stdout)
=====================================
buildstream/sandbox/_sandboxremote.py
=====================================
@@ -76,8 +76,7 @@ class SandboxRemote(Sandbox):
         # Upload the Command message to the remote CAS server
         command_digest = cascache.push_message(self._get_project(), remote_command)
         if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
-            # Command push failed
-            return None
+            raise SandboxError("Failed pushing build command to remote CAS.")
 
         # Create and send the action.
         action = remote_execution_pb2.Action(command_digest=command_digest,
@@ -88,27 +87,57 @@ class SandboxRemote(Sandbox):
         # Upload the Action message to the remote CAS server
         action_digest = cascache.push_message(self._get_project(), action)
         if not action_digest or not cascache.verify_digest_pushed(self._get_project(), action_digest):
-            # Action push failed
-            return None
+            raise SandboxError("Failed pushing build action to remote CAS.")
 
         # Next, try to create a communication channel to the BuildGrid server.
         channel = grpc.insecure_channel(self.server_url)
         stub = remote_execution_pb2_grpc.ExecutionStub(channel)
         request = remote_execution_pb2.ExecuteRequest(action_digest=action_digest,
                                                       skip_cache_lookup=False)
-        try:
-            operation_iterator = stub.Execute(request)
-        except grpc.RpcError:
-            return None
+
+        def __run_remote_command(stub, execute_request=None, running_operation=None):
+            try:
+                last_operation = None
+                if execute_request is not None:
+                    operation_iterator = stub.Execute(execute_request)
+                else:
+                    request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
+                    operation_iterator = stub.WaitExecution(request)
+
+                for operation in operation_iterator:
+                    if operation.done:
+                        return operation
+                    else:
+                        last_operation = operation
+            except grpc.RpcError as e:
+                status_code = e.code()
+                if status_code == grpc.StatusCode.UNAVAILABLE:
+                    raise SandboxError("Failed contacting remote execution server at {}."
+                                       .format(self.server_url))
+
+                elif status_code in (grpc.StatusCode.INVALID_ARGUMENT,
+                                     grpc.StatusCode.FAILED_PRECONDITION,
+                                     grpc.StatusCode.RESOURCE_EXHAUSTED,
+                                     grpc.StatusCode.INTERNAL,
+                                     grpc.StatusCode.DEADLINE_EXCEEDED):
+                    raise SandboxError("{} ({}).".format(e.details(), status_code.name))
+
+                elif running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
+                    raise SandboxError("Failed trying to recover from connection loss: "
+                                       "server does not support operation status polling recovery.")
+
+            return last_operation
 
         operation = None
         with self._get_context().timed_activity("Waiting for the remote build to complete"):
-            # It is advantageous to check operation_iterator.code() is grpc.StatusCode.OK here,
-            # which will check the server is actually contactable. However, calling it when the
-            # server is available seems to cause .code() to hang forever.
-            for operation in operation_iterator:
-                if operation.done:
-                    break
+            operation = __run_remote_command(stub, execute_request=request)
+            if operation is None:
+                return None
+            elif operation.done:
+                return operation
+
+            while operation is not None and not operation.done:
+                operation = __run_remote_command(stub, running_operation=operation)
 
         return operation
 
@@ -192,7 +221,6 @@ class SandboxRemote(Sandbox):
 
         if operation is None:
             # Failure of remote execution, usually due to an error in BuildStream
-            # NB This error could be raised in __run_remote_command
             raise SandboxError("No response returned from server")
 
         assert not operation.HasField('error') and operation.HasField('response')
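The rewrite above turns silent "return None" failures into SandboxError and adds recovery: Execute() starts the build, and if the operation stream drops before it is done, WaitExecution() reattaches to the same server-side operation by name rather than rerunning the build (servers answering UNIMPLEMENTED get a clear error instead). A condensed sketch of just that reattach loop, with the error mapping omitted and the helper names assumed (not the BuildStream code):

def drain(operation_iterator):
    # Consume a stream of Operation updates, returning the last one seen.
    last = None
    for op in operation_iterator:
        if op.done:
            return op
        last = op
    return last

def run_with_recovery(stub, execute_request):
    operation = drain(stub.Execute(execute_request))
    while operation is not None and not operation.done:
        # Poll the existing operation instead of starting a new build.
        wait = remote_execution_pb2.WaitExecutionRequest(name=operation.name)
        operation = drain(stub.WaitExecution(wait))
    return operation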
=====================================
setup.py
=====================================
@@ -18,6 +18,8 @@
 #  Authors:
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
 
+from distutils import log
+from distutils.errors import DistutilsError
 import os
 import re
 import shutil
@@ -39,6 +41,7 @@ if sys.version_info[0] != REQUIRED_PYTHON_MAJOR or sys.version_info[1] < REQUIRE
 try:
     from setuptools import setup, find_packages, Command
     from setuptools.command.easy_install import ScriptWriter
+    from setuptools.command.test import test as TestCommand
 except ImportError:
     print("BuildStream requires setuptools in order to build. Install it using"
           " your package manager (usually python3-setuptools) or via pip (pip3"
@@ -219,9 +222,55 @@ class BuildGRPC(Command):
             f.write(code)
 
 
+#####################################################
+#                  Pytest command                   #
+#####################################################
+class PyTest(TestCommand):
+    """Defines a pytest command class to run tests from setup.py"""
+
+    user_options = TestCommand.user_options + [
+        ("addopts=", None, "Arguments to pass to pytest"),
+        ('index-url='…

[… new-file lines 233-260 were truncated in this message …]

+            …'Tests failed', log.ERROR)
+        raise DistutilsError('Tests failed')
+
+    @property
+    def _argv(self):
+        import shlex
+
+        return ["pytest"] + shlex.split(self.addopts)
+
+
 def get_cmdclass():
     cmdclass = {
         'build_grpc': BuildGRPC,
+        'pytest': PyTest,
     }
     cmdclass.update(versioneer.get_cmdclass())
     return cmdclass
@@ -305,6 +354,5 @@ setup(name='BuildStream',
           'grpcio >= 1.10',
       ],
       entry_points=bst_install_entry_points,
-      setup_requires=['pytest-runner'],
      tests_require=dev_requires,
       zip_safe=False)
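Since the middle of the new PyTest class is truncated in this message, here is a minimal command class of the same shape for reference; the option plumbing and run_tests() body are assumptions, not the committed code:

import shlex

from distutils import log
from distutils.errors import DistutilsError
from setuptools.command.test import test as TestCommand


class PyTest(TestCommand):
    """Run the test suite via 'python3 setup.py pytest'."""

    user_options = TestCommand.user_options + [
        ("addopts=", None, "Arguments to pass to pytest"),
    ]

    def initialize_options(self):
        super().initialize_options()
        self.addopts = ""

    def run_tests(self):
        import pytest   # imported late, so setup.py itself needs no pytest

        if pytest.main(shlex.split(self.addopts)) != 0:
            self.announce('Tests failed', log.ERROR)
            raise DistutilsError('Tests failed')

Together with dropping setup_requires=['pytest-runner'] in the last hunk, this keeps an invocation like "python3 setup.py pytest --addopts '-x tests'" working without pulling pytest-runner in at build time.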
=====================================
tests/testutils/artifactshare.py
=====================================
@@ -122,9 +122,8 @@ class ArtifactShare():
         # same algo for creating an artifact reference
         #
 
-        # Chop off the .bst suffix first
-        assert element_name.endswith('.bst')
-        element_name = element_name[:-4]
+        # Replace path separator and chop off the .bst suffix
+        element_name = os.path.splitext(element_name.replace(os.sep, '-'))[0]
 
         valid_chars = string.digits + string.ascii_letters + '-._'
         element_name = ''.join([
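The replacement one-liner also handles nested element names, which the removed assert-and-slice did not anticipate. A worked example of the new expression (POSIX os.sep assumed):

import os

element_name = 'base/alpine.bst'
element_name = os.path.splitext(element_name.replace(os.sep, '-'))[0]
assert element_name == 'base-alpine'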