Tristan Van Berkom pushed to branch tristan/detox-tests at BuildStream / buildstream
Commits:
- 17c5ca2d by Valentin David at 2019-01-09T10:45:23Z
- 605f8d11 by Valentin David at 2019-01-09T10:45:23Z
- 65ea03ab by Valentin David at 2019-01-09T10:45:23Z
- ba05c4fa by Valentin David at 2019-01-09T12:58:59Z
- eb0dbcfc by Valentin David at 2019-01-09T13:56:58Z
- f7681925 by Valentin David at 2019-01-09T14:44:22Z
- f87d1c93 by Jim MacArthur at 2019-01-09T15:01:18Z
- ed8bc5bc by Jim MacArthur at 2019-01-09T15:30:28Z
- 3a06278e by Tristan Van Berkom at 2019-01-09T15:38:56Z
- 4274889d by Tristan Van Berkom at 2019-01-09T15:38:56Z
- b57deea9 by Tristan Van Berkom at 2019-01-09T15:38:56Z
11 changed files:
- .gitignore
- .gitlab-ci.yml
- CONTRIBUTING.rst
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_project.py
- buildstream/plugins/elements/script.py
- buildstream/sandbox/_sandboxremote.py
- doc/source/format_project.rst
- doc/source/using_config.rst
- tox.ini
Changes:
--- a/.gitignore
+++ b/.gitignore
@@ -13,11 +13,12 @@ tests/**/*.pyc
 integration-cache/
 tmp
 .coverage
+.coverage-reports/
 .coverage.*
 .cache
 .pytest_cache/
 *.bst/
-.tox
+.tox/

 # Pycache, in case buildstream is ran directly from within the source
 # tree
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -13,6 +13,7 @@ variables:
   PYTEST_ADDOPTS: "--color=yes"
   INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
   TEST_COMMAND: "tox -- --color=yes --integration"
+  COVERAGE_PREFIX: "${CI_JOB_NAME}."


 #####################################################
@@ -24,9 +25,6 @@ variables:
 .tests-template: &tests
   stage: test

-  variables:
-    COVERAGE_DIR: coverage-linux
-
   before_script:
   # Diagnostics
   - mount
@@ -40,14 +38,11 @@ variables:
   - su buildstream -c "${TEST_COMMAND}"

   after_script:
-  # Collect our reports
-  - mkdir -p ${COVERAGE_DIR}
-  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
   except:
   - schedules
   artifacts:
     paths:
-    - ${COVERAGE_DIR}
+    - .coverage-reports

 tests-debian-9:
   image: buildstream/testsuite-debian:9-5da27168-32c47d1c
@@ -83,7 +78,6 @@ tests-unix:
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"
-    COVERAGE_DIR: coverage-unix

   script:

@@ -239,22 +233,22 @@ coverage:
   stage: post
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
-    - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
-    - pip3 install --no-index .
-    - mkdir report
-    - cd report
-    - cp ../coverage-unix/coverage.* .
-    - cp ../coverage-linux/coverage.* .
-    - ls coverage.*
-    - coverage combine --rcfile=../.coveragerc -a coverage.*
-    - coverage report --rcfile=../.coveragerc -m
+    - cp -a .coverage-reports/ ./coverage-sources
+    - tox -e coverage
+    - cp -a .coverage-reports/ ./coverage-report
   dependencies:
   - tests-debian-9
   - tests-fedora-27
   - tests-fedora-28
+  - tests-fedora-missing-deps
+  - tests-ubuntu-18.04
   - tests-unix
   except:
   - schedules
+  artifacts:
+    paths:
+    - coverage-sources/
+    - coverage-report/

 # Deploy, only for merges which land on master branch.
 #
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1498,6 +1498,10 @@ option when running tox::

   tox -e py37

+If you have multiple python versions installed and would like to test against
+multiple versions, then we recommend using `detox <https://github.com/tox-dev/detox>`_;
+just run it with the same arguments you would give `tox`.
+
 Linting is performed separately from testing. In order to run the linting step which
 consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::

@@ -1574,6 +1578,19 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
   ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'


+Observing coverage
+~~~~~~~~~~~~~~~~~~
+Once you have run the tests using `tox` (or `detox`), some coverage reports will
+have been left behind.
+
+To view the coverage report of the last test run, simply run::
+
+  tox -e coverage
+
+This will collate any reports from separate python environments that may be
+under test before displaying the combined coverage.
+
+
 Adding tests
 ~~~~~~~~~~~~
 Tests are found in the tests subdirectory, inside of which
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -53,7 +53,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
     #
     @staticmethod
     def _new_from_config_node(spec_node, basedir=None):
-        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
+        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
         url = _yaml.node_get(spec_node, str, 'url')
         push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
         if not url:
@@ -61,7 +61,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: empty artifact cache URL".format(provenance))

-        instance_name = _yaml.node_get(spec_node, str, 'instance_name', default_value=None)
+        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)

         server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
         if server_cert and basedir:
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -34,6 +34,7 @@ from ._artifactcache import ArtifactCache
 from ._artifactcache.cascache import CASCache
 from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
 from .plugin import _plugin_lookup
+from .sandbox import SandboxRemote


 # Context()
@@ -72,6 +73,9 @@ class Context():
         # The locations from which to push and pull prebuilt artifacts
         self.artifact_cache_specs = None

+        # The global remote execution configuration
+        self.remote_execution_specs = None
+
         # The directory to store build logs
         self.logdir = None

@@ -187,7 +191,7 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'prompt', 'workspacedir',
+            'cache', 'prompt', 'workspacedir', 'remote-execution'
         ])

         for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
@@ -212,6 +216,8 @@ class Context():
         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)

+        self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
+
         # Load pull build trees configuration
         self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')

@@ -271,7 +277,8 @@ class Context():
         # Shallow validation of overrides, parts of buildstream which rely
         # on the overrides are expected to validate elsewhere.
         for _, overrides in _yaml.node_items(self._project_overrides):
-            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
+            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
+                                            'remote-execution'])

         profile_end(Topics.LOAD_CONTEXT, 'load')

--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -507,7 +507,16 @@ class Project():
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)

         # Load remote-execution configuration for this project
-        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
+        project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
+        override_specs = SandboxRemote.specs_from_config_node(
+            self._context.get_overrides(self.name), self.directory)
+
+        if override_specs is not None:
+            self.remote_execution_specs = override_specs
+        elif project_specs is not None:
+            self.remote_execution_specs = project_specs
+        else:
+            self.remote_execution_specs = self._context.remote_execution_specs

         # Load sandbox environment variables
         self.base_environment = _yaml.node_get(config, Mapping, 'environment')
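Note for readers: the hunk above establishes a precedence order for remote execution configuration. The sketch below only restates that order for illustration (the helper name is hypothetical, not code from this branch): a per-project override from the user configuration wins, then the project's own project.conf setting, and finally the global user configuration is the fallback.

    # Hypothetical helper restating the fallback order applied in Project():
    def choose_remote_execution_specs(override_specs, project_specs, global_specs):
        # 1. user configuration override for this project
        if override_specs is not None:
            return override_specs
        # 2. the project's own project.conf configuration
        if project_specs is not None:
            return project_specs
        # 3. the global user configuration
        return global_specs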
--- a/buildstream/plugins/elements/script.py
+++ b/buildstream/plugins/elements/script.py
@@ -42,6 +42,9 @@ import buildstream
 class ScriptElement(buildstream.ScriptElement):
     # pylint: disable=attribute-defined-outside-init

+    # This plugin has been modified to avoid the use of Sandbox.get_directory
+    BST_VIRTUAL_DIRECTORY = True
+
     def configure(self, node):
         for n in self.node_get_member(node, list, 'layout', []):
             dst = self.node_subst_member(n, 'destination')
--- a/buildstream/sandbox/_sandboxremote.py
+++ b/buildstream/sandbox/_sandboxremote.py
@@ -62,10 +62,32 @@ class SandboxRemote(Sandbox):
         self.storage_url = config.storage_service['url']
         self.exec_url = config.exec_service['url']

+        exec_certs = {}
+        for key in ['client-cert', 'client-key', 'server-cert']:
+            if key in config.exec_service:
+                with open(config.exec_service[key], 'rb') as f:
+                    exec_certs[key] = f.read()
+
+        self.exec_credentials = grpc.ssl_channel_credentials(
+            root_certificates=exec_certs.get('server-cert'),
+            private_key=exec_certs.get('client-key'),
+            certificate_chain=exec_certs.get('client-cert'))
+
+        action_certs = {}
+        for key in ['client-cert', 'client-key', 'server-cert']:
+            if key in config.action_service:
+                with open(config.action_service[key], 'rb') as f:
+                    action_certs[key] = f.read()
+
         if config.action_service:
             self.action_url = config.action_service['url']
+            self.action_credentials = grpc.ssl_channel_credentials(
+                root_certificates=action_certs.get('server-cert'),
+                private_key=action_certs.get('client-key'),
+                certificate_chain=action_certs.get('client-cert'))
         else:
             self.action_url = None
+            self.action_credentials = None

         self.server_instance = config.exec_service.get('instance', None)
         self.storage_instance = config.storage_service.get('instance', None)
@@ -81,7 +103,7 @@ class SandboxRemote(Sandbox):
         self._get_context().message(Message(None, MessageType.INFO, msg))

     @staticmethod
-    def specs_from_config_node(config_node, basedir):
+    def specs_from_config_node(config_node, basedir=None):

         def require_node(config, keyname):
             val = config.get(keyname)
@@ -109,10 +131,10 @@ class SandboxRemote(Sandbox):
         remote_exec_storage_config = require_node(remote_config, 'storage-service')
         remote_exec_action_config = remote_config.get('action-cache-service', {})

-        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'])
+        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'] + tls_keys)
         _yaml.node_validate(remote_exec_storage_config, ['url', 'instance'] + tls_keys)
         if remote_exec_action_config:
-            _yaml.node_validate(remote_exec_action_config, ['url'])
+            _yaml.node_validate(remote_exec_action_config, ['url'] + tls_keys)
         else:
             remote_config['action-service'] = None

@@ -135,6 +157,19 @@ class SandboxRemote(Sandbox):
                                 "remote-execution configuration. Your config is missing '{}'."
                                 .format(str(provenance), tls_keys, key))

+        def resolve_path(path):
+            if basedir and path:
+                return os.path.join(basedir, path)
+            else:
+                return path
+
+        for key in tls_keys:
+            for d in (remote_config['execution-service'],
+                      remote_config['storage-service'],
+                      remote_exec_action_config):
+                if key in d:
+                    d[key] = resolve_path(d[key])
+
         spec = RemoteExecutionSpec(remote_config['execution-service'],
                                    remote_config['storage-service'],
                                    remote_exec_action_config)
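Aside: the resolve_path() helper added above anchors relative TLS certificate paths to the given base directory before the files are read; absolute paths pass through unchanged because os.path.join() discards earlier components when a later one is absolute. A minimal standalone sketch of that behaviour (illustrative only, not code from this branch):

    import os

    def resolve_path(basedir, path):
        # Anchor a relative certificate path to basedir; leave absolute
        # paths and empty values untouched.
        if basedir and path:
            return os.path.join(basedir, path)
        return path

    # resolve_path('/home/user/project', 'keys/client.crt')
    #   -> '/home/user/project/keys/client.crt'
    # resolve_path('/home/user/project', '/etc/ssl/certs/ca.pem')
    #   -> '/etc/ssl/certs/ca.pem'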
@@ -295,6 +330,8 @@ class SandboxRemote(Sandbox):
                                "for example: http://buildservice:50051.")
         if url.scheme == 'http':
             channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
+        elif url.scheme == 'https':
+            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
         else:
             raise SandboxError("Remote execution currently only supports the 'http' protocol "
                                "and '{}' was supplied.".format(url.scheme))
@@ -352,11 +389,11 @@ class SandboxRemote(Sandbox):
         if not url.port:
             raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
                                "for example: http://buildservice:50051.")
-        if not url.scheme == "http":
-            raise SandboxError("Currently only support http for the action cache"
-                               "and {} was supplied".format(url.scheme))
+        if url.scheme == 'http':
+            channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
+        elif url.scheme == 'https':
+            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)

-        channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
         request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
         stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
         try:
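Taken together with the credentials built in the constructor hunk earlier, these two hunks select an insecure channel for http URLs and a TLS channel for https URLs. The following self-contained sketch shows the same pattern using plain grpcio; the function and argument names are assumptions for illustration, not BuildStream API:

    import grpc

    def open_channel(hostname, port, scheme,
                     server_cert=None, client_key=None, client_cert=None):
        def read(path):
            # Return PEM file contents as bytes, or None if not configured.
            if path is None:
                return None
            with open(path, 'rb') as f:
                return f.read()

        target = '{}:{}'.format(hostname, port)
        if scheme == 'http':
            # No TLS requested: plain insecure channel.
            return grpc.insecure_channel(target)
        # https: wrap the PEM data in channel credentials and open a secure channel.
        credentials = grpc.ssl_channel_credentials(
            root_certificates=read(server_cert),
            private_key=read(client_key),
            certificate_chain=read(client_cert))
        return grpc.secure_channel(target, credentials)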
--- a/doc/source/format_project.rst
+++ b/doc/source/format_project.rst
@@ -218,6 +218,7 @@ The use of ports are required to distinguish between pull only access and
 push/pull access. For information regarding the server/client certificates
 and keys, please see: :ref:`Key pair for the server <server_authentication>`.

+.. _project_remote_execution:

 Remote execution
 ~~~~~~~~~~~~~~~~
@@ -243,9 +244,6 @@ using the `remote-execution` option:
     action-cache-service:
       url: http://bar.action.com:50052

-The execution-service part of remote execution does not support encrypted
-connections yet, so the protocol must always be http.
-
 storage-service specifies a remote CAS store and the parameters are the
 same as those used to specify an :ref:`artifact server <artifacts>`.

@@ -268,6 +266,9 @@ instance names.

 The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.

+Remote execution configuration can also be provided in the `user
+configuration <user_config_remote_execution>`.
+
 .. _project_essentials_mirrors:

 Mirrors
--- a/doc/source/using_config.rst
+++ b/doc/source/using_config.rst
@@ -100,6 +100,54 @@ pull only access and push/pull access. For information regarding this and the
 server/client certificates and keys, please see:
 :ref:`Key pair for the server <server_authentication>`.

+.. _user_config_remote_execution:
+
+Remote execution
+~~~~~~~~~~~~~~~~
+
+The same configuration for :ref:`remote execution <project_remote_execution>`
+in ``project.conf`` can be provided in the user configuration.
+
+There is only one remote execution configuration used per project.
+
+The project overrides take priority; the global configuration is
+used as a fallback.
+
+1. Global remote execution fallback:
+
+.. code:: yaml
+
+  remote-execution:
+    execution-service:
+      url: http://execution.fallback.example.com:50051
+      instance-name: main
+    storage-service:
+      url: https://storage.fallback.example.com:11002/
+      server-cert: /keys/server.crt
+      client-cert: /keys/client.crt
+      client-key: /keys/client.key
+      instance-name: main
+    action-cache-service:
+      url: http://action.fallback.example.com:50052
+
+2. Project override:
+
+.. code:: yaml
+
+  projects:
+    some_project:
+      remote-execution:
+        execution-service:
+          url: http://execution.some_project.example.com:50051
+          instance-name: main
+        storage-service:
+          url: https://storage.some_project.example.com:11002/
+          server-cert: /some_project_keys/server.crt
+          client-cert: /some_project_keys/client.crt
+          client-key: /some_project_keys/client.key
+          instance-name: main
+        action-cache-service:
+          url: http://action.some_project.example.com:50052


 Strict build plan
--- a/tox.ini
+++ b/tox.ini
@@ -1,9 +1,19 @@
+#
+# Tox global configuration
+#
 [tox]
 envlist = py35,py36,py37
 skip_missing_interpreters = true

+#
+# Defaults for all environments
+#
+# Anything specified here is inherited by the sections
+#
 [testenv]
-commands = pytest {posargs}
+commands =
+    pytest --basetemp {envtmpdir} {posargs} {toxinidir}
+    mkdir -p {toxinidir}/.coverage-reports
 deps =
     -rrequirements/requirements.txt
     -rrequirements/dev-requirements.txt
@@ -13,6 +23,32 @@ passenv =
     GI_TYPELIB_PATH
     INTEGRATION_CACHE

+#
+# These keys are not inherited by any other sections
+#
+setenv =
+    py{35,36,37}: COVERAGE_FILE = .coverage.{env:COVERAGE_PREFIX:}{envname}
+whitelist_externals =
+    py{35,36,37}:
+        cp
+        mkdir
+
+#
+# Coverage reporting
+#
+[testenv:coverage]
+commands =
+    - coverage combine --rcfile={toxinidir}/.coveragerc {toxinidir}/.coverage-reports/
+    coverage report --rcfile={toxinidir}/.coveragerc -m
+deps =
+    -rrequirements/requirements.txt
+    -rrequirements/dev-requirements.txt
+setenv =
+    COVERAGE_FILE = {toxinidir}/.coverage-reports/.coverage
+
+#
+# Running linters
+#
 [testenv:lint]
 commands =
     pycodestyle
@@ -22,6 +58,9 @@ deps =
     -rrequirements/dev-requirements.txt
     -rrequirements/plugin-requirements.txt

+#
+# Building documentation
+#
 [testenv:docs]
 commands =
     make -C doc