Daniel pushed to branch danielsilverstone-ct/fix-lint-issues at BuildStream / buildstream
Commits:
- 37e32bbe by Daniel Silverstone at 2018-10-24T08:27:58Z
- a96175cd by Daniel Silverstone at 2018-10-24T08:28:04Z
- bdc99787 by Daniel Silverstone at 2018-10-24T08:28:04Z
- a5cf9781 by Daniel Silverstone at 2018-10-24T08:28:04Z
- 460bde90 by Daniel Silverstone at 2018-10-24T08:28:31Z
- 1de42941 by Daniel Silverstone at 2018-10-24T08:28:35Z
- 5249fcea by Daniel Silverstone at 2018-10-24T08:28:59Z
- 8e6aa97c by Daniel Silverstone at 2018-10-24T08:29:03Z
- d67252ae by Daniel Silverstone at 2018-10-24T08:30:05Z
- ca589f81 by Daniel Silverstone at 2018-10-24T08:30:08Z
- d839b6e9 by Daniel Silverstone at 2018-10-24T08:30:08Z
- 04e45f22 by Daniel Silverstone at 2018-10-24T08:30:29Z
- 45a66c11 by Daniel Silverstone at 2018-10-24T08:30:33Z
- de1e0a52 by Daniel Silverstone at 2018-10-24T08:30:33Z
- a3271709 by Daniel Silverstone at 2018-10-24T08:31:00Z
8 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_yaml.py
- buildstream/_yamlcache.py
- buildstream/buildelement.py
- buildstream/element.py
- buildstream/plugin.py
- buildstream/source.py
- buildstream/utils.py
Changes:

buildstream/_artifactcache/artifactcache.py:

@@ -156,7 +156,7 @@ class ArtifactCache():
     def setup_remotes(self, *, use_config=False, remote_url=None):
 
         # Ensure we do not double-initialise since this can be expensive
-        assert(not self._remotes_setup)
+        assert not self._remotes_setup
         self._remotes_setup = True
 
         # Initialize remote artifact caches. We allow the commandline to override

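Note: assert is a statement, not a function, so the call-style parentheses are redundant; pylint's superfluous-parens check (C0325) is the likely trigger for this change. A minimal illustrative sketch (not part of the patch) of why the call-like spelling is discouraged:

    remotes_setup = False

    assert not remotes_setup        # preferred statement form
    assert (not remotes_setup)      # same meaning, but reads like a call

    # The real hazard of the call-like habit: a parenthesised pair is a
    # tuple, which is always truthy, so the check below could never fail
    # (pylint reports that separately as assert-on-tuple).
    # assert (not remotes_setup, "remotes already set up")
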
@@ -252,7 +252,7 @@ class ArtifactCache():
     # (int): The size of the cache after having cleaned up
     #
     def clean(self):
-        artifacts = self.list_artifacts()
+        artifacts = self.list_artifacts()  # pylint: disable=assignment-from-no-return
 
         # Build a set of the cache keys which are required
         # based on the required elements at cleanup time

@@ -294,7 +294,7 @@ class ArtifactCache():
             if key not in required_artifacts:
 
                 # Remove the actual artifact, if it's not required.
-                size = self.remove(to_remove)
+                size = self.remove(to_remove)  # pylint: disable=assignment-from-no-return
 
                 # Remove the size from the removed size
                 self.set_cache_size(self._cache_size - size)

@@ -311,7 +311,7 @@ class ArtifactCache():
     # (int): The size of the artifact cache.
     #
     def compute_cache_size(self):
-        self._cache_size = self.calculate_cache_size()
+        self._cache_size = self.calculate_cache_size()  # pylint: disable=assignment-from-no-return
 
         return self._cache_size
 

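Note: the three suppressions above share one cause. list_artifacts(), remove() and calculate_cache_size() are presumably meant to be implemented by the concrete cache backend, so the definitions pylint analyses in this module never return a value and it reports assignment-from-no-return at each call site; the inline disables mark these as known false positives. A minimal sketch of the pattern, using illustrative names rather than the real BuildStream API:

    class Cache():
        def list_artifacts(self):
            # The definition pylint sees has no return statement, so any
            # assignment from a call to it looks like a mistake to it.
            raise NotImplementedError("implemented by the concrete cache backend")

    class LocalCache(Cache):
        def list_artifacts(self):
            return ["element/abc123", "element/def456"]

    def clean(cache):
        # The suppression records that this is a known false positive.
        artifacts = cache.list_artifacts()  # pylint: disable=assignment-from-no-return
        return len(artifacts)

    print(clean(LocalCache()))  # -> 2

Whether a raise-only body is exempt depends on the pylint version; the explicit suppressions keep the intent clear either way.
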
buildstream/_yaml.py:

@@ -473,7 +473,7 @@ def node_get_project_path(node, key, project_dir, *,
         if sys.version_info[0] == 3 and sys.version_info[1] < 6:
             full_resolved_path = (project_dir_path / path).resolve()
         else:
-            full_resolved_path = (project_dir_path / path).resolve(strict=True)
+            full_resolved_path = (project_dir_path / path).resolve(strict=True)  # pylint: disable=unexpected-keyword-arg
     except FileNotFoundError:
         raise LoadError(LoadErrorReason.MISSING_FILE,
                         "{}: Specified path '{}' does not exist"

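Note: pathlib.Path.resolve() only accepts the strict keyword from Python 3.6 onwards. The code already branches on sys.version_info, but pylint checks the call against the stdlib of the interpreter it runs under, so a lint run on Python 3.5 presumably reports unexpected-keyword-arg; the suppression covers that case. A hedged sketch of the same guard (illustrative helper, not the BuildStream function):

    import sys
    from pathlib import Path

    def resolve_strictly(base, relpath):
        # resolve() gained 'strict' in Python 3.6; on 3.5 resolve() already
        # raises for missing paths, so both branches behave the same way.
        if sys.version_info >= (3, 6):
            return (Path(base) / relpath).resolve(strict=True)  # pylint: disable=unexpected-keyword-arg
        return (Path(base) / relpath).resolve()

    print(resolve_strictly(".", "."))
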
buildstream/_yamlcache.py:

@@ -27,9 +27,8 @@ import sys
 from contextlib import contextmanager
 from collections import namedtuple
 
-from ._cachekey import generate_key
 from ._context import Context
-from . import utils, _yaml
+from . import _yaml
 
 
 YAML_CACHE_FILENAME = "yaml_cache.pickle"

@@ -207,7 +206,7 @@ class YamlCache():
             filepath = os.path.relpath(full_path, project.directory)
         else:
             filepath = full_path
-        return full_path
+        return filepath
 
     # _calculate_key():
     #

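Note: this hunk is the one behavioural fix in the series rather than a pure lint appeasement: the function computes a project-relative filepath and then returned the absolute full_path, discarding it, which is presumably what pylint's unused-variable warning pointed at. A small illustrative sketch of the corrected shape (assumed names, not the real method signature):

    import os

    def _get_filepath(full_path, project_directory=None):
        # Prefer a project-relative path when a project directory is known.
        if project_directory is not None:
            filepath = os.path.relpath(full_path, project_directory)
        else:
            filepath = full_path
        return filepath  # previously 'return full_path', discarding the relpath

    print(_get_filepath("/src/project/elements/base.bst", "/src/project"))  # elements/base.bst
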
@@ -329,7 +328,7 @@ class BstUnpickler(pickle.Unpickler):
             if not project:
                 projects = [p.name for p in self._context.get_projects()]
                 raise pickle.UnpicklingError("No project with name {} found in {}"
-                                             .format(key_id, projects))
+                                             .format(project_tag, projects))
         else:
             project = None
             name = tagged_name

buildstream/buildelement.py:

@@ -152,7 +152,7 @@ class BuildElement(Element):
     #############################################################
     def configure(self, node):
 
-        self.__commands = {}
+        self.__commands = {}  # pylint: disable=attribute-defined-outside-init
 
         # FIXME: Currently this forcefully validates configurations
        # for all BuildElement subclasses so they are unable to

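Note: configure() is the hook the core runs while setting a plugin up, so __commands legitimately gets its first assignment outside __init__; pylint cannot see that guarantee and reports attribute-defined-outside-init, which the suppression acknowledges. A minimal sketch of the shape of the pattern (illustrative class, not the real BuildElement):

    class ElementLike():
        def __init__(self, node):
            # The core always configures a plugin as part of construction ...
            self.configure(node)

        def configure(self, node):
            # ... but the assignment is not lexically inside __init__, so
            # pylint still reports attribute-defined-outside-init here.
            self.__commands = dict(node)  # pylint: disable=attribute-defined-outside-init

        def commands(self):
            return self.__commands

    print(ElementLike({"build": ["make"]}).commands())
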
buildstream/element.py:

@@ -432,7 +432,7 @@ class Element(Plugin):
                                        visited=visited, recursed=True)
 
         # Yeild self only at the end, after anything needed has been traversed
-        if should_yield and (recurse or recursed) and (scope == Scope.ALL or scope == Scope.RUN):
+        if should_yield and (recurse or recursed) and (scope in (Scope.ALL, Scope.RUN)):
             yield self
 
     def search(self, scope, name):

@@ -1563,7 +1563,7 @@ class Element(Plugin):
                 # Step 3 - Prepare
                 self.__prepare(sandbox)
                 # Step 4 - Assemble
-                collect = self.assemble(sandbox)
+                collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
                 self.__set_build_result(success=True, description="succeeded")
             except BstError as e:
                 # If an error occurred assembling an element in a sandbox,

@@ -2521,7 +2521,7 @@ class Element(Plugin):
             strong_key = meta['strong']
             weak_key = meta['weak']
 
-            assert key == strong_key or key == weak_key
+            assert key in (strong_key, weak_key)
 
             self.__metadata_keys[strong_key] = meta
             self.__metadata_keys[weak_key] = meta

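Note: this hunk and the Scope comparison earlier in element.py both swap a chain of equality tests for a membership test, which is what pylint's consider-using-in (R1714) refactoring message asks for; the two spellings are equivalent here. An illustrative comparison with assumed values:

    from enum import Enum

    class Scope(Enum):
        ALL = 1
        BUILD = 2
        RUN = 3

    scope = Scope.RUN

    old_style = scope == Scope.ALL or scope == Scope.RUN   # triggers R1714
    new_style = scope in (Scope.ALL, Scope.RUN)             # suggested, equivalent spelling

    assert old_style == new_style
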
buildstream/plugin.py:

@@ -751,9 +751,7 @@ class Plugin():
         self.__context.message(message)
 
     def __note_command(self, output, *popenargs, **kwargs):
-        workdir = os.getcwd()
-        if 'cwd' in kwargs:
-            workdir = kwargs['cwd']
+        workdir = kwargs.get('cwd', os.getcwd())
         command = " ".join(popenargs[0])
         output.write('Running host command {}: {}\n'.format(workdir, command))
         output.flush()

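Note: the three-line lookup-with-fallback collapses into dict.get() with a default. The one behavioural nuance is that the default is always evaluated, so os.getcwd() now runs even when 'cwd' is supplied; that is harmless for a cheap call like this. Illustrative equivalence check:

    import os

    def workdir_old(kwargs):
        workdir = os.getcwd()
        if 'cwd' in kwargs:
            workdir = kwargs['cwd']
        return workdir

    def workdir_new(kwargs):
        return kwargs.get('cwd', os.getcwd())

    assert workdir_old({'cwd': '/tmp'}) == workdir_new({'cwd': '/tmp'})
    assert workdir_old({}) == workdir_new({})
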
buildstream/source.py:

@@ -637,7 +637,7 @@ class Source(Plugin):
         # Source consistency interrogations are silent.
         context = self._get_context()
         with context.silence():
-            self.__consistency = self.get_consistency()
+            self.__consistency = self.get_consistency()  # pylint: disable=assignment-from-no-return
 
     # Return cached consistency
     #

@@ -687,14 +687,14 @@ class Source(Plugin):
 
         key['directory'] = self.__directory
         if include_source:
-            key['unique'] = self.get_unique_key()
+            key['unique'] = self.get_unique_key()  # pylint: disable=assignment-from-no-return
 
         return key
 
     # Wrapper for set_ref(), also returns whether it changed.
     #
     def _set_ref(self, ref, node):
-        current_ref = self.get_ref()
+        current_ref = self.get_ref()  # pylint: disable=assignment-from-no-return
         changed = False
 
         # This comparison should work even for tuples and lists,

@@ -773,7 +773,7 @@ class Source(Plugin):
         elif project.ref_storage == ProjectRefStorage.PROJECT_REFS:
 
             # First warn if there is a ref already loaded, and reset it
-            redundant_ref = self.get_ref()
+            redundant_ref = self.get_ref()  # pylint: disable=assignment-from-no-return
             if redundant_ref is not None:
                 self.set_ref(None, {})
 

@@ -883,7 +883,7 @@ class Source(Plugin):
         else:
             new_ref = self.__do_track()
 
-        current_ref = self.get_ref()
+        current_ref = self.get_ref()  # pylint: disable=assignment-from-no-return
 
         if new_ref is None:
             # No tracking, keep current ref

@@ -1038,15 +1038,12 @@ class Source(Plugin):
         if not mirrors or not alias:
             return self.track(**kwargs)
 
-        context = self._get_context()
-        source_kind = type(self)
-
         # NOTE: We are assuming here that tracking only requires substituting the
         # first alias used
         for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
             new_source = self.__clone_for_uri(uri)
             try:
-                ref = new_source.track(**kwargs)
+                ref = new_source.track(**kwargs)  # pylint: disable=assignment-from-none
                 # FIXME: Need to consider temporary vs. permanent failures,
                 # and how this works with retries.
             except BstError as e:

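Note: besides dropping the unused context and source_kind locals, this call site is marked assignment-from-none rather than assignment-from-no-return. The distinction is pylint's: from-none applies when the called function explicitly returns None (as the default Source.track() implementation presumably does, since tracking is optional for plugins), while from-no-return applies when the visible body never returns a value at all, as with the get_ref()/get_unique_key()/get_consistency() call sites above. A small sketch of the difference, with illustrative bodies:

    class SourceLike():
        def track(self, **kwargs):
            # Explicit 'return None' -> call sites get assignment-from-none.
            return None

        def get_ref(self):
            # Never returns a value -> call sites get assignment-from-no-return
            # (at least with the pylint versions this branch targets).
            raise NotImplementedError("provided by the concrete source plugin")

    src = SourceLike()
    ref = src.track()  # pylint: disable=assignment-from-none
    print(ref)  # -> None
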
buildstream/utils.py:

@@ -29,13 +29,13 @@ import re
 import shutil
 import signal
 import stat
+from stat import S_ISDIR
 import string
 import subprocess
 import tempfile
 import itertools
 import functools
 from contextlib import contextmanager
-from stat import S_ISDIR
 
 import psutil
 

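Note: moving the S_ISDIR import up beside import stat keeps the two stat imports adjacent, which is presumably what pylint's ungrouped-imports check (C0412) asked for; nothing changes behaviourally. A tiny illustration of the grouped form:

    import stat
    from stat import S_ISDIR  # adjacent to 'import stat', so C0412 stays quiet

    print(S_ISDIR(stat.S_IFDIR))  # -> True
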
@@ -1088,7 +1088,7 @@ def _call(*popenargs, terminate=False, **kwargs):
         os.killpg(group_id, signal.SIGCONT)
 
     with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
-        process = subprocess.Popen(*popenargs, preexec_fn=preexec_fn, **kwargs)
+        process = subprocess.Popen(*popenargs, preexec_fn=preexec_fn, **kwargs)  # pylint: disable=subprocess-popen-preexec-fn
         output, _ = process.communicate()
         exit_code = process.poll()
 

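Note: pylint's subprocess-popen-preexec-fn warning (W1509) exists because preexec_fn runs between fork and exec and can deadlock in multi-threaded programs. The surrounding code manages process groups and signals, so the hook is kept and the warning suppressed at the call site rather than restructuring the call. When a hook only needs to start a new session, the warning can also be avoided outright, as in this hedged sketch (illustrative, not the BuildStream code; POSIX only):

    import subprocess

    # Flagged by W1509:
    #   subprocess.Popen(["true"], preexec_fn=os.setsid)

    # Equivalent for the plain os.setsid case, and not flagged:
    proc = subprocess.Popen(["true"], start_new_session=True)
    proc.wait()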