Tom Pollard pushed to branch tpollard/908 at BuildStream / buildstream

Commits:

- 8b85b257 by Tom Pollard at 2019-02-21T17:58:21Z

2 changed files:

Changes:
+import os
+import shutil
+
+from . import _yaml
+from ._versions import BST_CORE_ARTIFACT_VERSION
+from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
+    ErrorDomain, ArtifactElementError
+from .utils import UtilError
+from . import Scope
+from . import utils
+from .types import _KeyStrength
+from .storage._casbaseddirectory import CasBasedDirectory
+from .storage.directory import VirtualDirectoryError
+
+
+# An Artifact class to abstract artifact operations
+# from the Element class
+#
+# Args:
+#     context (Context): The BuildStream context
+#
+class Artifact():
+
+    def __init__(self, context):
+        self.__artifacts = context.artifactcache
+        self.context = context
+
+    # Abstract the checking of buildtree state away from Element
+    #
+    def cached_buildtree(self, element):
+
+        context = self.context
+
+        if not element._cached():
+            return False
+
+        key_strength = _KeyStrength.STRONG if context.get_strict() else _KeyStrength.WEAK
+        if not self.__artifacts.contains_subdir_artifact(element, element._get_cache_key(strength=key_strength),
+                                                         'buildtree'):
+            return False
+
+        return True
+
+    def cache_artifact(self, element, rootdir, sandbox, collect, buildresult, keys, publicdata):
+        if collect is not None:
+            try:
+                sandbox_vroot = sandbox.get_virtual_directory()
+                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+            except VirtualDirectoryError:
+                # No collect directory existed
+                collectvdir = None
+
+        context = self.context
+
+        assemblevdir = CasBasedDirectory(cas_cache=self.__artifacts.cas, ref=None)
+        logsvdir = assemblevdir.descend("logs", create=True)
+        metavdir = assemblevdir.descend("meta", create=True)
+        buildtreevdir = assemblevdir.descend("buildtree", create=True)
+
+        # Create artifact directory structure
+        assembledir = os.path.join(rootdir, 'artifact')
+        logsdir = os.path.join(assembledir, 'logs')
+        metadir = os.path.join(assembledir, 'meta')
+        os.mkdir(assembledir)
+        os.mkdir(logsdir)
+        os.mkdir(metadir)
+
+        if collect is not None and collectvdir is not None:
+            filesvdir = assemblevdir.descend("files", create=True)
+            filesvdir.import_files(collectvdir)
+
+        cache_buildtrees = context.cache_buildtrees
+        build_success = buildresult[0]
+
+        # cache_buildtrees defaults to 'always', as such the
+        # default behaviour is to attempt to cache them. If only
+        # caching failed artifact buildtrees, then query the build
+        # result. Element types without a build-root dir will be cached
+        # with an empty buildtreedir regardless of this configuration.
+
+        if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
+            sandbox_vroot = sandbox.get_virtual_directory()
+            try:
+                sandbox_build_dir = sandbox_vroot.descend(
+                    element.get_variable('build-root').lstrip(os.sep).split(os.sep))
+                buildtreevdir.import_files(sandbox_build_dir)
+            except VirtualDirectoryError:
+                # Directory could not be found. Pre-virtual
+                # directory behaviour was to continue silently
+                # if the directory could not be found.
+                pass
+
+        # Write some logs out to normal directories: logsdir and metadir
+        # Copy build log
+        log_filename = context.get_log_filename()
+        element._build_log_path = os.path.join(logsdir, 'build.log')
+        if log_filename:
+            shutil.copyfile(log_filename, element._build_log_path)
+
+        # Store public data
+        _yaml.dump(_yaml.node_sanitize(publicdata), os.path.join(metadir, 'public.yaml'))
+
+        # Store result
+        build_result_dict = {"success": buildresult[0], "description": buildresult[1]}
+        if buildresult[2] is not None:
+            build_result_dict["detail"] = buildresult[2]
+        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+        # ensure we have cache keys
+        element._assemble_done()
+
+        # Store keys.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'strong': element._get_cache_key(),
+            'weak': element._get_cache_key(_KeyStrength.WEAK),
+        }), os.path.join(metadir, 'keys.yaml'))
+
+        # Store dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            e.name: e._get_cache_key() for e in element.dependencies(Scope.BUILD)
+        }), os.path.join(metadir, 'dependencies.yaml'))
+
+        # Store workspaced.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced': bool(element._get_workspace())
+        }), os.path.join(metadir, 'workspaced.yaml'))
+
+        # Store workspaced-dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced-dependencies': [
+                e.name for e in element.dependencies(Scope.BUILD)
+                if e._get_workspace()
+            ]
+        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+        metavdir.import_files(metadir)
+        logsvdir.import_files(logsdir)
+
+        artifact_size = assemblevdir.get_size()
+        self.__artifacts.commit(element, assemblevdir, keys)
+
+        if collect is not None and collectvdir is None:
+            raise ArtifactElementError("Not Found")
+
+        return artifact_size
+
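For orientation, the sketch below reproduces the staging layout and the build-result.yaml convention used by cache_artifact() above. It is a minimal, self-contained illustration, not BuildStream code: PyYAML's safe_dump stands in for the internal _yaml.dump, and the logs/files handling is omitted.

    import os
    import tempfile

    import yaml  # PyYAML, standing in for BuildStream's internal _yaml module


    def stage_artifact(rootdir, buildresult):
        # Mirror the artifact/{logs,meta} staging layout created above
        assembledir = os.path.join(rootdir, 'artifact')
        logsdir = os.path.join(assembledir, 'logs')
        metadir = os.path.join(assembledir, 'meta')
        os.mkdir(assembledir)
        os.mkdir(logsdir)
        os.mkdir(metadir)

        # build-result.yaml: the 'detail' field is only written when present
        success, description, detail = buildresult
        result = {"success": success, "description": description}
        if detail is not None:
            result["detail"] = detail
        with open(os.path.join(metadir, 'build-result.yaml'), 'w') as f:
            yaml.safe_dump(result, f)

        return assembledir


    with tempfile.TemporaryDirectory() as rootdir:
        print(stage_artifact(rootdir, (False, "failed", "missing dependency")))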
+    # load_public_data():
+    #
+    # Loads the public data from the cached artifact
+    #
+    def load_public_data(self, element):
+
+        assert element._cached()
+
+        # Load the public data from the artifact
+        artifact_base, _ = self.extract(element)
+        metadir = os.path.join(artifact_base, 'meta')
+        data = _yaml.load(os.path.join(metadir, 'public.yaml'))
+
+        return data
+
+    def load_build_result(self, element, key):
+
+        assert key is not None
+        artifact_base, _ = self.extract(element, key)
+
+        metadir = os.path.join(artifact_base, 'meta')
+        result_path = os.path.join(metadir, 'build-result.yaml')
+        if not os.path.exists(result_path):
+            build_result = (True, "succeeded", None)
+            return build_result
+
+        data = _yaml.load(result_path)
+        build_result = (data["success"], data.get("description"), data.get("detail"))
+
+        return build_result
+
+    def extract(self, element, key=None):
+
+        if key is None:
+            context = self.context
+
+            # Use the weak cache key if the strong cache key is unavailable
+            # and the context allows use of weak cache keys
+            key_strength = _KeyStrength.STRONG
+            key = element._get_cache_key(strength=key_strength)
+            if not context.get_strict() and not key:
+                key = element._get_cache_key(strength=_KeyStrength.WEAK)
+
+        return (self.__artifacts.extract(element, key), key)
+
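The fallback in extract() is easy to miss: in non-strict mode, a strong cache key that has not yet been resolved degrades to the weak key. A minimal sketch of just that selection logic, with a plain dict and a KeyStrength enum standing in for the element's cache keys:

    from enum import Enum


    class KeyStrength(Enum):
        STRONG = 0
        WEAK = 1


    def resolve_extract_key(strict, keys):
        # Prefer the strong key; fall back to the weak key only when the
        # strong key is not yet resolved and the context is not strict.
        key = keys[KeyStrength.STRONG]
        if not strict and not key:
            key = keys[KeyStrength.WEAK]
        return key


    keys = {KeyStrength.STRONG: None, KeyStrength.WEAK: "w123"}
    assert resolve_extract_key(strict=False, keys=keys) == "w123"
    assert resolve_extract_key(strict=True, keys=keys) is None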
+    def get_artifact_metadata_keys(self, element, key, metadata_keys):
+
+        # Now extract it and possibly derive the key
+        artifact_base, key = self.extract(element, key)
+
+        # Now try the cache, once we're sure about the key
+        if key in metadata_keys:
+            return (metadata_keys[key]['strong'],
+                    metadata_keys[key]['weak'], None)
+
+        # Parse the expensive yaml now and cache the result
+        meta_file = os.path.join(artifact_base, 'meta', 'keys.yaml')
+        meta = _yaml.load(meta_file)
+        strong_key = meta['strong']
+        weak_key = meta['weak']
+
+        assert key in (strong_key, weak_key)
+
+        metadata_keys[strong_key] = meta
+        metadata_keys[weak_key] = meta
+
+        return (strong_key, weak_key, metadata_keys)
+
+    def get_artifact_metadata_dependencies(self, element, key, metadata_dependencies, metadata_keys):
+
+        # Extract it and possibly derive the key
+        artifact_base, key = self.extract(element, key)
+
+        # Now try the cache, once we're sure about the key
+        if key in metadata_dependencies:
+            return (metadata_dependencies[key], None, None)
+
+        # Parse the expensive yaml now and cache the result
+        meta_file = os.path.join(artifact_base, 'meta', 'dependencies.yaml')
+        meta = _yaml.load(meta_file)
+
+        # Cache it under both strong and weak keys
+        strong_key, weak_key, metadata_keys = self.get_artifact_metadata_keys(element, key, metadata_keys)
+        metadata_dependencies[strong_key] = meta
+        metadata_dependencies[weak_key] = meta
+
+        return (meta, metadata_dependencies, metadata_keys)
+
+    def get_artifact_metadata_workspaced(self, element, key, metadata_workspaced, metadata_keys):
+
+        # Extract it and possibly derive the key
+        artifact_base, key = self.extract(element, key)
+
+        # Now try the cache, once we're sure about the key
+        if key in metadata_workspaced:
+            return (metadata_workspaced[key], None, None)
+
+        # Parse the expensive yaml now and cache the result
+        meta_file = os.path.join(artifact_base, 'meta', 'workspaced.yaml')
+        meta = _yaml.load(meta_file)
+        workspaced = meta['workspaced']
+
+        # Cache it under both strong and weak keys
+        strong_key, weak_key, metadata_keys = self.get_artifact_metadata_keys(element, key, metadata_keys)
+        metadata_workspaced[strong_key] = workspaced
+        metadata_workspaced[weak_key] = workspaced
+
+        return (workspaced, metadata_workspaced, metadata_keys)
+
|
255 |
+ def get_artifact_metadata_workspaced_dependencies(self, element, key, metadata_workspaced_dependencies,
|
|
256 |
+ metadata_keys):
|
|
257 |
+ |
|
258 |
+ # Extract it and possibly derive the key
|
|
259 |
+ artifact_base, key = self.extract(element, key)
|
|
260 |
+ |
|
261 |
+ # Now try the cache, once we're sure about the key
|
|
262 |
+ if key in metadata_workspaced_dependencies:
|
|
263 |
+ return (metadata_workspaced_dependencies[key], None, None)
|
|
264 |
+ |
|
265 |
+ # Parse the expensive yaml now and cache the result
|
|
266 |
+ meta_file = os.path.join(artifact_base, 'meta', 'workspaced-dependencies.yaml')
|
|
267 |
+ meta = _yaml.load(meta_file)
|
|
268 |
+ workspaced = meta['workspaced-dependencies']
|
|
269 |
+ |
|
270 |
+ # Cache it under both strong and weak keys
|
|
271 |
+ strong_key, weak_key, metadata_keys = self.get_artifact_metadata_keys(element, key, metadata_keys)
|
|
272 |
+ metadata_workspaced_dependencies[strong_key] = workspaced
|
|
273 |
+ metadata_workspaced_dependencies[weak_key] = workspaced
|
|
274 |
+ return (workspaced, metadata_workspaced_dependencies, metadata_keys)
|
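All four get_artifact_metadata_* methods above share one calling convention: they return the requested value together with the metadata dictionaries they touched, substituting None for any dictionary the caller does not need to update (i.e. on a cache hit). A self-contained sketch of that protocol, with a stubbed parse step standing in for the YAML load and a single cache standing in for the metadata dictionaries:

    def get_metadata(key, cache):
        # Cache hit: return the value and None, signalling "no update needed"
        if key in cache:
            return (cache[key], None)

        # Cache miss: "parse" the value (stubbed here), then return the updated cache
        value = "parsed-for-{}".format(key)
        cache[key] = value
        return (value, cache)


    local_cache = {}
    value, updated = get_metadata("k1", local_cache)
    if updated:                 # the caller reassigns only when an update came back
        local_cache = updated
    value, updated = get_metadata("k1", local_cache)
    assert updated is None      # second lookup is a pure cache hit

The hunks below update the Element implementation to construct an Artifact and delegate to it.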
@@ -88,7 +88,7 @@ from . import _yaml
 from ._variables import Variables
 from ._versions import BST_CORE_ARTIFACT_VERSION
 from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
-    ErrorDomain
+    ErrorDomain, ArtifactElementError
 from .utils import UtilError
 from . import Plugin, Consistency, Scope
 from . import SandboxFlags, SandboxCommandError
@@ -100,6 +100,7 @@ from ._platform import Platform
 from .sandbox._config import SandboxConfig
 from .sandbox._sandboxremote import SandboxRemote
 from .types import _KeyStrength, CoreWarnings
+from ._artifact import Artifact
 
 from .storage.directory import Directory
 from .storage._filebaseddirectory import FileBasedDirectory
@@ -218,6 +219,7 @@ class Element(Plugin):
         self.__required = False                   # Whether the artifact is required in the current session
         self.__build_result = None                # The result of assembling this Element (success, description, detail)
         self._build_log_path = None               # The path of the build log for this Element
+        self.__artifact = Artifact(context)
 
         self.__batch_prepare_assemble = False     # Whether batching across prepare()/assemble() is configured
         self.__batch_prepare_assemble_flags = 0   # Sandbox flags for batching across prepare()/assemble()
@@ -1671,112 +1673,18 @@ class Element(Plugin):
             cleanup_rootdir()
 
     def _cache_artifact(self, rootdir, sandbox, collect):
+        buildresult = self.__build_result
+        keys = self.__get_cache_keys_for_commit()
+        publicdata = self.__dynamic_public
         with self.timed_activity("Caching artifact"):
-            if collect is not None:
-                try:
-                    sandbox_vroot = sandbox.get_virtual_directory()
-                    collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
-                except VirtualDirectoryError:
-                    # No collect directory existed
-                    collectvdir = None
-
-            context = self._get_context()
-
-            assemblevdir = CasBasedDirectory(cas_cache=context.artifactcache.cas, ref=None)
-            logsvdir = assemblevdir.descend("logs", create=True)
-            metavdir = assemblevdir.descend("meta", create=True)
-            buildtreevdir = assemblevdir.descend("buildtree", create=True)
-
-            # Create artifact directory structure
-            assembledir = os.path.join(rootdir, 'artifact')
-            logsdir = os.path.join(assembledir, 'logs')
-            metadir = os.path.join(assembledir, 'meta')
-            os.mkdir(assembledir)
-            os.mkdir(logsdir)
-            os.mkdir(metadir)
-
-            if collect is not None and collectvdir is not None:
-                filesvdir = assemblevdir.descend("files", create=True)
-                filesvdir.import_files(collectvdir)
-
-            cache_buildtrees = context.cache_buildtrees
-            build_success = self.__build_result[0]
-
-            # cache_buildtrees defaults to 'always', as such the
-            # default behaviour is to attempt to cache them. If only
-            # caching failed artifact buildtrees, then query the build
-            # result. Element types without a build-root dir will be cached
-            # with an empty buildtreedir regardless of this configuration.
-
-            if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
-                sandbox_vroot = sandbox.get_virtual_directory()
-                try:
-                    sandbox_build_dir = sandbox_vroot.descend(
-                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
-                    buildtreevdir.import_files(sandbox_build_dir)
-                except VirtualDirectoryError:
-                    # Directory could not be found. Pre-virtual
-                    # directory behaviour was to continue silently
-                    # if the directory could not be found.
-                    pass
-
-            # Write some logs out to normal directories: logsdir and metadir
-            # Copy build log
-            log_filename = context.get_log_filename()
-            self._build_log_path = os.path.join(logsdir, 'build.log')
-            if log_filename:
-                shutil.copyfile(log_filename, self._build_log_path)
-
-            # Store public data
-            _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
-            # Store result
-            build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
-            if self.__build_result[2] is not None:
-                build_result_dict["detail"] = self.__build_result[2]
-            _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
-            # ensure we have cache keys
-            self._assemble_done()
-
-            # Store keys.yaml
-            _yaml.dump(_yaml.node_sanitize({
-                'strong': self._get_cache_key(),
-                'weak': self._get_cache_key(_KeyStrength.WEAK),
-            }), os.path.join(metadir, 'keys.yaml'))
-
-            # Store dependencies.yaml
-            _yaml.dump(_yaml.node_sanitize({
-                e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-            }), os.path.join(metadir, 'dependencies.yaml'))
-
-            # Store workspaced.yaml
-            _yaml.dump(_yaml.node_sanitize({
-                'workspaced': bool(self._get_workspace())
-            }), os.path.join(metadir, 'workspaced.yaml'))
-
-            # Store workspaced-dependencies.yaml
-            _yaml.dump(_yaml.node_sanitize({
-                'workspaced-dependencies': [
-                    e.name for e in self.dependencies(Scope.BUILD)
-                    if e._get_workspace()
-                ]
-            }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
-            metavdir.import_files(metadir)
-            logsvdir.import_files(logsdir)
-
-            artifact_size = assemblevdir.get_size()
-            self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
-
-            if collect is not None and collectvdir is None:
+            try:
+                return self.__artifact.cache_artifact(self, rootdir, sandbox, collect, buildresult, keys, publicdata)
+            except ArtifactElementError:
                 raise ElementError(
                     "Directory '{}' was not found inside the sandbox, "
                     "unable to collect artifact contents"
                     .format(collect))
 
-        return artifact_size
-
     def _get_build_log(self):
         return self._build_log_path
 
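Note the error-handling change in this hunk: the missing-collect-directory case is now signalled by the Artifact class via ArtifactElementError and translated back into the ElementError callers already expect. A minimal sketch of that boundary pattern, with simplified stand-in exception classes:

    class ArtifactElementError(Exception):
        """Stand-in for the internal error raised by Artifact.cache_artifact()."""


    class ElementError(Exception):
        """Stand-in for the user-facing error raised by Element."""


    def cache_artifact(collect, collectvdir):
        # Simplified: only the missing-collect-directory branch is modelled
        if collect is not None and collectvdir is None:
            raise ArtifactElementError("Not Found")
        return 0  # artifact size


    def _cache_artifact(collect, collectvdir):
        try:
            return cache_artifact(collect, collectvdir)
        except ArtifactElementError:
            # Translate the internal error into the type callers already expect
            raise ElementError(
                "Directory '{}' was not found inside the sandbox, "
                "unable to collect artifact contents".format(collect))


    try:
        _cache_artifact("/buildstream-install", None)
    except ElementError as e:
        print(e)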
@@ -2071,17 +1979,7 @@ class Element(Plugin):
     # not its contents.
     #
     def _cached_buildtree(self):
-        context = self._get_context()
-
-        if not self._cached():
-            return False
-
-        key_strength = _KeyStrength.STRONG if context.get_strict() else _KeyStrength.WEAK
-        if not self.__artifacts.contains_subdir_artifact(self, self._get_cache_key(strength=key_strength),
-                                                         'buildtree'):
-            return False
-
-        return True
+        return self.__artifact.cached_buildtree(self)
 
     # _fetch()
     #
@@ -2637,16 +2535,9 @@ class Element(Plugin):
     #
     def __extract(self, key=None):
 
-        if key is None:
-            context = self._get_context()
-            key = self.__strict_cache_key
-
-            # Use weak cache key, if artifact is missing for strong cache key
-            # and the context allows use of weak cache keys
-            if not context.get_strict() and not self.__artifacts.contains(self, key):
-                key = self._get_cache_key(strength=_KeyStrength.WEAK)
+        artifact_base, key = self.__artifact.extract(self, key)
 
-        return (self.__artifacts.extract(self, key), key)
+        return (artifact_base, key)
 
     # __get_artifact_metadata_keys():
     #
@@ -2661,24 +2552,14 @@ class Element(Plugin):
     #
     def __get_artifact_metadata_keys(self, key=None):
 
-        # Now extract it and possibly derive the key
-        artifact_base, key = self.__extract(key)
+        metadata_keys = self.__metadata_keys
 
-        # Now try the cache, once we're sure about the key
-        if key in self.__metadata_keys:
-            return (self.__metadata_keys[key]['strong'],
-                    self.__metadata_keys[key]['weak'])
+        strong_key, weak_key, metadata_keys = self.__artifact.get_artifact_metadata_keys(self, key, metadata_keys)
 
-        # Parse the expensive yaml now and cache the result
-        meta_file = os.path.join(artifact_base, 'meta', 'keys.yaml')
-        meta = _yaml.load(meta_file)
-        strong_key = meta['strong']
-        weak_key = meta['weak']
+        # Update keys if needed
+        if metadata_keys:
+            self.__metadata_keys = metadata_keys
 
-        assert key in (strong_key, weak_key)
-
-        self.__metadata_keys[strong_key] = meta
-        self.__metadata_keys[weak_key] = meta
         return (strong_key, weak_key)
 
     # __get_artifact_metadata_dependencies():
2693 | 2574 |
#
|
2694 | 2575 |
def __get_artifact_metadata_dependencies(self, key=None):
|
2695 | 2576 |
|
2696 |
- # Extract it and possibly derive the key
|
|
2697 |
- artifact_base, key = self.__extract(key)
|
|
2698 |
- |
|
2699 |
- # Now try the cache, once we're sure about the key
|
|
2700 |
- if key in self.__metadata_dependencies:
|
|
2701 |
- return self.__metadata_dependencies[key]
|
|
2577 |
+ metadata = [self.__metadata_dependencies, self.__metadata_keys]
|
|
2578 |
+ meta, meta_deps, meta_keys = self.__artifact.get_artifact_metadata_dependencies(self, key, *metadata)
|
|
2702 | 2579 |
|
2703 |
- # Parse the expensive yaml now and cache the result
|
|
2704 |
- meta_file = os.path.join(artifact_base, 'meta', 'dependencies.yaml')
|
|
2705 |
- meta = _yaml.load(meta_file)
|
|
2580 |
+ # Update deps if needed
|
|
2581 |
+ if meta_deps:
|
|
2582 |
+ self.__metadata_dependencies = meta_deps
|
|
2583 |
+ # Update keys if needed, no need to check if deps not updated
|
|
2584 |
+ if meta_keys:
|
|
2585 |
+ self.__metadata_keys = meta_keys
|
|
2706 | 2586 |
|
2707 |
- # Cache it under both strong and weak keys
|
|
2708 |
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
|
|
2709 |
- self.__metadata_dependencies[strong_key] = meta
|
|
2710 |
- self.__metadata_dependencies[weak_key] = meta
|
|
2711 | 2587 |
return meta
|
2712 | 2588 |
|
2713 | 2589 |
# __get_artifact_metadata_workspaced():
|
... | ... | @@ -2720,24 +2596,19 @@ class Element(Plugin): |
2720 | 2596 |
# Returns:
|
2721 | 2597 |
# (bool): Whether the given artifact was workspaced
|
2722 | 2598 |
#
|
2723 |
- def __get_artifact_metadata_workspaced(self, key=None):
|
|
2724 | 2599 |
|
2725 |
- # Extract it and possibly derive the key
|
|
2726 |
- artifact_base, key = self.__extract(key)
|
|
2600 |
+ def __get_artifact_metadata_workspaced(self, key=None):
|
|
2727 | 2601 |
|
2728 |
- # Now try the cache, once we're sure about the key
|
|
2729 |
- if key in self.__metadata_workspaced:
|
|
2730 |
- return self.__metadata_workspaced[key]
|
|
2602 |
+ metadata = [self.__metadata_workspaced, self.__metadata_keys]
|
|
2603 |
+ workspaced, meta_workspaced, meta_keys = self.__artifact.get_artifact_metadata_workspaced(self, key, *metadata)
|
|
2731 | 2604 |
|
2732 |
- # Parse the expensive yaml now and cache the result
|
|
2733 |
- meta_file = os.path.join(artifact_base, 'meta', 'workspaced.yaml')
|
|
2734 |
- meta = _yaml.load(meta_file)
|
|
2735 |
- workspaced = meta['workspaced']
|
|
2605 |
+ # Update workspaced if needed
|
|
2606 |
+ if meta_workspaced:
|
|
2607 |
+ self.__metadata_workspaced = meta_workspaced
|
|
2608 |
+ # Update keys if needed, no need to check if workspaced not updated
|
|
2609 |
+ if meta_keys:
|
|
2610 |
+ self.__metadata_keys = meta_keys
|
|
2736 | 2611 |
|
2737 |
- # Cache it under both strong and weak keys
|
|
2738 |
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
|
|
2739 |
- self.__metadata_workspaced[strong_key] = workspaced
|
|
2740 |
- self.__metadata_workspaced[weak_key] = workspaced
|
|
2741 | 2612 |
return workspaced
|
2742 | 2613 |
|
2743 | 2614 |
# __get_artifact_metadata_workspaced_dependencies():
|
... | ... | @@ -2752,22 +2623,17 @@ class Element(Plugin): |
2752 | 2623 |
#
|
2753 | 2624 |
def __get_artifact_metadata_workspaced_dependencies(self, key=None):
|
2754 | 2625 |
|
2755 |
- # Extract it and possibly derive the key
|
|
2756 |
- artifact_base, key = self.__extract(key)
|
|
2626 |
+ metadata = [self.__metadata_workspaced_dependencies, self.__metadata_keys]
|
|
2627 |
+ workspaced, meta_workspaced_deps,\
|
|
2628 |
+ meta_keys = self.__artifact.get_artifact_metadata_workspaced_dependencies(self, key, *metadata)
|
|
2757 | 2629 |
|
2758 |
- # Now try the cache, once we're sure about the key
|
|
2759 |
- if key in self.__metadata_workspaced_dependencies:
|
|
2760 |
- return self.__metadata_workspaced_dependencies[key]
|
|
2630 |
+ # Update workspaced if needed
|
|
2631 |
+ if meta_workspaced_deps:
|
|
2632 |
+ self.__metadata_workspaced_dependencies = meta_workspaced_deps
|
|
2633 |
+ # Update keys if needed, no need to check if workspaced not updated
|
|
2634 |
+ if meta_keys:
|
|
2635 |
+ self.__metadata_keys = meta_keys
|
|
2761 | 2636 |
|
2762 |
- # Parse the expensive yaml now and cache the result
|
|
2763 |
- meta_file = os.path.join(artifact_base, 'meta', 'workspaced-dependencies.yaml')
|
|
2764 |
- meta = _yaml.load(meta_file)
|
|
2765 |
- workspaced = meta['workspaced-dependencies']
|
|
2766 |
- |
|
2767 |
- # Cache it under both strong and weak keys
|
|
2768 |
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
|
|
2769 |
- self.__metadata_workspaced_dependencies[strong_key] = workspaced
|
|
2770 |
- self.__metadata_workspaced_dependencies[weak_key] = workspaced
|
|
2771 | 2637 |
return workspaced
|
2772 | 2638 |
|
2773 | 2639 |
# __load_public_data():
|
@@ -2775,29 +2641,20 @@ class Element(Plugin):
     # Loads the public data from the cached artifact
     #
     def __load_public_data(self):
-        self.__assert_cached()
         assert self.__dynamic_public is None
 
-        # Load the public data from the artifact
-        artifact_base, _ = self.__extract()
-        metadir = os.path.join(artifact_base, 'meta')
-        self.__dynamic_public = _yaml.load(os.path.join(metadir, 'public.yaml'))
+        self.__dynamic_public = self.__artifact.load_public_data(self)
 
     def __load_build_result(self, keystrength):
         self.__assert_cached(keystrength=keystrength)
         assert self.__build_result is None
 
-        artifact_base, _ = self.__extract(key=self.__weak_cache_key if keystrength is _KeyStrength.WEAK
-                                          else self.__strict_cache_key)
-
-        metadir = os.path.join(artifact_base, 'meta')
-        result_path = os.path.join(metadir, 'build-result.yaml')
-        if not os.path.exists(result_path):
-            self.__build_result = (True, "succeeded", None)
-            return
+        # _get_cache_key with _KeyStrength.STRONG returns self.__cache_key, which can be `None`,
+        # leading to a failed assertion from extract() using get_artifact_name(), so explicitly
+        # pass self.__strict_cache_key
+        key = self.__weak_cache_key if keystrength is _KeyStrength.WEAK else self.__strict_cache_key
 
-        data = _yaml.load(result_path)
-        self.__build_result = (data["success"], data.get("description"), data.get("detail"))
+        self.__build_result = self.__artifact.load_build_result(self, key)
 
     def __get_build_result(self, keystrength):
         if keystrength is None:
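One behavioural detail worth noting from load_build_result() in the new Artifact class: an artifact directory with no build-result.yaml is reported as a default success. A minimal, self-contained sketch of that default, using only the standard library and eliding the YAML parsing branch:

    import os
    import tempfile


    def load_build_result(metadir):
        # Mirrors Artifact.load_build_result(): an artifact with no recorded
        # build result is treated as having succeeded.
        result_path = os.path.join(metadir, 'build-result.yaml')
        if not os.path.exists(result_path):
            return (True, "succeeded", None)
        raise NotImplementedError("YAML parsing elided in this sketch")


    with tempfile.TemporaryDirectory() as metadir:
        assert load_build_result(metadir) == (True, "succeeded", None)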