Raoul Hidalgo Charman pushed to branch raoul/802-refactor-artifactcache at BuildStream / buildstream
Commits:
- 8221e090 by Raoul Hidalgo Charman at 2018-12-14T13:35:19Z
- 8d4b0bf9 by Raoul Hidalgo Charman at 2018-12-14T13:35:24Z
6 changed files:
- buildstream/_artifactcache.py
- buildstream/_cas/cascache.py
- buildstream/_cas/casremote.py
- buildstream/_context.py
- buildstream/data/userconfig.yaml
- tests/testutils/runcli.py
Changes:
| ... | ... | @@ -28,7 +28,7 @@ from ._message import Message, MessageType | 
| 28 | 28 |  from . import utils
 | 
| 29 | 29 |  from . import _yaml
 | 
| 30 | 30 |  | 
| 31 | -from ._cas.casremote import CASRemote, CASRemoteSpec
 | |
| 31 | +from ._cas.casremote import BlobNotFound, CASRemote, CASRemoteSpec
 | |
| 32 | 32 |  | 
| 33 | 33 |  | 
| 34 | 34 |  CACHE_SIZE_FILE = "cache_size"
 | 
| ... | ... | @@ -374,7 +374,7 @@ class ArtifactCache(): | 
| 374 | 374 |          q = multiprocessing.Queue()
 | 
| 375 | 375 |          for remote_spec in remote_specs:
 | 
| 376 | 376 |  | 
| 377 | -            error = CASRemote.check_remote(remote_spec, q)
 | |
| 377 | +            error = CASRemote.check_remote(remote_spec, self.context.tmpdir, q)
 | |
| 378 | 378 |  | 
| 379 | 379 |              if error and on_failure:
 | 
| 380 | 380 |                  on_failure(remote_spec.url, error)
 | 
| ... | ... | @@ -385,7 +385,7 @@ class ArtifactCache(): | 
| 385 | 385 |                  if remote_spec.push:
 | 
| 386 | 386 |                      self._has_push_remotes = True
 | 
| 387 | 387 |  | 
| 388 | -                remotes[remote_spec.url] = CASRemote(remote_spec)
 | |
| 388 | +                remotes[remote_spec.url] = CASRemote(remote_spec, self.context.tmpdir)
 | |
| 389 | 389 |  | 
| 390 | 390 |          for project in self.context.get_projects():
 | 
| 391 | 391 |              remote_specs = self.global_remote_specs
 | 
| ... | ... | @@ -634,7 +634,7 @@ class ArtifactCache(): | 
| 634 | 634 |      # Returns:
 | 
| 635 | 635 |      #   (bool): True if pull was successful, False if artifact was not available
 | 
| 636 | 636 |      #
 | 
| 637 | -    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 637 | +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=[]):
 | |
| 638 | 638 |          ref = self.get_artifact_fullname(element, key)
 | 
| 639 | 639 |  | 
| 640 | 640 |          project = element._get_project()
 | 
| ... | ... | @@ -644,13 +644,31 @@ class ArtifactCache(): | 
| 644 | 644 |                  display_key = element._get_brief_display_key()
 | 
| 645 | 645 |                  element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
 | 
| 646 | 646 |  | 
| 647 | -                if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
 | |
| 648 | -                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
 | |
| 647 | +                root_digest = remote.get_reference(ref)
 | |
| 648 | + | |
| 649 | +                if root_digest:
 | |
| 650 | +                    try:
 | |
| 651 | +                        for blob_digest in remote.yield_blob_digests(
 | |
| 652 | +                                root_digest, progress=progress, subdir=subdir,
 | |
| 653 | +                                excluded_subdirs=excluded_subdirs):
 | |
| 654 | +                            if self.cas.check_blob(blob_digest):
 | |
| 655 | +                                continue
 | |
| 656 | +                            remote.request_blob(blob_digest)
 | |
| 657 | +                            for blob_file in remote.get_blobs():
 | |
| 658 | +                                self.cas.add_object(path=blob_file.name, link_directly=True)
 | |
| 659 | +                        self.cas.set_ref(ref, root_digest)
 | |
| 660 | +                    except BlobNotFound:
 | |
| 661 | +                        element.info("Remote ({}) is missing blobs for {}".format(
 | |
| 662 | +                            remote.spec.url, element._get_brief_display_key()))
 | |
| 663 | +                        continue
 | |
| 664 | + | |
| 649 | 665 |                      if subdir:
 | 
| 650 | 666 |                          # Attempt to extract subdir into artifact extract dir if it already exists
 | 
| 651 | 667 |                          # without containing the subdir. If the respective artifact extract dir does not
 | 
| 652 | 668 |                          # exist a complete extraction will complete.
 | 
| 653 | 669 |                          self.extract(element, key, subdir)
 | 
| 670 | + | |
| 671 | +                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
 | |
| 654 | 672 |                      # no need to pull from additional remotes
 | 
| 655 | 673 |                      return True
 | 
| 656 | 674 |                  else:
 | 
| ... | ... | @@ -33,7 +33,7 @@ from .._protos.buildstream.v2 import buildstream_pb2 | 
| 33 | 33 |  from .. import utils
 | 
| 34 | 34 |  from .._exceptions import CASError
 | 
| 35 | 35 |  | 
| 36 | -from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
 | |
| 36 | +from .casremote import _CASBatchUpdate
 | |
| 37 | 37 |  | 
| 38 | 38 |  | 
| 39 | 39 |  # A CASCache manages a CAS repository as specified in the Remote Execution API.
 | 
| ... | ... | @@ -183,50 +183,6 @@ class CASCache(): | 
| 183 | 183 |  | 
| 184 | 184 |          return modified, removed, added
 | 
| 185 | 185 |  | 
| 186 | -    # pull():
 | |
| 187 | -    #
 | |
| 188 | -    # Pull a ref from a remote repository.
 | |
| 189 | -    #
 | |
| 190 | -    # Args:
 | |
| 191 | -    #     ref (str): The ref to pull
 | |
| 192 | -    #     remote (CASRemote): The remote repository to pull from
 | |
| 193 | -    #     progress (callable): The progress callback, if any
 | |
| 194 | -    #     subdir (str): The optional specific subdir to pull
 | |
| 195 | -    #     excluded_subdirs (list): The optional list of subdirs to not pull
 | |
| 196 | -    #
 | |
| 197 | -    # Returns:
 | |
| 198 | -    #   (bool): True if pull was successful, False if ref was not available
 | |
| 199 | -    #
 | |
| 200 | -    def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 201 | -        try:
 | |
| 202 | -            remote.init()
 | |
| 203 | - | |
| 204 | -            request = buildstream_pb2.GetReferenceRequest()
 | |
| 205 | -            request.key = ref
 | |
| 206 | -            response = remote.ref_storage.GetReference(request)
 | |
| 207 | - | |
| 208 | -            tree = remote_execution_pb2.Digest()
 | |
| 209 | -            tree.hash = response.digest.hash
 | |
| 210 | -            tree.size_bytes = response.digest.size_bytes
 | |
| 211 | - | |
| 212 | -            # Check if the element artifact is present, if so just fetch the subdir.
 | |
| 213 | -            if subdir and os.path.exists(self.objpath(tree)):
 | |
| 214 | -                self._fetch_subdir(remote, tree, subdir)
 | |
| 215 | -            else:
 | |
| 216 | -                # Fetch artifact, excluded_subdirs determined in pullqueue
 | |
| 217 | -                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
 | |
| 218 | - | |
| 219 | -            self.set_ref(ref, tree)
 | |
| 220 | - | |
| 221 | -            return True
 | |
| 222 | -        except grpc.RpcError as e:
 | |
| 223 | -            if e.code() != grpc.StatusCode.NOT_FOUND:
 | |
| 224 | -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
 | |
| 225 | -            else:
 | |
| 226 | -                return False
 | |
| 227 | -        except BlobNotFound as e:
 | |
| 228 | -            return False
 | |
| 229 | - | |
| 230 | 186 |      # pull_tree():
 | 
| 231 | 187 |      #
 | 
| 232 | 188 |      # Pull a single Tree rather than a ref.
 | 
| ... | ... | @@ -591,6 +547,16 @@ class CASCache(): | 
| 591 | 547 |          reachable = set()
 | 
| 592 | 548 |          self._reachable_refs_dir(reachable, tree, update_mtime=True)
 | 
| 593 | 549 |  | 
| 550 | +    # Check whether a blob is present in the local CAS;
 | |
| 551 | +    # returns the object path if so, None otherwise.
 | |
| 552 | +    def check_blob(self, digest):
 | |
| 553 | +        objpath = self.objpath(digest)
 | |
| 554 | +        if os.path.exists(objpath):
 | |
| 555 | +            # already in local repository
 | |
| 556 | +            return objpath
 | |
| 557 | +        else:
 | |
| 558 | +            return None
 | |
| 559 | + | |
| 594 | 560 |      ################################################
 | 
| 595 | 561 |      #             Local Private Methods            #
 | 
| 596 | 562 |      ################################################
 | 
| ... | ... | @@ -805,103 +771,6 @@ class CASCache(): | 
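check_blob() gives callers a cheap local-presence test, which is what lets the pull loop above skip network requests for objects it already has. A hedged usage sketch, assuming `cas` is a CASCache and `digest` a remote_execution_pb2.Digest:

    objpath = cas.check_blob(digest)
    if objpath is not None:
        # Already cached: objpath points at the object file in the local CAS.
        with open(objpath, 'rb') as f:
            data = f.read()
    else:
        # Not cached locally; the blob must be fetched from a remote
        # (e.g. via remote.request_blob(digest)) before it can be used.
        pass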
| 805 | 771 |  | 
| 806 | 772 |          return objpath
 | 
| 807 | 773 |  | 
| 808 | -    def _batch_download_complete(self, batch):
 | |
| 809 | -        for digest, data in batch.send():
 | |
| 810 | -            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
 | |
| 811 | -                f.write(data)
 | |
| 812 | -                f.flush()
 | |
| 813 | - | |
| 814 | -                added_digest = self.add_object(path=f.name, link_directly=True)
 | |
| 815 | -                assert added_digest.hash == digest.hash
 | |
| 816 | - | |
| 817 | -    # Helper function for _fetch_directory().
 | |
| 818 | -    def _fetch_directory_batch(self, remote, batch, fetch_queue, fetch_next_queue):
 | |
| 819 | -        self._batch_download_complete(batch)
 | |
| 820 | - | |
| 821 | -        # All previously scheduled directories are now locally available,
 | |
| 822 | -        # move them to the processing queue.
 | |
| 823 | -        fetch_queue.extend(fetch_next_queue)
 | |
| 824 | -        fetch_next_queue.clear()
 | |
| 825 | -        return _CASBatchRead(remote)
 | |
| 826 | - | |
| 827 | -    # Helper function for _fetch_directory().
 | |
| 828 | -    def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False):
 | |
| 829 | -        in_local_cache = os.path.exists(self.objpath(digest))
 | |
| 830 | - | |
| 831 | -        if in_local_cache:
 | |
| 832 | -            # Skip download, already in local cache.
 | |
| 833 | -            pass
 | |
| 834 | -        elif (digest.size_bytes >= remote.max_batch_total_size_bytes or
 | |
| 835 | -              not remote.batch_read_supported):
 | |
| 836 | -            # Too large for batch request, download in independent request.
 | |
| 837 | -            self._ensure_blob(remote, digest)
 | |
| 838 | -            in_local_cache = True
 | |
| 839 | -        else:
 | |
| 840 | -            if not batch.add(digest):
 | |
| 841 | -                # Not enough space left in batch request.
 | |
| 842 | -                # Complete pending batch first.
 | |
| 843 | -                batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
 | |
| 844 | -                batch.add(digest)
 | |
| 845 | - | |
| 846 | -        if recursive:
 | |
| 847 | -            if in_local_cache:
 | |
| 848 | -                # Add directory to processing queue.
 | |
| 849 | -                fetch_queue.append(digest)
 | |
| 850 | -            else:
 | |
| 851 | -                # Directory will be available after completing pending batch.
 | |
| 852 | -                # Add directory to deferred processing queue.
 | |
| 853 | -                fetch_next_queue.append(digest)
 | |
| 854 | - | |
| 855 | -        return batch
 | |
| 856 | - | |
| 857 | -    # _fetch_directory():
 | |
| 858 | -    #
 | |
| 859 | -    # Fetches remote directory and adds it to content addressable store.
 | |
| 860 | -    #
 | |
| 861 | -    # Fetches files, symbolic links and recursively other directories in
 | |
| 862 | -    # the remote directory and adds them to the content addressable
 | |
| 863 | -    # store.
 | |
| 864 | -    #
 | |
| 865 | -    # Args:
 | |
| 866 | -    #     remote (Remote): The remote to use.
 | |
| 867 | -    #     dir_digest (Digest): Digest object for the directory to fetch.
 | |
| 868 | -    #     excluded_subdirs (list): The optional list of subdirs to not fetch
 | |
| 869 | -    #
 | |
| 870 | -    def _fetch_directory(self, remote, dir_digest, *, excluded_subdirs=None):
 | |
| 871 | -        fetch_queue = [dir_digest]
 | |
| 872 | -        fetch_next_queue = []
 | |
| 873 | -        batch = _CASBatchRead(remote)
 | |
| 874 | -        if not excluded_subdirs:
 | |
| 875 | -            excluded_subdirs = []
 | |
| 876 | - | |
| 877 | -        while len(fetch_queue) + len(fetch_next_queue) > 0:
 | |
| 878 | -            if not fetch_queue:
 | |
| 879 | -                batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
 | |
| 880 | - | |
| 881 | -            dir_digest = fetch_queue.pop(0)
 | |
| 882 | - | |
| 883 | -            objpath = self._ensure_blob(remote, dir_digest)
 | |
| 884 | - | |
| 885 | -            directory = remote_execution_pb2.Directory()
 | |
| 886 | -            with open(objpath, 'rb') as f:
 | |
| 887 | -                directory.ParseFromString(f.read())
 | |
| 888 | - | |
| 889 | -            for dirnode in directory.directories:
 | |
| 890 | -                if dirnode.name not in excluded_subdirs:
 | |
| 891 | -                    batch = self._fetch_directory_node(remote, dirnode.digest, batch,
 | |
| 892 | -                                                       fetch_queue, fetch_next_queue, recursive=True)
 | |
| 893 | - | |
| 894 | -            for filenode in directory.files:
 | |
| 895 | -                batch = self._fetch_directory_node(remote, filenode.digest, batch,
 | |
| 896 | -                                                   fetch_queue, fetch_next_queue)
 | |
| 897 | - | |
| 898 | -        # Fetch final batch
 | |
| 899 | -        self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
 | |
| 900 | - | |
| 901 | -    def _fetch_subdir(self, remote, tree, subdir):
 | |
| 902 | -        subdirdigest = self._get_subdir(tree, subdir)
 | |
| 903 | -        self._fetch_directory(remote, subdirdigest)
 | |
| 904 | - | |
| 905 | 774 |      def _fetch_tree(self, remote, digest):
 | 
| 906 | 775 |          # download but do not store the Tree object
 | 
| 907 | 776 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
 | 
| ... | ... | @@ -3,6 +3,7 @@ import io | 
| 3 | 3 |  import os
 | 
| 4 | 4 |  import multiprocessing
 | 
| 5 | 5 |  import signal
 | 
| 6 | +import tempfile
 | |
| 6 | 7 |  from urllib.parse import urlparse
 | 
| 7 | 8 |  import uuid
 | 
| 8 | 9 |  | 
| ... | ... | @@ -77,7 +78,7 @@ class BlobNotFound(CASError): | 
| 77 | 78 |  # Represents a single remote CAS cache.
 | 
| 78 | 79 |  #
 | 
| 79 | 80 |  class CASRemote():
 | 
| 80 | -    def __init__(self, spec):
 | |
| 81 | +    def __init__(self, spec, tmpdir):
 | |
| 81 | 82 |          self.spec = spec
 | 
| 82 | 83 |          self._initialized = False
 | 
| 83 | 84 |          self.channel = None
 | 
| ... | ... | @@ -89,6 +90,11 @@ class CASRemote(): | 
| 89 | 90 |          self.capabilities = None
 | 
| 90 | 91 |          self.max_batch_total_size_bytes = None
 | 
| 91 | 92 |  | 
| 93 | +        self.tmpdir = tmpdir
 | |
| 94 | +        os.makedirs(tmpdir, exist_ok=True)
 | |
| 95 | + | |
| 96 | +        self.__tmp_downloads = []  # files in the tmpdir waiting to be added to local caches
 | |
| 97 | + | |
| 92 | 98 |      def init(self):
 | 
| 93 | 99 |          if not self._initialized:
 | 
| 94 | 100 |              url = urlparse(self.spec.url)
 | 
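Since a CASRemote now stages downloads on disk, constructing one requires a tmpdir, which the constructor creates eagerly with os.makedirs(..., exist_ok=True). A construction sketch mirroring the ArtifactCache wiring above (the `context` name is an assumption here):

    # The staging directory comes from the new context.tmpdir setting
    # (see the userconfig.yaml and _context.py hunks below).
    remote = CASRemote(remote_spec, context.tmpdir)
    remote.init()  # opens the gRPC channel and queries capabilities on first call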
| ... | ... | @@ -170,11 +176,11 @@ class CASRemote(): | 
| 170 | 176 |      # in the main BuildStream process
 | 
| 171 | 177 |      # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
 | 
| 172 | 178 |      @classmethod
 | 
| 173 | -    def check_remote(cls, remote_spec, q):
 | |
| 179 | +    def check_remote(cls, remote_spec, tmpdir, q):
 | |
| 174 | 180 |  | 
| 175 | 181 |          def __check_remote():
 | 
| 176 | 182 |              try:
 | 
| 177 | -                remote = cls(remote_spec)
 | |
| 183 | +                remote = cls(remote_spec, tmpdir)
 | |
| 178 | 184 |                  remote.init()
 | 
| 179 | 185 |  | 
| 180 | 186 |                  request = buildstream_pb2.StatusRequest()
 | 
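check_remote() still performs its probe in a subprocess via a multiprocessing.Queue, so no gRPC channel is ever created before fork in the main BuildStream process; only its signature grows the tmpdir parameter. A hedged call sketch matching the ArtifactCache hunk above:

    q = multiprocessing.Queue()
    error = CASRemote.check_remote(remote_spec, context.tmpdir, q)
    if error and on_failure:
        on_failure(remote_spec.url, error)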
| ... | ... | @@ -252,6 +258,70 @@ class CASRemote(): | 
| 252 | 258 |  | 
| 253 | 259 |          return message_digest
 | 
| 254 | 260 |  | 
| 261 | +    # get_reference():
 | |
| 262 | +    #
 | |
| 263 | +    # Args:
 | |
| 264 | +    #    ref (str): The ref to request
 | |
| 265 | +    # Returns: (Digest) the root digest, or None if the ref is absent
 | |
| 266 | +    def get_reference(self, ref):
 | |
| 267 | +        try:
 | |
| 268 | +            self.init()
 | |
| 269 | + | |
| 270 | +            request = buildstream_pb2.GetReferenceRequest()
 | |
| 271 | +            request.key = ref
 | |
| 272 | +            return self.ref_storage.GetReference(request).digest
 | |
| 273 | +        except grpc.RpcError as e:
 | |
| 274 | +            if e.code() != grpc.StatusCode.NOT_FOUND:
 | |
| 275 | +                raise CASError("Failed to find ref {}: {}".format(ref, e)) from e
 | |
| 276 | +            else:
 | |
| 277 | +                return None
 | |
| 278 | + | |
| 279 | +    # yield_blob_digests():
 | |
| 280 | +    #
 | |
| 281 | +    # Iterate over the blob digests reachable from a root directory digest
 | |
| 282 | +    #
 | |
| 283 | +    # Args:
 | |
| 284 | +    #     root_digest (Digest): The root directory digest to walk
 | |
| 285 | +    #     progress (callable): The progress callback, if any
 | |
| 286 | +    #     subdir (str): The optional specific subdir to pull
 | |
| 287 | +    #     excluded_subdirs (list): The optional list of subdirs to not pull
 | |
| 288 | +    #
 | |
| 289 | +    # Returns:
 | |
| 290 | +    #   (iter): iterator over the blob digests required for the ref
 | |
| 291 | +    #
 | |
| 292 | +    def yield_blob_digests(self, root_digest, *, progress=None, subdir=None, excluded_subdirs=[]):
 | |
| 293 | +        self.init()
 | |
| 294 | + | |
| 295 | +        # TODO add subdir stuff
 | |
| 296 | +        # Fetch artifact, excluded_subdirs determined in pullqueue
 | |
| 297 | +        yield from self._yield_directory_digests(root_digest, excluded_subdirs=excluded_subdirs)
 | |
| 298 | + | |
| 299 | +    # request_blob():
 | |
| 300 | +    #
 | |
| 301 | +    # Request a blob and return the path to its staged copy in tmpdir
 | |
| 302 | +    #
 | |
| 303 | +    # Args:
 | |
| 304 | +    #    digest (Digest): digest of the requested blob
 | |
| 305 | +    # Returns: (str) the tmpdir path of the downloaded blob
 | |
| 306 | +    #
 | |
| 307 | +    def request_blob(self, digest):
 | |
| 308 | +        # TODO expand for adding to batches some other logic
 | |
| 309 | +        f = tempfile.NamedTemporaryFile(dir=self.tmpdir)
 | |
| 310 | +        self._fetch_blob(digest, f)
 | |
| 311 | +        self.__tmp_downloads.append(f)
 | |
| 312 | +        return f.name
 | |
| 313 | + | |
| 314 | +    # get_blobs():
 | |
| 315 | +    #
 | |
| 316 | +    # Yield the downloaded blobs as temporary files; each file is
 | |
| 317 | +    # deleted once the caller's reference to it goes out of scope.
 | |
| 318 | +    #
 | |
| 319 | +    # Returns:
 | |
| 320 | +    #    (iter): iterator over the staged NamedTemporaryFile objects
 | |
| 321 | +    def get_blobs(self):
 | |
| 322 | +        while self.__tmp_downloads:
 | |
| 323 | +            yield self.__tmp_downloads.pop()
 | |
| 324 | + | |
| 255 | 325 |      ################################################
 | 
| 256 | 326 |      #             Local Private Methods            #
 | 
| 257 | 327 |      ################################################
 | 
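The request_blob()/get_blobs() pair separates downloading from consumption: request_blob() writes into a NamedTemporaryFile under tmpdir and parks it in the private __tmp_downloads list, and get_blobs() pops each file so that the temp file is unlinked once the caller drops its reference. A sketch of the intended consumption pattern (the `cas` name is an assumption):

    remote.request_blob(digest)               # download staged under remote.tmpdir
    for blob_file in remote.get_blobs():      # pops from __tmp_downloads
        # Link the staged file into the local CAS before blob_file is
        # garbage-collected, which would delete the underlying temp file.
        cas.add_object(path=blob_file.name, link_directly=True)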
| ... | ... | @@ -266,6 +336,36 @@ class CASRemote(): | 
| 266 | 336 |  | 
| 267 | 337 |          assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 | 
| 268 | 338 |  | 
| 339 | +    # _yield_directory_digests():
 | |
| 340 | +    #
 | |
| 341 | +    # Walks a remote directory tree, yielding blob digests.
 | |
| 342 | +    #
 | |
| 343 | +    # Fetches each directory node in order to parse it, then yields
 | |
| 344 | +    # the digests of the directory itself, its files and, recursively,
 | |
| 345 | +    # each subdirectory whose name is not in excluded_subdirs.
 | |
| 346 | +    #
 | |
| 347 | +    # Args:
 | |
| 348 | +    #     dir_digest (Digest): Digest object for the directory to walk.
 | |
| 349 | +    #     excluded_subdirs (list): The optional list of subdirs to not fetch
 | |
| 350 | +    #
 | |
| 351 | +    def _yield_directory_digests(self, dir_digest, *, excluded_subdirs=[]):
 | |
| 352 | + | |
| 353 | +        objpath = self.request_blob(dir_digest)
 | |
| 354 | + | |
| 355 | +        directory = remote_execution_pb2.Directory()
 | |
| 356 | + | |
| 357 | +        with open(objpath, 'rb') as f:
 | |
| 358 | +            directory.ParseFromString(f.read())
 | |
| 359 | + | |
| 360 | +        yield dir_digest
 | |
| 361 | +        for filenode in directory.files:
 | |
| 362 | +            yield filenode.digest
 | |
| 363 | + | |
| 364 | +        for dirnode in directory.directories:
 | |
| 365 | +            if dirnode.name not in excluded_subdirs:
 | |
| 366 | +                yield dirnode.digest
 | |
| 367 | +                yield from self._yield_directory_digests(dirnode.digest)
 | |
| 368 | + | |
| 269 | 369 |      def _send_blob(self, digest, stream, u_uid=uuid.uuid4()):
 | 
| 270 | 370 |          resource_name = '/'.join(['uploads', str(u_uid), 'blobs',
 | 
| 271 | 371 |                                    digest.hash, str(digest.size_bytes)])
 | 
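_yield_directory_digests() walks the tree in pre-order: the directory's own digest first, then its file digests, then each non-excluded subdirectory recursively. A subdirectory's digest is yielded twice, once by its parent and once by the recursive call, which is harmless because the pull loop skips digests already present locally; note also that excluded_subdirs filters only at the level where it is passed, since the recursive call does not forward it. An illustrative yield order for a hypothetical tree:

    # root/               -> root digest
    #   a.txt             -> a.txt digest
    #   logs/  (excluded) -> subtree skipped entirely
    #   sub/              -> sub digest (from the parent, then again on recursion)
    #     b.txt           -> b.txt digest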
| ... | ... | @@ -182,10 +182,11 @@ class Context(): | 
| 182 | 182 |          _yaml.node_validate(defaults, [
 | 
| 183 | 183 |              'sourcedir', 'builddir', 'artifactdir', 'logdir',
 | 
| 184 | 184 |              'scheduler', 'artifacts', 'logging', 'projects',
 | 
| 185 | -            'cache', 'prompt', 'workspacedir',
 | |
| 185 | +            'cache', 'prompt', 'workspacedir', 'tmpdir'
 | |
| 186 | 186 |          ])
 | 
| 187 | 187 |  | 
| 188 | -        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
 | |
| 188 | +        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir',
 | |
| 189 | +                          'tmpdir', 'workspacedir']:
 | |
| 189 | 190 |              # Allow the ~ tilde expansion and any environment variables in
 | 
| 190 | 191 |              # path specification in the config files.
 | 
| 191 | 192 |              #
 | 
| ... | ... | @@ -19,6 +19,9 @@ builddir: ${XDG_CACHE_HOME}/buildstream/build | 
| 19 | 19 |  # Location to store local binary artifacts
 | 
| 20 | 20 |  artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
 | 
| 21 | 21 |  | 
| 22 | +# Location for temporary files, used by CAS remotes to stage downloads
 | |
| 23 | +tmpdir: ${XDG_CACHE_HOME}/buildstream/tmp
 | |
| 24 | + | |
| 22 | 25 |  # Location to store build logs
 | 
| 23 | 26 |  logdir: ${XDG_CACHE_HOME}/buildstream/logs
 | 
| 24 | 27 |  | 
| ... | ... | @@ -495,7 +495,8 @@ def cli_integration(tmpdir, integration_cache): | 
| 495 | 495 |      # to avoid downloading the huge base-sdk repeatedly
 | 
| 496 | 496 |      fixture.configure({
 | 
| 497 | 497 |          'sourcedir': os.path.join(integration_cache, 'sources'),
 | 
| 498 | -        'artifactdir': os.path.join(integration_cache, 'artifacts')
 | |
| 498 | +        'artifactdir': os.path.join(integration_cache, 'artifacts'),
 | |
| 499 | +        'tmpdir': os.path.join(integration_cache, 'tmp')
 | |
| 499 | 500 |      })
 | 
| 500 | 501 |  | 
| 501 | 502 |      return fixture
 | 
| ... | ... | @@ -539,6 +540,8 @@ def configured(directory, config=None): | 
| 539 | 540 |          config['builddir'] = os.path.join(directory, 'build')
 | 
| 540 | 541 |      if not config.get('artifactdir', False):
 | 
| 541 | 542 |          config['artifactdir'] = os.path.join(directory, 'artifacts')
 | 
| 543 | +    if not config.get('tmpdir', False):
 | |
| 544 | +        config['tmpdir'] = os.path.join(directory, 'tmp')
 | |
| 542 | 545 |      if not config.get('logdir', False):
 | 
| 543 | 546 |          config['logdir'] = os.path.join(directory, 'logs')
 | 
| 544 | 547 |  | 
