[Notes] [Git][BuildStream/buildstream][mac_fixes] 26 commits: element.py: Add __use_remote_execution() helper method



Jürg Billeter pushed to branch mac_fixes at BuildStream / buildstream

Commits:

24 changed files:

Changes:

  • buildstream/_artifactcache/cascache.py
    @@ -54,7 +54,6 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024
     #
     # Args:
     #     context (Context): The BuildStream context
    -#     enable_push (bool): Whether pushing is allowed by the platform
     #
     # Pushing is explicitly disabled by the platform in some cases,
     # like when we are falling back to functioning without using
    @@ -62,7 +61,7 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024
     #
     class CASCache(ArtifactCache):
     
    -    def __init__(self, context, *, enable_push=True):
    +    def __init__(self, context):
             super().__init__(context)
     
             self.casdir = os.path.join(context.artifactdir, 'cas')
    @@ -71,8 +70,6 @@ class CASCache(ArtifactCache):
     
             self._calculate_cache_quota()
     
    -        self._enable_push = enable_push
    -
             # Per-project list of _CASRemote instances.
             self._remotes = {}
     
    @@ -214,7 +211,7 @@ class CASCache(ArtifactCache):
                 return bool(remotes_for_project)
     
         def has_push_remotes(self, *, element=None):
    -        if not self._has_push_remotes or not self._enable_push:
    +        if not self._has_push_remotes:
                 # No project has push remotes
                 return False
             elif element is None:

  • buildstream/_artifactcache/casserver.py
    @@ -35,8 +35,6 @@ from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
     from .._exceptions import ArtifactError
     from .._context import Context
     
    -from .cascache import CASCache
    -
     
     # The default limit for gRPC messages is 4 MiB.
     # Limit payload to 1 MiB to leave sufficient headroom for metadata.
    @@ -60,7 +58,7 @@ def create_server(repo, *, enable_push):
         context = Context()
         context.artifactdir = os.path.abspath(repo)
     
    -    artifactcache = CASCache(context)
    +    artifactcache = context.artifactcache
     
         # Use max_workers default from Python 3.5+
         max_workers = (os.cpu_count() or 1) * 5
    

  • buildstream/_context.py
    @@ -30,6 +30,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
     from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
     from ._artifactcache import ArtifactCache
    +from ._artifactcache.cascache import CASCache
     from ._workspaces import Workspaces
     from .plugin import _plugin_lookup
     
    @@ -113,6 +114,7 @@ class Context():
             self._cache_key = None
             self._message_handler = None
             self._message_depth = deque()
    +        self._artifactcache = None
             self._projects = []
             self._project_overrides = {}
             self._workspaces = None
    @@ -227,6 +229,13 @@ class Context():
                                 "{}: on-error should be one of: {}".format(
                                     provenance, ", ".join(valid_actions)))
     
    +    @property
    +    def artifactcache(self):
    +        if not self._artifactcache:
    +            self._artifactcache = CASCache(self)
    +
    +        return self._artifactcache
    +
         # add_project():
         #
         # Add a project to the context.
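
    The interesting part of this change is that the Context now owns a lazily
    constructed CASCache. A minimal self-contained sketch of the same property
    pattern (the _Ctx and _Cache names are stand-ins, not BuildStream classes),
    showing why every caller of context.artifactcache shares one cache instance:

        class _Cache:
            def __init__(self, owner):
                print("constructing cache")   # happens exactly once
                self.owner = owner

        class _Ctx:
            def __init__(self):
                self._cache = None

            @property
            def cache(self):
                # Same shape as Context.artifactcache above: build on first
                # access, then hand back the cached instance ever after.
                if not self._cache:
                    self._cache = _Cache(self)
                return self._cache

        ctx = _Ctx()
        assert ctx.cache is ctx.cache   # "constructing cache" prints only once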
    

  • buildstream/_frontend/app.py
    @@ -115,14 +115,6 @@ class App():
             else:
                 self.colors = False
     
    -        # Increase the soft limit for open file descriptors to the maximum.
    -        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
    -        # Avoid hitting the limit too quickly.
    -        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    -        if limits[0] != limits[1]:
    -            # Set soft limit to hard limit
    -            resource.setrlimit(resource.RLIMIT_NOFILE, (limits[1], limits[1]))
    -
         # create()
         #
         # Should be used instead of the regular constructor.
    @@ -198,7 +190,7 @@ class App():
                 if option_value is not None:
                     setattr(self.context, context_attr, option_value)
             try:
    -            Platform.create_instance(self.context)
    +            Platform.get_platform()
             except BstError as e:
                 self._error_exit(e, "Error instantiating platform")
     

  • buildstream/_loader/loader.py
    @@ -28,7 +28,6 @@ from .. import Consistency
     from .. import _yaml
     from ..element import Element
     from .._profile import Topics, profile_start, profile_end
    -from .._platform import Platform
     from .._includes import Includes
     
     from .types import Symbol, Dependency
    @@ -518,8 +517,7 @@ class Loader():
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
     
    -        platform = Platform.get_platform()
    -        element = Element._new_from_meta(meta_element, platform.artifactcache)
    +        element = Element._new_from_meta(meta_element, self._context.artifactcache)
             element._preflight()
     
             sources = list(element.sources())
    

  • buildstream/_platform/darwin.py
    @@ -0,0 +1,50 @@
    +#
    +#  Copyright (C) 2017 Codethink Limited
    +#  Copyright (C) 2018 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +
    +import os
    +import resource
    +
    +from .._exceptions import PlatformError
    +from ..sandbox import SandboxDummy
    +
    +from . import Platform
    +
    +
    +class Darwin(Platform):
    +
    +    # This value comes from OPEN_MAX in syslimits.h
    +    OPEN_MAX = 10240
    +
    +    def __init__(self, context):
    +
    +        super().__init__(context)
    +
    +    def create_sandbox(self, *args, **kwargs):
    +        return SandboxDummy(*args, **kwargs)
    +
    +    def check_sandbox_config(self, config):
    +        # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
    +        return True
    +
    +    def get_cpu_count(self, cap=None):
    +        if cap < os.cpu_count():
    +            return cap
    +        else:
    +            return os.cpu_count()
    +
    +    def set_resource_limits(self, soft_limit=OPEN_MAX, hard_limit=None):
    +        super().set_resource_limits(soft_limit)
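
    For reference, a self-contained sketch of what the capped CPU count amounts
    to on macOS, where os.sched_getaffinity() is not available; the handling of
    a missing cap here is an assumption added for the standalone demo, not part
    of the committed method:

        import os

        def get_cpu_count(cap=None):
            # os.sched_getaffinity() does not exist on macOS, so fall back to
            # os.cpu_count(); cap the result when a cap is given.
            ncpus = os.cpu_count() or 1
            return min(cap, ncpus) if cap is not None else ncpus

        print(get_cpu_count(8))   # e.g. 8 on a 12-core machine, 4 on a 4-core one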

  • buildstream/_platform/linux.py
    @@ -17,42 +17,62 @@
     #  Authors:
     #        Tristan Maat <tristan maat codethink co uk>
     
    +import os
     import subprocess
     
     from .. import _site
     from .. import utils
    -from .._artifactcache.cascache import CASCache
     from .._message import Message, MessageType
    -from ..sandbox import SandboxBwrap
    +from ..sandbox import SandboxDummy
     
     from . import Platform
     
     
     class Linux(Platform):
     
    -    def __init__(self, context):
    +    def __init__(self):
     
    -        super().__init__(context)
    +        super().__init__()
    +
    +        self._uid = os.geteuid()
    +        self._gid = os.getegid()
     
             self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
    -        self._user_ns_available = self._check_user_ns_available(context)
    -        self._artifact_cache = CASCache(context, enable_push=self._user_ns_available)
     
    -    @property
    -    def artifactcache(self):
    -        return self._artifact_cache
    +        if self._local_sandbox_available():
    +            self._user_ns_available = self._check_user_ns_available()
    +        else:
    +            self._user_ns_available = False
     
         def create_sandbox(self, *args, **kwargs):
    -        # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    -        kwargs['user_ns_available'] = self._user_ns_available
    -        kwargs['die_with_parent_available'] = self._die_with_parent_available
    -        return SandboxBwrap(*args, **kwargs)
    +        if not self._local_sandbox_available():
    +            return SandboxDummy(*args, **kwargs)
    +        else:
    +            from ..sandbox._sandboxbwrap import SandboxBwrap
    +            # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    +            kwargs['user_ns_available'] = self._user_ns_available
    +            kwargs['die_with_parent_available'] = self._die_with_parent_available
    +            return SandboxBwrap(*args, **kwargs)
    +
    +    def check_sandbox_config(self, config):
    +        if self._user_ns_available:
    +            # User namespace support allows arbitrary build UID/GID settings.
    +            return True
    +        else:
    +            # Without user namespace support, the UID/GID in the sandbox
    +            # will match the host UID/GID.
    +            return config.build_uid == self._uid and config.build_gid == self._gid
     
         ################################################
         #              Private Methods                 #
         ################################################
    -    def _check_user_ns_available(self, context):
    +    def _local_sandbox_available(self):
    +        try:
    +            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
    +        except utils.ProgramNotFoundError:
    +            return False
     
    +    def _check_user_ns_available(self):
             # Here, lets check if bwrap is able to create user namespaces,
             # issue a warning if it's not available, and save the state
             # locally so that we can inform the sandbox to not try it
    @@ -75,9 +95,4 @@ class Linux(Platform):
                 return True
     
             else:
    -            context.message(
    -                Message(None, MessageType.WARN,
    -                        "Unable to create user namespaces with bubblewrap, resorting to fallback",
    -                        detail="Some builds may not function due to lack of uid / gid 0, " +
    -                        "artifacts created will not be trusted for push purposes."))
                 return False
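
    To make the new check_sandbox_config() concrete, a self-contained
    illustration (SandboxConfigStub is a stand-in, not a BuildStream class) of
    the decision it encodes: without working user namespaces only a sandbox
    config matching the host UID/GID counts as fully supported, and an
    unsupported config taints the artifact instead of aborting the build:

        import os

        class SandboxConfigStub:
            # Stand-in for the sandbox config object; only the two fields the
            # platform check inspects.
            def __init__(self, build_uid, build_gid):
                self.build_uid = build_uid
                self.build_gid = build_gid

        def check_sandbox_config(user_ns_available, config):
            if user_ns_available:
                return True   # bwrap can map any UID/GID into the sandbox
            # Otherwise the sandbox runs with the host credentials.
            return config.build_uid == os.geteuid() and config.build_gid == os.getegid()

        # A project requesting build-uid 0 / build-gid 0 on a host without user
        # namespaces and a non-root user is "unsupported" (prints False here).
        print(check_sandbox_config(False, SandboxConfigStub(0, 0)))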

  • buildstream/_platform/platform.py
    @@ -19,6 +19,7 @@
     
     import os
     import sys
    +import resource
     
     from .._exceptions import PlatformError, ImplError
     
    @@ -29,50 +30,44 @@ class Platform():
         # Platform()
         #
         # A class to manage platform-specific details. Currently holds the
    -    # sandbox factory, the artifact cache and staging operations, as
    -    # well as platform helpers.
    +    # sandbox factory as well as platform helpers.
         #
    -    # Args:
    -    #     context (context): The project context
    -    #
    -    def __init__(self, context):
    -        self.context = context
    +    def __init__(self):
    +        self.set_resource_limits()
     
         @classmethod
    -    def create_instance(cls, *args, **kwargs):
    -        if sys.platform.startswith('linux'):
    -            backend = 'linux'
    -        else:
    -            backend = 'unix'
    -
    +    def _create_instance(cls):
             # Meant for testing purposes and therefore hidden in the
             # deepest corners of the source code. Try not to abuse this,
             # please?
             if os.getenv('BST_FORCE_BACKEND'):
                 backend = os.getenv('BST_FORCE_BACKEND')
    +        elif sys.platform.startswith('linux'):
    +            backend = 'linux'
    +        elif sys.platform.startswith('darwin'):
    +            backend = 'darwin'
    +        else:
    +            backend = 'unix'
     
             if backend == 'linux':
                 from .linux import Linux as PlatformImpl
    +        elif backend == 'darwin':
    +            from .darwin import Darwin as PlatformImpl
             elif backend == 'unix':
                 from .unix import Unix as PlatformImpl
             else:
                 raise PlatformError("No such platform: '{}'".format(backend))
     
    -        cls._instance = PlatformImpl(*args, **kwargs)
    +        cls._instance = PlatformImpl()
     
         @classmethod
         def get_platform(cls):
             if not cls._instance:
    -            raise PlatformError("Platform needs to be initialized first")
    +            cls._create_instance()
             return cls._instance
     
    -    ##################################################################
    -    #                       Platform properties                      #
    -    ##################################################################
    -    @property
    -    def artifactcache(self):
    -        raise ImplError("Platform {platform} does not implement an artifactcache"
    -                        .format(platform=type(self).__name__))
    +    def get_cpu_count(self, cap=None):
    +        return min(len(os.sched_getaffinity(0)), cap)
     
         ##################################################################
         #                        Sandbox functions                       #
    @@ -92,3 +87,19 @@ class Platform():
         def create_sandbox(self, *args, **kwargs):
             raise ImplError("Platform {platform} does not implement create_sandbox()"
                             .format(platform=type(self).__name__))
    +
    +    def check_sandbox_config(self, config):
    +        raise ImplError("Platform {platform} does not implement check_sandbox_config()"
    +                        .format(platform=type(self).__name__))
    +
    +    def set_resource_limits(self, soft_limit=None, hard_limit=None):
    +        # Need to set resources for _frontend/app.py as this is dependent on the platform
    +        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
    +        # Avoid hitting the limit too quickly.
    +        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    +        if limits[0] != limits[1]:
    +            if soft_limit is None:
    +                soft_limit = limits[1]
    +            if hard_limit is None:
    +                hard_limit = limits[1]
    +            resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
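
    Since the RLIMIT_NOFILE handling moved here from _frontend/app.py, a small
    self-contained demonstration of what set_resource_limits() does by default,
    namely raise the soft file-descriptor limit towards the hard limit; note
    that on macOS the soft limit cannot exceed OPEN_MAX, which is why the
    Darwin backend above passes OPEN_MAX explicitly:

        import resource

        def set_resource_limits(soft_limit=None, hard_limit=None):
            # Mirrors Platform.set_resource_limits() above.
            soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
            if soft != hard:
                if soft_limit is None:
                    soft_limit = hard
                if hard_limit is None:
                    hard_limit = hard
                resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))

        print("before:", resource.getrlimit(resource.RLIMIT_NOFILE))
        set_resource_limits()   # on Linux, raises the soft limit to the hard limit
        print("after: ", resource.getrlimit(resource.RLIMIT_NOFILE))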

  • buildstream/_platform/unix.py
    @@ -19,27 +19,29 @@
     
     import os
     
    -from .._artifactcache.cascache import CASCache
     from .._exceptions import PlatformError
    -from ..sandbox import SandboxChroot
     
     from . import Platform
     
     
     class Unix(Platform):
     
    -    def __init__(self, context):
    +    def __init__(self):
     
    -        super().__init__(context)
    -        self._artifact_cache = CASCache(context)
    +        super().__init__()
    +
    +        self._uid = os.geteuid()
    +        self._gid = os.getegid()
     
             # Not necessarily 100% reliable, but we want to fail early.
    -        if os.geteuid() != 0:
    +        if self._uid != 0:
                 raise PlatformError("Root privileges are required to run without bubblewrap.")
     
    -    @property
    -    def artifactcache(self):
    -        return self._artifact_cache
    -
         def create_sandbox(self, *args, **kwargs):
    +        from ..sandbox._sandboxchroot import SandboxChroot
             return SandboxChroot(*args, **kwargs)
    +
    +    def check_sandbox_config(self, config):
    +        # With the chroot sandbox, the UID/GID in the sandbox
    +        # will match the host UID/GID (typically 0/0).
    +        return config.build_uid == self._uid and config.build_gid == self._gid

  • buildstream/_project.py
    @@ -38,6 +38,7 @@ from ._loader import Loader
     from .element import Element
     from ._message import Message, MessageType
     from ._includes import Includes
    +from ._platform import Platform
     
     
     # Project Configuration file
    @@ -617,7 +618,8 @@ class Project():
             # Based on some testing (mainly on AWS), maximum effective
             # max-jobs value seems to be around 8-10 if we have enough cores
             # users should set values based on workload and build infrastructure
    -        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
    +        platform = Platform.get_platform()
    +        output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
     
             # Export options into variables, if that was requested
             output.options.export_variables(output.base_variables)
    

  • buildstream/_scheduler/jobs/cachesizejob.py
    @@ -17,7 +17,6 @@
     #        Tristan Daniël Maat <tristan maat codethink co uk>
     #
     from .job import Job
    -from ..._platform import Platform
     
     
     class CacheSizeJob(Job):
    @@ -25,8 +24,8 @@ class CacheSizeJob(Job):
             super().__init__(*args, **kwargs)
             self._complete_cb = complete_cb
     
    -        platform = Platform.get_platform()
    -        self._artifacts = platform.artifactcache
    +        context = self._scheduler.context
    +        self._artifacts = context.artifactcache
     
         def child_process(self):
             return self._artifacts.compute_cache_size()
    

  • buildstream/_scheduler/jobs/cleanupjob.py
    @@ -17,15 +17,14 @@
     #        Tristan Daniël Maat <tristan maat codethink co uk>
     #
     from .job import Job
    -from ..._platform import Platform
     
     
     class CleanupJob(Job):
         def __init__(self, *args, **kwargs):
             super().__init__(*args, **kwargs)
     
    -        platform = Platform.get_platform()
    -        self._artifacts = platform.artifactcache
    +        context = self._scheduler.context
    +        self._artifacts = context.artifactcache
     
         def child_process(self):
             return self._artifacts.clean()
    

  • buildstream/_scheduler/queues/buildqueue.py
    @@ -24,7 +24,6 @@ from . import Queue, QueueStatus
     from ..jobs import ElementJob
     from ..resources import ResourceType
     from ..._message import MessageType
    -from ..._platform import Platform
     
     
     # A queue which assembles elements
    @@ -94,8 +93,8 @@ class BuildQueue(Queue):
             # as returned from Element._assemble() to the estimated
             # artifact cache size
             #
    -        platform = Platform.get_platform()
    -        artifacts = platform.artifactcache
    +        context = self._scheduler.context
    +        artifacts = context.artifactcache
     
             artifacts.add_artifact_size(artifact_size)
     
    

  • buildstream/_scheduler/scheduler.py
    @@ -29,7 +29,6 @@ from contextlib import contextmanager
     # Local imports
     from .resources import Resources, ResourceType
     from .jobs import CacheSizeJob, CleanupJob
    -from .._platform import Platform
     
     
     # A decent return code for Scheduler.run()
    @@ -348,8 +347,8 @@
         #       which will report the calculated cache size.
         #
         def _run_cleanup(self, cache_size):
    -        platform = Platform.get_platform()
    -        artifacts = platform.artifactcache
    +        context = self.context
    +        artifacts = context.artifactcache
     
             if not artifacts.has_quota_exceeded():
                 return
    

  • buildstream/_stream.py
    @@ -32,7 +32,6 @@ from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
     from ._message import Message, MessageType
     from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
     from ._pipeline import Pipeline, PipelineSelection
    -from ._platform import Platform
     from . import utils, _yaml, _site
     from . import Scope, Consistency
     
    @@ -71,8 +70,7 @@ class Stream():
             #
             # Private members
             #
    -        self._platform = Platform.get_platform()
    -        self._artifacts = self._platform.artifactcache
    +        self._artifacts = context.artifactcache
             self._context = context
             self._project = project
             self._pipeline = Pipeline(context, project, self._artifacts)
    

  • buildstream/element.py
    @@ -246,15 +246,23 @@ class Element(Plugin):
             self.__config = self.__extract_config(meta)
             self._configure(self.__config)
     
    -        # Extract Sandbox config
    -        self.__sandbox_config = self.__extract_sandbox_config(meta)
    -
             # Extract remote execution URL
             if not self.__is_junction:
                 self.__remote_execution_url = project.remote_execution_url
             else:
                 self.__remote_execution_url = None
     
    +        # Extract Sandbox config
    +        self.__sandbox_config = self.__extract_sandbox_config(meta)
    +
    +        self.__sandbox_config_supported = True
    +        if not self.__use_remote_execution():
    +            platform = Platform.get_platform()
    +            if not platform.check_sandbox_config(self.__sandbox_config):
    +                # Local sandbox does not fully support specified sandbox config.
    +                # This will taint the artifact, disable pushing.
    +                self.__sandbox_config_supported = False
    +
         def __lt__(self, other):
             return self.name < other.name
     
    @@ -1521,6 +1529,11 @@
             context = self._get_context()
             with self._output_file() as output_file:
     
    +            if not self.__sandbox_config_supported:
    +                self.warn("Sandbox configuration is not supported by the platform.",
    +                          detail="Falling back to UID {} GID {}. Artifact will not be pushed."
    +                          .format(self.__sandbox_config.build_uid, self.__sandbox_config.build_gid))
    +
                 # Explicitly clean it up, keep the build dir around if exceptions are raised
                 os.makedirs(context.builddir, exist_ok=True)
                 rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
    @@ -2110,10 +2123,19 @@
                 workspaced_dependencies = self.__get_artifact_metadata_workspaced_dependencies()
     
                 # Other conditions should be or-ed
    -            self.__tainted = workspaced or workspaced_dependencies
    +            self.__tainted = (workspaced or workspaced_dependencies or
    +                              not self.__sandbox_config_supported)
     
             return self.__tainted
     
    +    # __use_remote_execution():
    +    #
    +    # Returns True if remote execution is configured and the element plugin
    +    # supports it.
    +    #
    +    def __use_remote_execution(self):
    +        return self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY
    +
         # __sandbox():
         #
         # A context manager to prepare a Sandbox object at the specified directory,
    @@ -2135,9 +2157,7 @@
             project = self._get_project()
             platform = Platform.get_platform()
     
    -        if (directory is not None and
    -            self.__remote_execution_url and
    -            self.BST_VIRTUAL_DIRECTORY):
    +        if directory is not None and self.__use_remote_execution():
     
                 self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
     
    
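    Taken together with the cascache.py change, push behaviour is now driven by
    artifact tainting. A tiny self-contained sketch (a plain function, not the
    Element method itself) of the updated taint rule:

        def artifact_is_tainted(workspaced, workspaced_dependencies, sandbox_config_supported):
            # Same condition as the updated Element.__get_tainted() above; tainted
            # artifacts are still built and cached locally, just never pushed.
            return (workspaced or
                    workspaced_dependencies or
                    not sandbox_config_supported)

        # e.g. a clean (non-workspace) build whose sandbox config the local
        # platform cannot honour: the artifact is produced but not pushed.
        print(artifact_is_tainted(False, False, False))   # True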

  • buildstream/sandbox/__init__.py
    @@ -18,6 +18,5 @@
     #        Tristan Maat <tristan maat codethink co uk>
     
     from .sandbox import Sandbox, SandboxFlags
    -from ._sandboxchroot import SandboxChroot
    -from ._sandboxbwrap import SandboxBwrap
     from ._sandboxremote import SandboxRemote
    +from ._sandboxdummy import SandboxDummy

  • buildstream/sandbox/_sandboxdummy.py
    @@ -0,0 +1,40 @@
    +#
    +#  Copyright (C) 2017 Codethink Limited
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +
    +from .._exceptions import SandboxError
    +from . import Sandbox
    +
    +
    +class SandboxDummy(Sandbox):
    +    def __init__(self, *args, **kwargs):
    +        super().__init__(*args, **kwargs)
    +
    +    def run(self, command, flags, *, cwd=None, env=None):
    +
    +        # Fallback to the sandbox default settings for
    +        # the cwd and env.
    +        #
    +        cwd = self._get_work_directory(cwd=cwd)
    +        env = self._get_environment(cwd=cwd, env=env)
    +
    +        if not self._has_command(command[0], env):
    +            raise SandboxError("Staged artifacts do not provide command "
    +                               "'{}'".format(command[0]),
    +                               reason='missing-command')
    +
    +        raise SandboxError("This platform does not support local builds")

  • buildstream/sandbox/_sandboxremote.py
    @@ -27,7 +27,6 @@ from . import Sandbox
     from ..storage._filebaseddirectory import FileBasedDirectory
     from ..storage._casbaseddirectory import CasBasedDirectory
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    -from .._platform import Platform
     
     
     class SandboxError(Exception):
    @@ -71,8 +70,8 @@ class SandboxRemote(Sandbox):
                                                           output_files=[],
                                                           output_directories=[self._output_directory],
                                                           platform=None)
    -        platform = Platform.get_platform()
    -        cascache = platform.artifactcache
    +        context = self._get_context()
    +        cascache = context.artifactcache
             # Upload the Command message to the remote CAS server
             command_digest = cascache.push_message(self._get_project(), remote_command)
             if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
    @@ -134,8 +133,8 @@
             if tree_digest is None or not tree_digest.hash:
                 raise SandboxError("Output directory structure had no digest attached.")
     
    -        platform = Platform.get_platform()
    -        cascache = platform.artifactcache
    +        context = self._get_context()
    +        cascache = context.artifactcache
             # Now do a pull to ensure we have the necessary parts.
             dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
             if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:
    @@ -170,8 +169,8 @@
     
             upload_vdir.recalculate_hash()
     
    -        platform = Platform.get_platform()
    -        cascache = platform.artifactcache
    +        context = self._get_context()
    +        cascache = context.artifactcache
             # Now, push that key (without necessarily needing a ref) to the remote.
             cascache.push_directory(self._get_project(), upload_vdir)
             if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):

  • buildstream/storage/_casbaseddirectory.py
    @@ -38,7 +38,6 @@ from .._exceptions import BstError
     from .directory import Directory, VirtualDirectoryError
     from ._filebaseddirectory import FileBasedDirectory
     from ..utils import FileListResult, safe_copy, list_relative_paths
    -from .._artifactcache.cascache import CASCache
     
     
     class IndexEntry():
    @@ -80,7 +79,7 @@ class CasBasedDirectory(Directory):
             self.filename = filename
             self.common_name = common_name
             self.pb2_directory = remote_execution_pb2.Directory()
    -        self.cas_cache = CASCache(context)
    +        self.cas_cache = context.artifactcache
             if ref:
                 with open(self.cas_cache.objpath(ref), 'rb') as f:
                     self.pb2_directory.ParseFromString(f.read())
    

  • buildstream/utils.py
    @@ -35,6 +35,7 @@ import tempfile
     import itertools
     import functools
     from contextlib import contextmanager
    +from stat import S_ISDIR
     
     import psutil
     
    @@ -328,27 +329,25 @@ def safe_remove(path):
         Raises:
            UtilError: In the case of unexpected system call failures
         """
    -    if os.path.lexists(path):
    -
    -        # Try to remove anything that is in the way, but issue
    -        # a warning instead if it removes a non empty directory
    -        try:
    +    try:
    +        if S_ISDIR(os.lstat(path).st_mode):
    +            os.rmdir(path)
    +        else:
                 os.unlink(path)
    -        except OSError as e:
    -            if e.errno != errno.EISDIR:
    -                raise UtilError("Failed to remove '{}': {}"
    -                                .format(path, e))
    -
    -            try:
    -                os.rmdir(path)
    -            except OSError as e:
    -                if e.errno == errno.ENOTEMPTY:
    -                    return False
    -                else:
    -                    raise UtilError("Failed to remove '{}': {}"
    -                                    .format(path, e))
     
    -    return True
    +        # File removed/unlinked successfully
    +        return True
    +
    +    except OSError as e:
    +        if e.errno == errno.ENOTEMPTY:
    +            # Path is non-empty directory
    +            return False
    +        elif e.errno == errno.ENOENT:
    +            # Path does not exist
    +            return True
    +
    +        raise UtilError("Failed to remove '{}': {}"
    +                        .format(path, e))
     
     
     def copy_files(src, dest, *, files=None, ignore_missing=False, report_written=False):
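
    A quick behavioural check of the reworked safe_remove(), runnable against a
    BuildStream checkout containing this change; the expected return values
    follow from the new code path above (removed or already-missing paths give
    True, a non-empty directory gives False):

        import os
        import tempfile

        from buildstream.utils import safe_remove

        workdir = tempfile.mkdtemp()
        target = os.path.join(workdir, 'file')
        open(target, 'w').close()

        assert safe_remove(target) is True            # regular file: unlinked
        assert safe_remove(target) is True            # already gone: ENOENT maps to True
        os.makedirs(os.path.join(workdir, 'dir', 'sub'))
        assert safe_remove(os.path.join(workdir, 'dir')) is False        # non-empty directory
        assert safe_remove(os.path.join(workdir, 'dir', 'sub')) is True  # empty directory: rmdir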
    

  • tests/artifactcache/pull.py
    @@ -6,7 +6,6 @@ import signal
     import pytest
     
     from buildstream import _yaml, _signals, utils
    -from buildstream._artifactcache.cascache import CASCache
     from buildstream._context import Context
     from buildstream._project import Project
     from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    @@ -88,7 +87,7 @@ def test_pull(cli, tmpdir, datafiles):
             # Load the project and CAS cache
             project = Project(project_dir, context)
             project.ensure_fully_loaded()
    -        cas = CASCache(context)
    +        cas = context.artifactcache
     
             # Assert that the element's artifact is **not** cached
             element = project.load_elements(['target.bst'], cas)[0]
    @@ -130,7 +129,7 @@ def _test_pull(user_config_file, project_dir, artifact_dir,
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Load the target element
         element = project.load_elements([element_name], cas)[0]
    @@ -191,7 +190,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
             # Load the project and CAS cache
             project = Project(project_dir, context)
             project.ensure_fully_loaded()
    -        cas = CASCache(context)
    +        cas = context.artifactcache
     
             # Assert that the element's artifact is cached
             element = project.load_elements(['target.bst'], cas)[0]
    @@ -269,7 +268,7 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    @@ -304,7 +303,7 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    

  • tests/artifactcache/push.py
    @@ -6,7 +6,6 @@ import pytest
     
     from pluginbase import PluginBase
     from buildstream import _yaml, _signals, utils
    -from buildstream._artifactcache.cascache import CASCache
     from buildstream._context import Context
     from buildstream._project import Project
     from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    @@ -67,7 +66,7 @@ def test_push(cli, tmpdir, datafiles):
             project.ensure_fully_loaded()
     
             # Create a local CAS cache handle
    -        cas = CASCache(context)
    +        cas = context.artifactcache
     
             # Assert that the element's artifact is cached
             element = project.load_elements(['target.bst'], cas)[0]
    @@ -109,7 +108,7 @@ def _test_push(user_config_file, project_dir, artifact_dir,
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Load the target element
         element = project.load_elements([element_name], cas)[0]
    @@ -166,7 +165,7 @@ def test_push_directory(cli, tmpdir, datafiles):
             # Load the project and CAS cache
             project = Project(project_dir, context)
             project.ensure_fully_loaded()
    -        cas = CASCache(context)
    +        cas = context.artifactcache
     
             # Assert that the element's artifact is cached
             element = project.load_elements(['target.bst'], cas)[0]
    @@ -217,7 +216,7 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    @@ -292,7 +291,7 @@ def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
         project.ensure_fully_loaded()
     
         # Create a local CAS cache handle
    -    cas = CASCache(context)
    +    cas = context.artifactcache
     
         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    

  • tests/testutils/artifactshare.py
    @@ -11,7 +11,6 @@ from multiprocessing import Process, Queue
     import pytest_cov
     
     from buildstream import _yaml
    -from buildstream._artifactcache.cascache import CASCache
     from buildstream._artifactcache.casserver import create_server
     from buildstream._context import Context
     from buildstream._exceptions import ArtifactError
    @@ -49,7 +48,7 @@ class ArtifactShare():
             context = Context()
             context.artifactdir = self.repodir
     
    -        self.cas = CASCache(context)
    +        self.cas = context.artifactcache
     
             self.total_space = total_space
             self.free_space = free_space
    


