Jürg Billeter pushed to branch mac_fixes at BuildStream / buildstream
Commits:

- 83ab183e by Tiago Gomes at 2018-09-25T16:37:30Z
- c8594806 by Tiago Gomes at 2018-09-25T17:03:55Z
- 0c1ad2fd by Jürg Billeter at 2018-09-25T17:26:04Z
- 9fb5101e by Jürg Billeter at 2018-09-25T17:26:04Z
- 061a0ecf by knownexus at 2018-09-25T17:26:04Z
- 86ceb399 by knownexus at 2018-09-25T17:26:04Z
- 5bda9f4e by knownexus at 2018-09-25T17:26:04Z
- 8c7e1353 by knownexus at 2018-09-25T17:26:04Z
- 7b27b5fc by knownexus at 2018-09-25T17:26:04Z
- 2b896603 by knownexus at 2018-09-25T17:26:16Z
- 37f485d3 by knownexus at 2018-09-25T17:26:17Z
- f93e7013 by James Ennis at 2018-09-25T17:26:17Z

14 changed files:
- .gitlab-ci.yml
- buildstream/_frontend/app.py
- + buildstream/_platform/darwin.py
- buildstream/_platform/linux.py
- buildstream/_platform/platform.py
- buildstream/_platform/unix.py
- buildstream/_project.py
- buildstream/sandbox/__init__.py
- + buildstream/sandbox/_sandboxdummy.py
- buildstream/utils.py
- conftest.py
- tests/artifactcache/config.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
Changes:

=====================================
.gitlab-ci.yml
=====================================
@@ -79,6 +79,8 @@ source_dist:
   - cd ../..
   - mkdir -p coverage-linux/
   - cp dist/buildstream/.coverage.* coverage-linux/coverage."${CI_JOB_NAME}"
+  except:
+  - schedules
   artifacts:
     paths:
     - coverage-linux/
@@ -127,6 +129,8 @@ tests-unix:
   - cd ../..
   - mkdir -p coverage-unix/
   - cp dist/buildstream/.coverage.* coverage-unix/coverage.unix
+  except:
+  - schedules
   artifacts:
     paths:
     - coverage-unix/
@@ -148,10 +152,41 @@ docs:
   - make BST_FORCE_SESSION_REBUILD=1 -C doc
   - cd ../..
   - mv dist/buildstream/doc/build/html public
+  except:
+  - schedules
   artifacts:
     paths:
     - public/
 
+.overnight-tests: &overnight-tests-template
+  stage: test
+  variables:
+    bst_ext_url: git+https://gitlab.com/BuildStream/bst-external.git
+    bst_ext_ref: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
+    fd_sdk_ref: 718ea88089644a1ea5b488de0b90c2c565cb75f8 # 18.08.12
+  before_script:
+  - (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
+  - pip3 install --user -e ${bst_ext_url}@${bst_ext_ref}#egg=bst_ext
+  - git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
+  - git -C freedesktop-sdk checkout ${fd_sdk_ref}
+  only:
+  - schedules
+
+overnight-tests:
+  <<: *overnight-tests-template
+  script:
+  - make -C freedesktop-sdk
+  tags:
+  - overnight-tests
+
+overnight-tests-no-cache:
+  <<: *overnight-tests-template
+  script:
+  - sed -i '/artifacts:/,+1 d' freedesktop-sdk/bootstrap/project.conf
+  - sed -i '/artifacts:/,+1 d' freedesktop-sdk/project.conf
+  - make -C freedesktop-sdk
+  tags:
+  - overnight-tests
 
 # Check code quality with gitlab's built-in feature.
 #
@@ -170,6 +205,8 @@ code_quality:
         --volume "$PWD":/code
         --volume /var/run/docker.sock:/var/run/docker.sock
         "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+  except:
+  - schedules
   artifacts:
     paths: [gl-code-quality-report.json]
 
@@ -199,6 +236,8 @@ analysis:
       radon raw -s -j buildstream > analysis/raw.json
       radon raw -s buildstream
 
+  except:
+  - schedules
   artifacts:
     paths:
     - analysis/
@@ -224,6 +263,8 @@ coverage:
   - tests-fedora-28
   - tests-unix
   - source_dist
+  except:
+  - schedules
 
 # Deploy, only for merges which land on master branch.
 #
@@ -248,3 +289,5 @@ pages:
   # See https://gitlab.com/gitlab-org/gitlab-ce/issues/35141
   #
   - master
+  except:
+  - schedules

=====================================
buildstream/_frontend/app.py
=====================================
@@ -115,14 +115,6 @@ class App():
         else:
             self.colors = False
 
-        # Increase the soft limit for open file descriptors to the maximum.
-        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
-        # Avoid hitting the limit too quickly.
-        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
-        if limits[0] != limits[1]:
-            # Set soft limit to hard limit
-            resource.setrlimit(resource.RLIMIT_NOFILE, (limits[1], limits[1]))
-
     # create()
     #
     # Should be used instead of the regular constructor.
@@ -293,6 +285,8 @@ class App():
             # Notify session success
             self._notify("{} succeeded".format(session_name), "")
 
+            Platform.destroy_instance()
+
     # init_project()
     #
     # Initialize a new BuildStream project, either with the explicitly passed options,
@@ -667,6 +661,8 @@ class App():
                 detail = '\n' + indent + indent.join(error.detail.splitlines(True))
                 click.echo("{}".format(detail), err=True)
 
+        Platform.destroy_instance()
+
         sys.exit(-1)
 
     #
=====================================
buildstream/_platform/darwin.py
=====================================
@@ -0,0 +1,50 @@
+#
+# Copyright (C) 2017 Codethink Limited
+# Copyright (C) 2018 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import resource
+
+from .._exceptions import PlatformError
+from ..sandbox import SandboxDummy
+
+from . import Platform
+
+
+class Darwin(Platform):
+
+    # This value comes from OPEN_MAX in syslimits.h
+    OPEN_MAX = 10240
+
+    def __init__(self, context):
+
+        super().__init__(context)
+
+    @property
+    def artifactcache(self):
+        return self._artifact_cache
+
+    def create_sandbox(self, *args, **kwargs):
+        return SandboxDummy(*args, **kwargs)
+
+    def get_cpu_count(self, cap=None):
+        if cap < os.cpu_count():
+            return cap
+        else:
+            return os.cpu_count()
+
+    def set_resource_limits(self, soft_limit=OPEN_MAX, hard_limit=None):
+        super().set_resource_limits(soft_limit)
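
The OPEN_MAX constant above exists because the base-class file-descriptor
handling (see the platform.py changes below) raises the soft RLIMIT_NOFILE
towards the hard limit, and on macOS the hard limit is typically reported as
unlimited while the kernel only accepts soft values up to OPEN_MAX. A minimal
standalone sketch of that pattern, using only the standard library (the helper
name bump_nofile_limit is illustrative, not part of this branch):

    import resource

    OPEN_MAX = 10240  # macOS cap from syslimits.h, as used in darwin.py

    def bump_nofile_limit(cap=OPEN_MAX):
        # Raise the soft fd limit, but never above the hard limit or the cap.
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        # An unlimited hard limit cannot be used as a soft limit on macOS;
        # fall back to the OPEN_MAX cap in that case.
        target = cap if hard == resource.RLIM_INFINITY else min(hard, cap)
        if soft < target:
            resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard))

    bump_nofile_limit()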

=====================================
buildstream/_platform/linux.py
=====================================
@@ -17,13 +17,13 @@
 # Authors:
 #        Tristan Maat <tristan maat codethink co uk>
 
+import os
 import subprocess
 
 from .. import _site
 from .. import utils
-from .._artifactcache.cascache import CASCache
 from .._message import Message, MessageType
-from ..sandbox import SandboxBwrap
+from ..sandbox import SandboxDummy
 
 from . import Platform
 
@@ -32,27 +32,44 @@ class Linux(Platform):
 
     def __init__(self, context):
 
-        super().__init__(context)
-
         self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
-        self._user_ns_available = self._check_user_ns_available(context)
-        self._artifact_cache = CASCache(context, enable_push=self._user_ns_available)
+
+        if self._local_sandbox_available():
+            self._user_ns_available = self._check_user_ns_available(context)
+        else:
+            self._user_ns_available = False
+
+        # _user_ns_available needs to be set before chaining up to the super class
+        # This is because it will call create_artifact_cache()
+        super().__init__(context)
 
     @property
     def artifactcache(self):
         return self._artifact_cache
 
     def create_sandbox(self, *args, **kwargs):
-        # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
-        kwargs['user_ns_available'] = self._user_ns_available
-        kwargs['die_with_parent_available'] = self._die_with_parent_available
-        return SandboxBwrap(*args, **kwargs)
+        if not self._local_sandbox_available():
+            return SandboxDummy(*args, **kwargs)
+        else:
+            from ..sandbox._sandboxbwrap import SandboxBwrap
+            # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
+            kwargs['user_ns_available'] = self._user_ns_available
+            kwargs['die_with_parent_available'] = self._die_with_parent_available
+            return SandboxBwrap(*args, **kwargs)
+
+    def create_artifact_cache(self, context, *, enable_push):
+        return super().create_artifact_cache(context=context, enable_push=self._user_ns_available)
 
     ################################################
     #              Private Methods                 #
     ################################################
-    def _check_user_ns_available(self, context):
+    def _local_sandbox_available(self):
+        try:
+            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
+        except utils.ProgramNotFoundError:
+            return False
 
+    def _check_user_ns_available(self, context):
         # Here, lets check if bwrap is able to create user namespaces,
         # issue a warning if it's not available, and save the state
         # locally so that we can inform the sandbox to not try it

=====================================
buildstream/_platform/platform.py
=====================================
@@ -19,8 +19,10 @@
 
 import os
 import sys
+import resource
 
 from .._exceptions import PlatformError, ImplError
+from .._artifactcache.cascache import CASCache
 
 
 class Platform():
@@ -37,22 +39,29 @@ class Platform():
     #
     def __init__(self, context):
         self.context = context
+        self.set_resource_limits()
+        self._artifact_cache = self.create_artifact_cache(context, enable_push=True)
 
     @classmethod
     def create_instance(cls, *args, **kwargs):
-        if sys.platform.startswith('linux'):
-            backend = 'linux'
-        else:
-            backend = 'unix'
+        assert not cls._instance
 
         # Meant for testing purposes and therefore hidden in the
        # deepest corners of the source code. Try not to abuse this,
        # please?
         if os.getenv('BST_FORCE_BACKEND'):
             backend = os.getenv('BST_FORCE_BACKEND')
+        elif sys.platform.startswith('linux'):
+            backend = 'linux'
+        elif sys.platform.startswith('darwin'):
+            backend = 'darwin'
+        else:
+            backend = 'unix'
 
         if backend == 'linux':
             from .linux import Linux as PlatformImpl
+        elif backend == 'darwin':
+            from .darwin import Darwin as PlatformImpl
         elif backend == 'unix':
             from .unix import Unix as PlatformImpl
         else:
@@ -60,12 +69,19 @@ class Platform():
 
         cls._instance = PlatformImpl(*args, **kwargs)
 
+    @classmethod
+    def destroy_instance(cls):
+        cls._instance = None
+
     @classmethod
     def get_platform(cls):
         if not cls._instance:
             raise PlatformError("Platform needs to be initialized first")
         return cls._instance
 
+    def get_cpu_count(self, cap=None):
+        return min(len(os.sched_getaffinity(0)), cap)
+
     ##################################################################
     #                      Platform properties                       #
     ##################################################################
@@ -92,3 +108,18 @@ class Platform():
     def create_sandbox(self, *args, **kwargs):
         raise ImplError("Platform {platform} does not implement create_sandbox()"
                         .format(platform=type(self).__name__))
+
+    def set_resource_limits(self, soft_limit=None, hard_limit=None):
+        # Need to set resources for _frontend/app.py as this is dependent on the platform
+        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
+        # Avoid hitting the limit too quickly.
+        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
+        if limits[0] != limits[1]:
+            if soft_limit is None:
+                soft_limit = limits[1]
+            if hard_limit is None:
+                hard_limit = limits[1]
+            resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
+
+    def create_artifact_cache(self, context, *, enable_push=True):
+        return CASCache(context=context, enable_push=enable_push)
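
With these changes the Platform object becomes an explicitly managed
singleton that owns the resource limits, the artifact cache and CPU-count
capping. A minimal sketch of the intended lifecycle as exposed by the API
above (illustrative only; the Context setup is abbreviated and assumes the
default user configuration is acceptable):

    from buildstream._context import Context
    from buildstream._platform import Platform

    context = Context()
    context.load()                        # load the default user configuration

    Platform.create_instance(context)     # picks the linux/darwin/unix backend
    platform = Platform.get_platform()    # returns the created instance

    max_jobs = platform.get_cpu_count(8)  # capped count, as used for 'max-jobs'
    # platform.create_sandbox(...) is what elements use for local builds

    Platform.destroy_instance()           # lets tests and the frontend reset state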

=====================================
buildstream/_platform/unix.py
=====================================
@@ -19,9 +19,7 @@
 
 import os
 
-from .._artifactcache.cascache import CASCache
 from .._exceptions import PlatformError
-from ..sandbox import SandboxChroot
 
 from . import Platform
 
@@ -31,7 +29,6 @@ class Unix(Platform):
     def __init__(self, context):
 
         super().__init__(context)
-        self._artifact_cache = CASCache(context)
 
         # Not necessarily 100% reliable, but we want to fail early.
         if os.geteuid() != 0:
@@ -42,4 +39,5 @@ class Unix(Platform):
         return self._artifact_cache
 
     def create_sandbox(self, *args, **kwargs):
+        from ..sandbox._sandboxchroot import SandboxChroot
         return SandboxChroot(*args, **kwargs)

=====================================
buildstream/_project.py
=====================================
@@ -38,6 +38,7 @@ from ._loader import Loader
 from .element import Element
 from ._message import Message, MessageType
 from ._includes import Includes
+from ._platform import Platform
 
 
 # Project Configuration file
@@ -617,7 +618,8 @@ class Project():
         # Based on some testing (mainly on AWS), maximum effective
         # max-jobs value seems to be around 8-10 if we have enough cores
         # users should set values based on workload and build infrastructure
-        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
+        platform = Platform.get_platform()
+        output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
 
         # Export options into variables, if that was requested
         output.options.export_variables(output.base_variables)

=====================================
buildstream/sandbox/__init__.py
=====================================
@@ -18,6 +18,5 @@
 #        Tristan Maat <tristan maat codethink co uk>
 
 from .sandbox import Sandbox, SandboxFlags
-from ._sandboxchroot import SandboxChroot
-from ._sandboxbwrap import SandboxBwrap
 from ._sandboxremote import SandboxRemote
+from ._sandboxdummy import SandboxDummy

=====================================
buildstream/sandbox/_sandboxdummy.py
=====================================
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2017 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+
+from .._exceptions import SandboxError
+from . import Sandbox
+
+
+class SandboxDummy(Sandbox):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def run(self, command, flags, *, cwd=None, env=None):
+
+        # Fallback to the sandbox default settings for
+        # the cwd and env.
+        #
+        cwd = self._get_work_directory(cwd=cwd)
+        env = self._get_environment(cwd=cwd, env=env)
+
+        if not self._has_command(command[0], env):
+            raise SandboxError("Staged artifacts do not provide command "
+                               "'{}'".format(command[0]),
+                               reason='missing-command')
+
+        raise SandboxError("This platform does not support local builds")

=====================================
buildstream/utils.py
=====================================
@@ -35,6 +35,7 @@ import tempfile
 import itertools
 import functools
 from contextlib import contextmanager
+from stat import S_ISDIR
 
 import psutil
 
@@ -328,27 +329,25 @@ def safe_remove(path):
     Raises:
        UtilError: In the case of unexpected system call failures
     """
-    if os.path.lexists(path):
-
-        # Try to remove anything that is in the way, but issue
-        # a warning instead if it removes a non empty directory
-        try:
+    try:
+        if S_ISDIR(os.lstat(path).st_mode):
+            os.rmdir(path)
+        else:
             os.unlink(path)
-        except OSError as e:
-            if e.errno != errno.EISDIR:
-                raise UtilError("Failed to remove '{}': {}"
-                                .format(path, e))
-
-        try:
-            os.rmdir(path)
-        except OSError as e:
-            if e.errno == errno.ENOTEMPTY:
-                return False
-            else:
-                raise UtilError("Failed to remove '{}': {}"
-                                .format(path, e))
 
-        return True
+        # File removed/unlinked successfully
+        return True
+
+    except OSError as e:
+        if e.errno == errno.ENOTEMPTY:
+            # Path is non-empty directory
+            return False
+        elif e.errno == errno.ENOENT:
+            # Path does not exist
+            return True
+
+        raise UtilError("Failed to remove '{}': {}"
+                        .format(path, e))
 
 
 def copy_files(src, dest, *, files=None, ignore_missing=False, report_written=False):
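
The reworked safe_remove() above now encodes three outcomes in its return
value: True when the path was removed or did not exist to begin with, False
only when the path is a non-empty directory (which is left in place), and a
UtilError for any other failure. A small illustrative snippet of that contract
(not part of this branch; it only assumes a writable temporary directory):

    import os
    import tempfile

    from buildstream.utils import safe_remove

    workdir = tempfile.mkdtemp()
    target = os.path.join(workdir, 'sub')
    os.mkdir(target)
    open(os.path.join(target, 'file'), 'w').close()

    assert safe_remove(target) is False   # non-empty directory is kept
    os.unlink(os.path.join(target, 'file'))
    assert safe_remove(target) is True    # empty directory is removed
    assert safe_remove(target) is True    # a missing path still counts as removed
    os.rmdir(workdir)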

=====================================
conftest.py
=====================================
@@ -23,6 +23,8 @@ import shutil
 
 import pytest
 
+from buildstream._platform import Platform
+
 
 def pytest_addoption(parser):
     parser.addoption('--integration', action='store_true', default=False,
@@ -34,6 +36,10 @@ def pytest_runtest_setup(item):
         pytest.skip('skipping integration test')
 
 
+def pytest_runtest_teardown(item, nextitem):
+    Platform.destroy_instance()
+
+
 @pytest.fixture(scope='session')
 def integration_cache(request):
 
=====================================
tests/artifactcache/config.py
=====================================
@@ -6,6 +6,7 @@ import os
 from buildstream._artifactcache import ArtifactCacheSpec
 from buildstream._artifactcache.artifactcache import _configured_remote_artifact_cache_specs
 from buildstream._context import Context
+from buildstream._platform import Platform
 from buildstream._project import Project
 from buildstream.utils import _deduplicate
 from buildstream import _yaml
@@ -101,6 +102,7 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
 
     context = Context()
     context.load(config=user_config_file)
+    Platform.create_instance(context)
     project = Project(str(project_dir), context)
     project.ensure_fully_loaded()
 
=====================================
tests/artifactcache/pull.py
=====================================
@@ -39,6 +39,15 @@ def tree_maker(cas, tree, directory):
         tree_maker(cas, tree, child_directory)
 
 
+def context_with_artifactdir(artifactdir):
+    context = Context()
+    context.load(config=user_config_file)
+    context.artifactdir = artifactdir
+    context.set_message_handler(message_handler)
+    Platform.create_instance()
+    return context
+
+
 @pytest.mark.datafiles(DATA_DIR)
 def test_pull(cli, tmpdir, datafiles):
     project_dir = str(datafiles)
@@ -80,10 +89,7 @@ def test_pull(cli, tmpdir, datafiles):
     assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
 
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(os.path.join(str(tmpdir), 'cache', 'artifacts'))
 
     # Load the project and CAS cache
     project = Project(project_dir, context)
@@ -120,10 +126,7 @@ def test_pull(cli, tmpdir, datafiles):
 def _test_pull(user_config_file, project_dir, artifact_dir,
                element_name, element_key, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
@@ -183,10 +186,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
     assert share.has_artifact('test', 'target.bst', element_key)
 
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(os.path.join(str(tmpdir), 'cache', 'artifacts'))
 
     # Load the project and CAS cache
     project = Project(project_dir, context)
@@ -259,10 +259,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
 
 def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
@@ -294,10 +291,7 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
 
 def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
=====================================
tests/artifactcache/push.py
=====================================
@@ -27,6 +27,15 @@ def message_handler(message, context):
     pass
 
 
+def context_with_artifactdir(artifactdir):
+    context = Context()
+    context.load(config=user_config_file)
+    context.artifactdir = artifactdir
+    context.set_message_handler(message_handler)
+    Platform.create_instance()
+    return context
+
+
 @pytest.mark.datafiles(DATA_DIR)
 def test_push(cli, tmpdir, datafiles):
     project_dir = str(datafiles)
@@ -57,10 +66,7 @@ def test_push(cli, tmpdir, datafiles):
     _yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
 
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
@@ -99,10 +105,7 @@ def test_push(cli, tmpdir, datafiles):
 def _test_push(user_config_file, project_dir, artifact_dir,
                element_name, element_key, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
@@ -158,10 +161,7 @@ def test_push_directory(cli, tmpdir, datafiles):
     _yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
 
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(os.path.join(str(tmpdir), 'cache', 'artifacts'))
 
     # Load the project and CAS cache
     project = Project(project_dir, context)
@@ -207,10 +207,7 @@ def test_push_directory(cli, tmpdir, datafiles):
 
 def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
@@ -282,10 +279,7 @@ def test_push_message(cli, tmpdir, datafiles):
 
 def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
     # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
-    context.set_message_handler(message_handler)
+    context = context_with_artifactdir(artifact_dir)
 
     # Load the project manually
     project = Project(project_dir, context)
