Raoul Hidalgo Charman pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream
Commits:
- 7df37026
  by Raoul Hidalgo Charman at 2019-01-21T13:56:27Z
12 changed files:
- buildstream/_context.py
- buildstream/_frontend/app.py
- buildstream/data/userconfig.yaml
- conftest.py
- tests/artifactcache/junctions.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/frontend/pull.py
- tests/integration/build-tree.py
- tests/integration/pullbuildtrees.py
- tests/integration/source-determinism.py
- tests/testutils/runcli.py
Changes:
=====================================
buildstream/_context.py
=====================================
@@ -58,12 +58,21 @@ class Context():
         # Filename indicating which configuration file was used, or None for the defaults
         self.config_origin = None
 
+        # The directory under which other directories are based
+        self.rootcachedir = None
+
         # The directory where various sources are stored
         self.sourcedir = None
 
         # The directory where build sandboxes will be created
         self.builddir = None
 
+        # The directory for CAS
+        self.casdir = None
+
+        # The directory for temporary files
+        self.tmpdir = None
+
         # Default root location for workspaces
         self.workspacedir = None
 
@@ -165,7 +174,7 @@ class Context():
     #
     # This will first load the BuildStream default configuration and then
     # override that configuration with the configuration file indicated
-    # by *config*, if any was specified.
+    # by *config* , if any was specified.
     #
     def load(self, config=None):
         profile_start(Topics.LOAD_CONTEXT, 'load')
@@ -188,13 +197,30 @@ class Context():
             user_config = _yaml.load(config)
             _yaml.composite(defaults, user_config)
 
+        # Give deprecation warnings
+        if defaults.get('builddir'):
+            print("builddir is deprecated, use rootcachedir")
+        else:
+            defaults['builddir'] = os.path.join(defaults['rootcachedir'], 'build')
+
+        if defaults.get('artifactdir'):
+            print("artifactdir is deprecated, use rootcachedir")
+        else:
+            defaults['artifactdir'] = os.path.join(defaults['rootcachedir'], 'artifacts')
+
         _yaml.node_validate(defaults, [
-            'sourcedir', 'builddir', 'artifactdir', 'logdir',
+            'rootcachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'prompt', 'workspacedir', 'remote-execution'
+            'cache', 'prompt', 'workspacedir', 'remote-execution',
         ])
 
-        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
+        # add directories not set by users
+        defaults.insert(0, 'tmpdir', os.path.join(defaults['rootcachedir'], 'tmp'))
+        defaults.insert(0, 'casdir', os.path.join(defaults['rootcachedir'], 'cas'))
+
+        for directory in ['rootcachedir', 'sourcedir', 'builddir',
+                          'artifactdir', 'logdir', 'workspacedir', 'casdir',
+                          'tmpdir']:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
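The fallback above follows a single pattern: legacy keys stay honoured (with a deprecation message) and anything unset is derived from rootcachedir. A minimal standalone sketch of that logic, using a hypothetical default value that is not part of the commit:

    import os

    defaults = {'rootcachedir': os.path.expanduser('~/.cache/buildstream')}  # hypothetical value

    # Legacy keys remain honoured but deprecated; unset ones fall back to
    # subdirectories of the cache root.
    for legacy_key, subdir in [('builddir', 'build'), ('artifactdir', 'artifacts')]:
        if defaults.get(legacy_key):
            print("{} is deprecated, use rootcachedir".format(legacy_key))
        else:
            defaults[legacy_key] = os.path.join(defaults['rootcachedir'], subdir)

    # casdir and tmpdir are always derived here and are not user-configurable
    defaults['casdir'] = os.path.join(defaults['rootcachedir'], 'cas')
    defaults['tmpdir'] = os.path.join(defaults['rootcachedir'], 'tmp')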
@@ -290,7 +316,7 @@ class Context():
         return self._artifactcache
 
     # add_project():
-    #
+
     # Add a project to the context.
     #
     # Args:
@@ -654,7 +680,7 @@ class Context():
 
     def get_cascache(self):
         if self._cascache is None:
-            self._cascache = CASCache(self.artifactdir)
+            self._cascache = CASCache(self.rootcachedir)
         return self._cascache
 
     # guess_element()
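The net effect of the Context changes: every cache location hangs off one root, and the CAS cache is constructed from that root rather than from artifactdir. A sketch of the resulting on-disk layout, with an illustrative (not authoritative) default path:

    import os

    rootcachedir = os.path.expanduser('~/.cache/buildstream')  # illustrative default

    # Directories resolved under the cache root after this commit (sketch)
    cache_layout = {
        'casdir': os.path.join(rootcachedir, 'cas'),             # CAS objects and refs
        'tmpdir': os.path.join(rootcachedir, 'tmp'),             # temporary files
        'builddir': os.path.join(rootcachedir, 'build'),         # build sandboxes
        'artifactdir': os.path.join(rootcachedir, 'artifacts'),  # legacy artifact location
    }
    for name, path in sorted(cache_layout.items()):
        print('{:<12} {}'.format(name, path))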
=====================================
buildstream/_frontend/app.py
=====================================
@@ -227,6 +227,7 @@ class App():
                 click.echo("", err=True)
                 click.echo(" bst init", err=True)
 
+            traceback.print_tb(sys.exc_info()[2])
             self._error_exit(e, "Error loading project")
 
         except BstError as e:
=====================================
buildstream/data/userconfig.yaml
=====================================
@@ -13,11 +13,8 @@
 # Location to store sources
 sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
 
-# Location to perform builds
-builddir: ${XDG_CACHE_HOME}/buildstream/build
-
-# Location to store local binary artifacts
-artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
+# Root location for other directories in the cache
+rootcachedir: ${XDG_CACHE_HOME}/buildstream
 
 # Location to store build logs
 logdir: ${XDG_CACHE_HOME}/buildstream/logs
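For users this makes relocating the whole cache a one-key change. A sketch of a minimal user configuration exercising the new key, written as the kind of dict the tests below dump to buildstream.conf (paths and URL hypothetical):

    user_config = {
        'rootcachedir': '/mnt/big-disk/buildstream-cache',  # cas/, tmp/, build/ and artifacts/ land here
        'artifacts': {
            'url': 'https://artifacts.example.com',  # hypothetical remote cache
            'push': False,
        },
    }
    print(user_config)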
=====================================
conftest.py
=====================================
@@ -60,10 +60,10 @@ class IntegrationCache():
 
         # Create a temp directory for the duration of the test for
         # the artifacts directory
-        try:
-            self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
-        except OSError as e:
-            raise AssertionError("Unable to create test directory !") from e
+        # try:
+        #     self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
+        # except OSError as e:
+        #     raise AssertionError("Unable to create test directory !") from e
 
 
 @pytest.fixture(scope='session')
@@ -82,10 +82,10 @@ def integration_cache(request):
 
     # Clean up the artifacts after each test run - we only want to
     # cache sources between runs
-    try:
-        shutil.rmtree(cache.artifacts)
-    except FileNotFoundError:
-        pass
+    # try:
+    #     shutil.rmtree(cache.artifacts)
+    # except FileNotFoundError:
+    #     pass
 
 
 #################################################
=====================================
tests/artifactcache/junctions.py
=====================================
@@ -68,8 +68,8 @@ def test_push_pull(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     state = cli.get_element_state(project, 'target.bst')
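This delete-then-pull pattern recurs in the test changes below: now that artifacts live in CAS, an empty local cache is simulated by removing the cas directory under the cache root rather than the old artifacts directory. A minimal sketch with a hypothetical path:

    import os
    import shutil

    cli_directory = '/tmp/bst-test-cache'  # hypothetical per-test cache root

    # Wipe the local CAS so every artifact must be pulled from the share again
    cas = os.path.join(cli_directory, 'cas')
    shutil.rmtree(cas, ignore_errors=True)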
=====================================
tests/artifactcache/pull.py
=====================================
@@ -56,7 +56,7 @@ def test_pull(cli, tmpdir, datafiles):
     # Set up an artifact cache.
     with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
         # Configure artifact share
-        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+        cache_dir = os.path.join(str(tmpdir), 'cache')
         user_config_file = str(tmpdir.join('buildstream.conf'))
         user_config = {
             'scheduler': {
@@ -65,7 +65,8 @@ def test_pull(cli, tmpdir, datafiles):
             'artifacts': {
                 'url': share.repo,
                 'push': True,
-            }
+            },
+            'rootcachedir': cache_dir
         }
 
         # Write down the user configuration file
@@ -92,7 +93,6 @@ def test_pull(cli, tmpdir, datafiles):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
-        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
         context.set_message_handler(message_handler)
 
         # Load the project and CAS cache
@@ -102,7 +102,10 @@ def test_pull(cli, tmpdir, datafiles):
 
         # Assert that the element's artifact is **not** cached
         element = project.load_elements(['target.bst'])[0]
+        print(element)
         element_key = cli.get_element_key(project_dir, 'target.bst')
+        print(context.casdir)
+        print(cas.get_artifact_fullname(element, element_key))
         assert not cas.contains(element, element_key)
 
         queue = multiprocessing.Queue()
@@ -110,7 +113,7 @@ def test_pull(cli, tmpdir, datafiles):
         # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_pull, queue, user_config_file, project_dir,
-                                                artifact_dir, 'target.bst', element_key))
+                                                cache_dir, 'target.bst', element_key))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -127,12 +130,14 @@ def test_pull(cli, tmpdir, datafiles):
     assert cas.contains(element, element_key)
 
 
-def _test_pull(user_config_file, project_dir, artifact_dir,
+def _test_pull(user_config_file, project_dir, cache_dir,
                element_name, element_key, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
+    context.rootcachedir = cache_dir
+    context.casdir = os.path.join(cache_dir, 'cas')
+    context.tmpdir = os.path.join(cache_dir, 'tmp')
     context.set_message_handler(message_handler)
 
     # Load the project manually
@@ -165,7 +170,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
     # Set up an artifact cache.
     with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
         # Configure artifact share
-        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+        rootcache_dir = os.path.join(str(tmpdir), 'cache')
         user_config_file = str(tmpdir.join('buildstream.conf'))
         user_config = {
             'scheduler': {
@@ -174,7 +179,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
             'artifacts': {
                 'url': share.repo,
                 'push': True,
-            }
+            },
+            'rootcachedir': rootcache_dir
         }
 
         # Write down the user configuration file
@@ -195,7 +201,6 @@ def test_pull_tree(cli, tmpdir, datafiles):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
-        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
         context.set_message_handler(message_handler)
 
         # Load the project and CAS cache
@@ -218,7 +223,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_push_tree, queue, user_config_file, project_dir,
-                                                artifact_dir, artifact_digest))
+                                                artifact_digest))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -246,7 +251,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         # Use subprocess to avoid creation of gRPC threads in main BuildStream process
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_pull_tree, queue, user_config_file, project_dir,
-                                                artifact_dir, tree_digest))
+                                                tree_digest))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -268,11 +273,10 @@ def test_pull_tree(cli, tmpdir, datafiles):
         assert os.path.exists(cas.objpath(directory_digest))
 
 
-def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
     context.set_message_handler(message_handler)
 
     # Load the project manually
@@ -304,11 +308,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
     queue.put("No remote configured")
 
 
-def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
     context.set_message_handler(message_handler)
 
     # Load the project manually
=====================================
tests/artifactcache/push.py
=====================================
@@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
     # Set up an artifact cache.
     with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
         # Configure artifact share
-        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+        rootcache_dir = os.path.join(str(tmpdir), 'cache')
         user_config_file = str(tmpdir.join('buildstream.conf'))
         user_config = {
             'scheduler': {
@@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles):
             'artifacts': {
                 'url': share.repo,
                 'push': True,
-            }
+            },
+            'rootcachedir': rootcache_dir
         }
 
         # Write down the user configuration file
@@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
-        context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)
 
         # Load the project manually
@@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles):
         # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_push, queue, user_config_file, project_dir,
-                                                artifact_dir, 'target.bst', element_key))
+                                                'target.bst', element_key))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles):
     assert share.has_artifact('test', 'target.bst', element_key)
 
 
-def _test_push(user_config_file, project_dir, artifact_dir,
-               element_name, element_key, queue):
+def _test_push(user_config_file, project_dir, element_name, element_key, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
     context.set_message_handler(message_handler)
 
     # Load the project manually
@@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
     # Set up an artifact cache.
     with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
         # Configure artifact share
-        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+        rootcache_dir = os.path.join(str(tmpdir), 'cache')
         user_config_file = str(tmpdir.join('buildstream.conf'))
         user_config = {
             'scheduler': {
@@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles):
             'artifacts': {
                 'url': share.repo,
                 'push': True,
-            }
+            },
+            'rootcachedir': rootcache_dir
         }
 
         # Write down the user configuration file
@@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
-        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
         context.set_message_handler(message_handler)
 
         # Load the project and CAS cache
@@ -182,6 +180,7 @@ def test_push_directory(cli, tmpdir, datafiles):
         # Assert that the element's artifact is cached
         element = project.load_elements(['target.bst'])[0]
         element_key = cli.get_element_key(project_dir, 'target.bst')
+        print(context.casdir)
         assert artifactcache.contains(element, element_key)
 
         # Manually setup the CAS remote
@@ -198,7 +197,7 @@ def test_push_directory(cli, tmpdir, datafiles):
         # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_push_directory, queue, user_config_file,
-                                                project_dir, artifact_dir, artifact_digest))
+                                                project_dir, artifact_digest))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -216,11 +215,10 @@ def test_push_directory(cli, tmpdir, datafiles):
     assert share.has_object(artifact_digest)
 
 
-def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
     context.set_message_handler(message_handler)
 
     # Load the project manually
@@ -254,6 +252,7 @@ def test_push_message(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
         # Configure artifact share
         artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+        rootcache_dir = os.path.join(str(tmpdir), 'cache')
        user_config_file = str(tmpdir.join('buildstream.conf'))
         user_config = {
             'scheduler': {
@@ -262,7 +261,8 @@ def test_push_message(cli, tmpdir, datafiles):
             'artifacts': {
                 'url': share.repo,
                 'push': True,
-            }
+            },
+            'rootcachedir': rootcache_dir
         }
 
         # Write down the user configuration file
@@ -273,7 +273,7 @@ def test_push_message(cli, tmpdir, datafiles):
         # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
         process = multiprocessing.Process(target=_queue_wrapper,
                                           args=(_test_push_message, queue, user_config_file,
-                                                project_dir, artifact_dir))
+                                                project_dir))
 
         try:
             # Keep SIGINT blocked in the child process
@@ -292,11 +292,10 @@ def test_push_message(cli, tmpdir, datafiles):
     assert share.has_object(message_digest)
 
 
-def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
+def _test_push_message(user_config_file, project_dir, queue):
     # Fake minimal context
     context = Context()
     context.load(config=user_config_file)
-    context.artifactdir = artifact_dir
     context.set_message_handler(message_handler)
 
     # Load the project manually
=====================================
tests/frontend/pull.py
=====================================
@@ -62,8 +62,8 @@ def test_push_pull_all(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     for element_name in all_elements:
@@ -104,8 +104,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
     assert_shared(cli, share2, project, 'target.bst')
 
     # Delete the user's local artifact cache.
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that the element is not cached anymore.
     assert cli.get_element_state(project, 'target.bst') != 'cached'
@@ -158,8 +158,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the good_share.
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
                                             good_share.repo])
@@ -199,8 +199,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     for element_name in all_elements:
@@ -249,8 +249,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     for element_name in all_elements:
@@ -285,7 +285,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
     result.assert_success()
     assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
 
-    cache_dir = os.path.join(project, 'cache', 'artifacts')
+    cache_dir = os.path.join(project, 'cache', 'cas')
     shutil.rmtree(cache_dir)
 
     assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
@@ -320,8 +320,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     for element_name in all_elements:
=====================================
tests/integration/build-tree.py
=====================================
@@ -157,10 +157,8 @@ def test_buildtree_options(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, element_name) == 'cached'
 
         # Discard the cache
-        cli.configure({
-            'artifacts': {'url': share.repo, 'push': True},
-            'artifactdir': os.path.join(cli.directory, 'artifacts2')
-        })
+        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
+        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
         assert cli.get_element_state(project, element_name) != 'cached'
 
         # Pull from cache, but do not include buildtrees.
=====================================
tests/integration/pullbuildtrees.py
=====================================
@@ -19,9 +19,10 @@ DATA_DIR = os.path.join(
 # cleared as just forcefully removing the refpath leaves dangling objects.
 def default_state(cli, tmpdir, share):
     shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+    shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
     cli.configure({
         'artifacts': {'url': share.repo, 'push': False},
-        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
+        'rootcachedir': str(tmpdir),
         'cache': {'pull-buildtrees': False},
     })
 
@@ -42,7 +43,7 @@ def test_pullbuildtrees(cli, tmpdir, datafiles):
             create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
         cli.configure({
             'artifacts': {'url': share1.repo, 'push': True},
-            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
+            'rootcachedir': str(tmpdir),
         })
 
         # Build autotools element, checked pushed, delete local
@@ -79,7 +80,7 @@ def test_pullbuildtrees(cli, tmpdir, datafiles):
         assert os.path.isdir(buildtreedir)
         default_state(cli, tmpdir, share1)
 
-        # Pull artifact with pullbuildtrees set in user config, then assert
+        # Pull artifact with pullbuildtrees set in user onfig, then assert
         # that pulling with the same user config doesn't create a pull job,
         # or when buildtrees cli flag is set.
         cli.configure({'cache': {'pull-buildtrees': True}})
=====================================
tests/integration/source-determinism.py
=====================================
@@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
                 return f.read()
         finally:
             os.umask(old_umask)
-            cache_dir = integration_cache.artifacts
-            cli.remove_artifact_from_cache(project, element_name,
-                                           cache_dir=cache_dir)
+            cli.remove_artifact_from_cache(project, element_name)
 
     assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
 
@@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
             with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
                 return f.read()
         finally:
-            cache_dir = integration_cache.artifacts
-            cli.remove_artifact_from_cache(project, element_name,
-                                           cache_dir=cache_dir)
+            cli.remove_artifact_from_cache(project, element_name)
 
     assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)
=====================================
tests/testutils/runcli.py
=====================================
@@ -246,10 +246,13 @@ class Cli():
     def remove_artifact_from_cache(self, project, element_name,
                                    *, cache_dir=None):
         if not cache_dir:
-            cache_dir = os.path.join(project, 'cache', 'artifacts')
+            cache_dir = os.path.join(project, 'cache')
 
         cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
 
+        # replace forward slashes
+        element_name = element_name.replace('/', '-')
+
         cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
         shutil.rmtree(cache_dir)
 
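The slash replacement matters for cross-junction elements, whose names contain '/'. A sketch of the ref path this helper computes and removes, with hypothetical inputs:

    import os

    cache_dir = os.path.join('/tmp/project', 'cache')    # hypothetical
    element_name = 'junction.bst:subdir/import-etc.bst'  # hypothetical cross-junction name

    refs_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
    ref_name = element_name.replace('/', '-')            # slashes would otherwise nest directories
    ref_path = os.path.splitext(os.path.join(refs_dir, 'test', ref_name))[0]
    print(ref_path)  # /tmp/project/cache/cas/refs/heads/test/junction.bst:subdir-import-etc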
@@ -526,9 +529,13 @@ def cli_integration(tmpdir, integration_cache):
     # to avoid downloading the huge base-sdk repeatedly
     fixture.configure({
         'sourcedir': integration_cache.sources,
-        'artifactdir': integration_cache.artifacts
     })
 
+    # For integration we want to clear the artifact dirs as they take up a lot
+    # of space.
+    shutil.rmtree(os.path.join(directory, 'cas'))
+    shutil.rmtree(os.path.join(directory, 'artifacts'))
+
     return fixture
 
 
@@ -569,10 +576,8 @@ def configured(directory, config=None):
 
     if not config.get('sourcedir', False):
         config['sourcedir'] = os.path.join(directory, 'sources')
-    if not config.get('builddir', False):
-        config['builddir'] = os.path.join(directory, 'build')
-    if not config.get('artifactdir', False):
-        config['artifactdir'] = os.path.join(directory, 'artifacts')
+    if not config.get('rootcachedir', False):
+        config['rootcachedir'] = directory
     if not config.get('logdir', False):
         config['logdir'] = os.path.join(directory, 'logs')