Will Salmon pushed to branch willsalmon/trackWarning at BuildStream / buildstream
Commits:
-
87edf67e
by Martin Blanchard at 2018-08-01T11:07:05Z
-
d10e4668
by Phillip Smyth at 2018-08-01T12:05:40Z
-
6aedc57b
by Jim MacArthur at 2018-08-01T12:07:32Z
-
1d3af84d
by Jim MacArthur at 2018-08-01T12:07:32Z
-
4397e45a
by Jim MacArthur at 2018-08-01T12:07:32Z
-
9bf343cf
by Jim MacArthur at 2018-08-01T12:07:32Z
-
f124dfec
by Jim MacArthur at 2018-08-01T12:07:32Z
-
42bdce0b
by Jim MacArthur at 2018-08-01T12:07:32Z
-
fe1a3428
by Jim MacArthur at 2018-08-01T12:07:33Z
-
849724ce
by Jim MacArthur at 2018-08-01T12:07:33Z
-
270325b4
by Jim MacArthur at 2018-08-01T12:07:33Z
-
a8c71ba4
by Jim MacArthur at 2018-08-01T12:07:33Z
-
4e6a9049
by Jim MacArthur at 2018-08-01T12:07:33Z
-
e69f2b23
by Jim MacArthur at 2018-08-01T12:07:33Z
-
6877b6fc
by Jim MacArthur at 2018-08-01T12:07:33Z
-
8edf1f80
by Jim MacArthur at 2018-08-01T12:07:33Z
-
02c6c846
by Jim MacArthur at 2018-08-01T12:07:33Z
-
eabc3899
by Jim MacArthur at 2018-08-01T13:16:04Z
-
80a3f630
by William Salmon at 2018-08-01T14:59:48Z
-
fef3f9e1
by William Salmon at 2018-08-01T15:00:28Z
-
030e4e3b
by William Salmon at 2018-08-01T15:00:45Z
25 changed files:
- buildstream/_exceptions.py
- buildstream/_frontend/complete.py
- buildstream/_pipeline.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/plugins/elements/compose.py
- buildstream/plugins/elements/import.py
- buildstream/plugins/elements/stack.py
- buildstream/plugins/sources/git.py
- buildstream/sandbox/_mount.py
- buildstream/sandbox/_sandboxbwrap.py
- buildstream/sandbox/_sandboxchroot.py
- buildstream/sandbox/sandbox.py
- buildstream/scriptelement.py
- + buildstream/storage/__init__.py
- + buildstream/storage/_filebaseddirectory.py
- + buildstream/storage/directory.py
- buildstream/utils.py
- tests/completions/completions.py
- + tests/completions/sub-folders/base/unwanted.bst
- + tests/completions/sub-folders/elements/base.bst
- + tests/completions/sub-folders/elements/base/wanted.bst
- + tests/completions/sub-folders/elements/hello.bst
- + tests/completions/sub-folders/project.conf
- tests/sources/git.py
Changes:
... | ... | @@ -88,6 +88,7 @@ class ErrorDomain(Enum): |
88 | 88 |
ELEMENT = 11
|
89 | 89 |
APP = 12
|
90 | 90 |
STREAM = 13
|
91 |
+ VIRTUAL_FS = 14
|
|
91 | 92 |
|
92 | 93 |
|
93 | 94 |
# BstError is an internal base exception class for BuildStream
|
... | ... | @@ -68,9 +68,10 @@ def complete_path(path_type, incomplete, base_directory='.'): |
68 | 68 |
# If there was nothing on the left of the last separator,
|
69 | 69 |
# we are completing files in the filesystem root
|
70 | 70 |
base_path = os.path.join(base_directory, base_path)
|
71 |
- |
|
72 |
- elif os.path.isdir(incomplete):
|
|
73 |
- base_path = incomplete
|
|
71 |
+ else:
|
|
72 |
+ incomplete_base_path = os.path.join(base_directory, incomplete)
|
|
73 |
+ if os.path.isdir(incomplete_base_path):
|
|
74 |
+ base_path = incomplete_base_path
|
|
74 | 75 |
|
75 | 76 |
try:
|
76 | 77 |
if base_path:
|
... | ... | @@ -385,7 +385,13 @@ class Pipeline(): |
385 | 385 |
detail = "Exact versions are missing for the following elements\n" + \
|
386 | 386 |
"Try tracking these elements first with `bst track`\n\n"
|
387 | 387 |
for element in inconsistent:
|
388 |
- detail += " " + element._get_full_name() + "\n"
|
|
388 |
+ detail += " " + element._get_full_name()
|
|
389 |
+ for source in element.sources():
|
|
390 |
+ if not source._get_consistency() and not source.get_ref():
|
|
391 |
+ if hasattr(source, 'tracking') and source.tracking is None:
|
|
392 |
+ detail += ": Is missing ref and track, please add a branch to track or a ref.\n"
|
|
393 |
+ detail += "\n"
|
|
394 |
+ |
|
389 | 395 |
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
|
390 | 396 |
|
391 | 397 |
# cleanup()
|
... | ... | @@ -407,15 +407,16 @@ class Stream(): |
407 | 407 |
integrate=integrate) as sandbox:
|
408 | 408 |
|
409 | 409 |
# Copy or move the sandbox to the target directory
|
410 |
- sandbox_root = sandbox.get_directory()
|
|
410 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
411 |
+ |
|
411 | 412 |
if not tar:
|
412 | 413 |
with target.timed_activity("Checking out files in '{}'"
|
413 | 414 |
.format(location)):
|
414 | 415 |
try:
|
415 | 416 |
if hardlinks:
|
416 |
- self._checkout_hardlinks(sandbox_root, location)
|
|
417 |
+ self._checkout_hardlinks(sandbox_vroot, location)
|
|
417 | 418 |
else:
|
418 |
- utils.copy_files(sandbox_root, location)
|
|
419 |
+ sandbox_vroot.export_files(location)
|
|
419 | 420 |
except OSError as e:
|
420 | 421 |
raise StreamError("Failed to checkout files: '{}'"
|
421 | 422 |
.format(e)) from e
|
... | ... | @@ -424,14 +425,12 @@ class Stream(): |
424 | 425 |
with target.timed_activity("Creating tarball"):
|
425 | 426 |
with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
|
426 | 427 |
with tarfile.open(fileobj=fo, mode="w|") as tf:
|
427 |
- Stream._add_directory_to_tarfile(
|
|
428 |
- tf, sandbox_root, '.')
|
|
428 |
+ sandbox_vroot.export_to_tar(tf, '.')
|
|
429 | 429 |
else:
|
430 | 430 |
with target.timed_activity("Creating tarball '{}'"
|
431 | 431 |
.format(location)):
|
432 | 432 |
with tarfile.open(location, "w:") as tf:
|
433 |
- Stream._add_directory_to_tarfile(
|
|
434 |
- tf, sandbox_root, '.')
|
|
433 |
+ sandbox_vroot.export_to_tar(tf, '.')
|
|
435 | 434 |
|
436 | 435 |
except BstError as e:
|
437 | 436 |
raise StreamError("Error while staging dependencies into a sandbox"
|
... | ... | @@ -1050,46 +1049,13 @@ class Stream(): |
1050 | 1049 |
|
1051 | 1050 |
# Helper function for checkout()
|
1052 | 1051 |
#
|
1053 |
- def _checkout_hardlinks(self, sandbox_root, directory):
|
|
1052 |
+ def _checkout_hardlinks(self, sandbox_vroot, directory):
|
|
1054 | 1053 |
try:
|
1055 |
- removed = utils.safe_remove(directory)
|
|
1054 |
+ utils.safe_remove(directory)
|
|
1056 | 1055 |
except OSError as e:
|
1057 | 1056 |
raise StreamError("Failed to remove checkout directory: {}".format(e)) from e
|
1058 | 1057 |
|
1059 |
- if removed:
|
|
1060 |
- # Try a simple rename of the sandbox root; if that
|
|
1061 |
- # doesnt cut it, then do the regular link files code path
|
|
1062 |
- try:
|
|
1063 |
- os.rename(sandbox_root, directory)
|
|
1064 |
- except OSError:
|
|
1065 |
- os.makedirs(directory, exist_ok=True)
|
|
1066 |
- utils.link_files(sandbox_root, directory)
|
|
1067 |
- else:
|
|
1068 |
- utils.link_files(sandbox_root, directory)
|
|
1069 |
- |
|
1070 |
- # Add a directory entry deterministically to a tar file
|
|
1071 |
- #
|
|
1072 |
- # This function takes extra steps to ensure the output is deterministic.
|
|
1073 |
- # First, it sorts the results of os.listdir() to ensure the ordering of
|
|
1074 |
- # the files in the archive is the same. Second, it sets a fixed
|
|
1075 |
- # timestamp for each entry. See also https://bugs.python.org/issue24465.
|
|
1076 |
- @staticmethod
|
|
1077 |
- def _add_directory_to_tarfile(tf, dir_name, dir_arcname, mtime=0):
|
|
1078 |
- for filename in sorted(os.listdir(dir_name)):
|
|
1079 |
- name = os.path.join(dir_name, filename)
|
|
1080 |
- arcname = os.path.join(dir_arcname, filename)
|
|
1081 |
- |
|
1082 |
- tarinfo = tf.gettarinfo(name, arcname)
|
|
1083 |
- tarinfo.mtime = mtime
|
|
1084 |
- |
|
1085 |
- if tarinfo.isreg():
|
|
1086 |
- with open(name, "rb") as f:
|
|
1087 |
- tf.addfile(tarinfo, f)
|
|
1088 |
- elif tarinfo.isdir():
|
|
1089 |
- tf.addfile(tarinfo)
|
|
1090 |
- Stream._add_directory_to_tarfile(tf, name, arcname, mtime)
|
|
1091 |
- else:
|
|
1092 |
- tf.addfile(tarinfo)
|
|
1058 |
+ sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
|
|
1093 | 1059 |
|
1094 | 1060 |
# Write the element build script to the given directory
|
1095 | 1061 |
def _write_element_script(self, directory, element):
|
... | ... | @@ -80,7 +80,6 @@ from collections import Mapping, OrderedDict |
80 | 80 |
from contextlib import contextmanager
|
81 | 81 |
from enum import Enum
|
82 | 82 |
import tempfile
|
83 |
-import time
|
|
84 | 83 |
import shutil
|
85 | 84 |
|
86 | 85 |
from . import _yaml
|
... | ... | @@ -97,6 +96,9 @@ from . import _site |
97 | 96 |
from ._platform import Platform
|
98 | 97 |
from .sandbox._config import SandboxConfig
|
99 | 98 |
|
99 |
+from .storage.directory import Directory
|
|
100 |
+from .storage._filebaseddirectory import FileBasedDirectory, VirtualDirectoryError
|
|
101 |
+ |
|
100 | 102 |
|
101 | 103 |
# _KeyStrength():
|
102 | 104 |
#
|
... | ... | @@ -195,6 +197,13 @@ class Element(Plugin): |
195 | 197 |
*Since: 1.2*
|
196 | 198 |
"""
|
197 | 199 |
|
200 |
+ BST_VIRTUAL_DIRECTORY = False
|
|
201 |
+ """Whether to raise exceptions if an element uses Sandbox.get_directory
|
|
202 |
+ instead of Sandbox.get_virtual_directory.
|
|
203 |
+ |
|
204 |
+ *Since: 1.4*
|
|
205 |
+ """
|
|
206 |
+ |
|
198 | 207 |
def __init__(self, context, project, artifacts, meta, plugin_conf):
|
199 | 208 |
|
200 | 209 |
self.__cache_key_dict = None # Dict for cache key calculation
|
... | ... | @@ -627,10 +636,10 @@ class Element(Plugin): |
627 | 636 |
|
628 | 637 |
# Hard link it into the staging area
|
629 | 638 |
#
|
630 |
- basedir = sandbox.get_directory()
|
|
631 |
- stagedir = basedir \
|
|
639 |
+ vbasedir = sandbox.get_virtual_directory()
|
|
640 |
+ vstagedir = vbasedir \
|
|
632 | 641 |
if path is None \
|
633 |
- else os.path.join(basedir, path.lstrip(os.sep))
|
|
642 |
+ else vbasedir.descend(path.lstrip(os.sep).split(os.sep))
|
|
634 | 643 |
|
635 | 644 |
files = list(self.__compute_splits(include, exclude, orphans))
|
636 | 645 |
|
... | ... | @@ -642,15 +651,8 @@ class Element(Plugin): |
642 | 651 |
link_files = files
|
643 | 652 |
copy_files = []
|
644 | 653 |
|
645 |
- link_result = utils.link_files(artifact, stagedir, files=link_files,
|
|
646 |
- report_written=True)
|
|
647 |
- copy_result = utils.copy_files(artifact, stagedir, files=copy_files,
|
|
648 |
- report_written=True)
|
|
649 |
- |
|
650 |
- cur_time = time.time()
|
|
651 |
- |
|
652 |
- for f in copy_result.files_written:
|
|
653 |
- os.utime(os.path.join(stagedir, f), times=(cur_time, cur_time))
|
|
654 |
+ link_result = vstagedir.import_files(artifact, files=link_files, report_written=True, can_link=True)
|
|
655 |
+ copy_result = vstagedir.import_files(artifact, files=copy_files, report_written=True, update_utimes=True)
|
|
654 | 656 |
|
655 | 657 |
return link_result.combine(copy_result)
|
656 | 658 |
|
... | ... | @@ -1359,40 +1361,45 @@ class Element(Plugin): |
1359 | 1361 |
sandbox._set_mount_source(directory, workspace.get_absolute_path())
|
1360 | 1362 |
|
1361 | 1363 |
# Stage all sources that need to be copied
|
1362 |
- sandbox_root = sandbox.get_directory()
|
|
1363 |
- host_directory = os.path.join(sandbox_root, directory.lstrip(os.sep))
|
|
1364 |
- self._stage_sources_at(host_directory, mount_workspaces=mount_workspaces)
|
|
1364 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
1365 |
+ host_vdirectory = sandbox_vroot.descend(directory.lstrip(os.sep).split(os.sep), create=True)
|
|
1366 |
+ self._stage_sources_at(host_vdirectory, mount_workspaces=mount_workspaces)
|
|
1365 | 1367 |
|
1366 | 1368 |
# _stage_sources_at():
|
1367 | 1369 |
#
|
1368 | 1370 |
# Stage this element's sources to a directory
|
1369 | 1371 |
#
|
1370 | 1372 |
# Args:
|
1371 |
- # directory (str): An absolute path to stage the sources at
|
|
1373 |
+ # vdirectory (:class:`.storage.Directory`): A virtual directory object to stage sources into.
|
|
1372 | 1374 |
# mount_workspaces (bool): mount workspaces if True, copy otherwise
|
1373 | 1375 |
#
|
1374 |
- def _stage_sources_at(self, directory, mount_workspaces=True):
|
|
1376 |
+ def _stage_sources_at(self, vdirectory, mount_workspaces=True):
|
|
1375 | 1377 |
with self.timed_activity("Staging sources", silent_nested=True):
|
1376 | 1378 |
|
1377 |
- if os.path.isdir(directory) and os.listdir(directory):
|
|
1378 |
- raise ElementError("Staging directory '{}' is not empty".format(directory))
|
|
1379 |
- |
|
1380 |
- workspace = self._get_workspace()
|
|
1381 |
- if workspace:
|
|
1382 |
- # If mount_workspaces is set and we're doing incremental builds,
|
|
1383 |
- # the workspace is already mounted into the sandbox.
|
|
1384 |
- if not (mount_workspaces and self.__can_build_incrementally()):
|
|
1385 |
- with self.timed_activity("Staging local files at {}".format(workspace.path)):
|
|
1386 |
- workspace.stage(directory)
|
|
1387 |
- else:
|
|
1388 |
- # No workspace, stage directly
|
|
1389 |
- for source in self.sources():
|
|
1390 |
- source._stage(directory)
|
|
1391 |
- |
|
1379 |
+ if not isinstance(vdirectory, Directory):
|
|
1380 |
+ vdirectory = FileBasedDirectory(vdirectory)
|
|
1381 |
+ if not vdirectory.is_empty():
|
|
1382 |
+ raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
|
|
1383 |
+ |
|
1384 |
+ with tempfile.TemporaryDirectory() as temp_staging_directory:
|
|
1385 |
+ |
|
1386 |
+ workspace = self._get_workspace()
|
|
1387 |
+ if workspace:
|
|
1388 |
+ # If mount_workspaces is set and we're doing incremental builds,
|
|
1389 |
+ # the workspace is already mounted into the sandbox.
|
|
1390 |
+ if not (mount_workspaces and self.__can_build_incrementally()):
|
|
1391 |
+ with self.timed_activity("Staging local files at {}".format(workspace.path)):
|
|
1392 |
+ workspace.stage(temp_staging_directory)
|
|
1393 |
+ else:
|
|
1394 |
+ # No workspace, stage directly
|
|
1395 |
+ for source in self.sources():
|
|
1396 |
+ source._stage(temp_staging_directory)
|
|
1397 |
+ |
|
1398 |
+ vdirectory.import_files(temp_staging_directory)
|
|
1392 | 1399 |
# Ensure deterministic mtime of sources at build time
|
1393 |
- utils._set_deterministic_mtime(directory)
|
|
1400 |
+ vdirectory.set_deterministic_mtime()
|
|
1394 | 1401 |
# Ensure deterministic owners of sources at build time
|
1395 |
- utils._set_deterministic_user(directory)
|
|
1402 |
+ vdirectory.set_deterministic_user()
|
|
1396 | 1403 |
|
1397 | 1404 |
# _set_required():
|
1398 | 1405 |
#
|
... | ... | @@ -1508,7 +1515,7 @@ class Element(Plugin): |
1508 | 1515 |
with _signals.terminator(cleanup_rootdir), \
|
1509 | 1516 |
self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox: # nopep8
|
1510 | 1517 |
|
1511 |
- sandbox_root = sandbox.get_directory()
|
|
1518 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
1512 | 1519 |
|
1513 | 1520 |
# By default, the dynamic public data is the same as the static public data.
|
1514 | 1521 |
# The plugin's assemble() method may modify this, though.
|
... | ... | @@ -1540,11 +1547,11 @@ class Element(Plugin): |
1540 | 1547 |
#
|
1541 | 1548 |
workspace = self._get_workspace()
|
1542 | 1549 |
if workspace and self.__staged_sources_directory:
|
1543 |
- sandbox_root = sandbox.get_directory()
|
|
1544 |
- sandbox_path = os.path.join(sandbox_root,
|
|
1545 |
- self.__staged_sources_directory.lstrip(os.sep))
|
|
1550 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
1551 |
+ path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
|
|
1552 |
+ sandbox_vpath = sandbox_vroot.descend(path_components)
|
|
1546 | 1553 |
try:
|
1547 |
- utils.copy_files(workspace.path, sandbox_path)
|
|
1554 |
+ sandbox_vpath.import_files(workspace.path)
|
|
1548 | 1555 |
except UtilError as e:
|
1549 | 1556 |
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
|
1550 | 1557 |
.format(e))
|
... | ... | @@ -1556,7 +1563,11 @@ class Element(Plugin): |
1556 | 1563 |
raise
|
1557 | 1564 |
finally:
|
1558 | 1565 |
if collect is not None:
|
1559 |
- collectdir = os.path.join(sandbox_root, collect.lstrip(os.sep))
|
|
1566 |
+ try:
|
|
1567 |
+ collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
|
|
1568 |
+ except VirtualDirectoryError:
|
|
1569 |
+ # No collect directory existed
|
|
1570 |
+ collectvdir = None
|
|
1560 | 1571 |
|
1561 | 1572 |
# Create artifact directory structure
|
1562 | 1573 |
assembledir = os.path.join(rootdir, 'artifact')
|
... | ... | @@ -1565,20 +1576,26 @@ class Element(Plugin): |
1565 | 1576 |
metadir = os.path.join(assembledir, 'meta')
|
1566 | 1577 |
buildtreedir = os.path.join(assembledir, 'buildtree')
|
1567 | 1578 |
os.mkdir(assembledir)
|
1568 |
- if collect is not None and os.path.exists(collectdir):
|
|
1579 |
+ if collect is not None and collectvdir is not None:
|
|
1569 | 1580 |
os.mkdir(filesdir)
|
1570 | 1581 |
os.mkdir(logsdir)
|
1571 | 1582 |
os.mkdir(metadir)
|
1572 | 1583 |
os.mkdir(buildtreedir)
|
1573 | 1584 |
|
1574 | 1585 |
# Hard link files from collect dir to files directory
|
1575 |
- if collect is not None and os.path.exists(collectdir):
|
|
1576 |
- utils.link_files(collectdir, filesdir)
|
|
1577 |
- |
|
1578 |
- sandbox_build_dir = os.path.join(sandbox_root, self.get_variable('build-root').lstrip(os.sep))
|
|
1579 |
- # Hard link files from build-root dir to buildtreedir directory
|
|
1580 |
- if os.path.isdir(sandbox_build_dir):
|
|
1581 |
- utils.link_files(sandbox_build_dir, buildtreedir)
|
|
1586 |
+ if collect is not None and collectvdir is not None:
|
|
1587 |
+ collectvdir.export_files(filesdir, can_link=True)
|
|
1588 |
+ |
|
1589 |
+ try:
|
|
1590 |
+ sandbox_build_dir = sandbox_vroot.descend(
|
|
1591 |
+ self.get_variable('build-root').lstrip(os.sep).split(os.sep))
|
|
1592 |
+ # Hard link files from build-root dir to buildtreedir directory
|
|
1593 |
+ sandbox_build_dir.export_files(buildtreedir)
|
|
1594 |
+ except VirtualDirectoryError:
|
|
1595 |
+ # Directory could not be found. Pre-virtual
|
|
1596 |
+ # directory behaviour was to continue silently
|
|
1597 |
+ # if the directory could not be found.
|
|
1598 |
+ pass
|
|
1582 | 1599 |
|
1583 | 1600 |
# Copy build log
|
1584 | 1601 |
log_filename = context.get_log_filename()
|
... | ... | @@ -1626,7 +1643,7 @@ class Element(Plugin): |
1626 | 1643 |
self.__artifact_size = utils._get_dir_size(assembledir)
|
1627 | 1644 |
self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
|
1628 | 1645 |
|
1629 |
- if collect is not None and not os.path.exists(collectdir):
|
|
1646 |
+ if collect is not None and collectvdir is None:
|
|
1630 | 1647 |
raise ElementError(
|
1631 | 1648 |
"Directory '{}' was not found inside the sandbox, "
|
1632 | 1649 |
"unable to collect artifact contents"
|
... | ... | @@ -2126,7 +2143,8 @@ class Element(Plugin): |
2126 | 2143 |
directory,
|
2127 | 2144 |
stdout=stdout,
|
2128 | 2145 |
stderr=stderr,
|
2129 |
- config=config)
|
|
2146 |
+ config=config,
|
|
2147 |
+ allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
|
|
2130 | 2148 |
yield sandbox
|
2131 | 2149 |
|
2132 | 2150 |
else:
|
... | ... | @@ -34,7 +34,6 @@ The default configuration and possible options are as such: |
34 | 34 |
"""
|
35 | 35 |
|
36 | 36 |
import os
|
37 |
-from buildstream import utils
|
|
38 | 37 |
from buildstream import Element, Scope
|
39 | 38 |
|
40 | 39 |
|
... | ... | @@ -56,6 +55,9 @@ class ComposeElement(Element): |
56 | 55 |
# added, to reduce the potential for confusion
|
57 | 56 |
BST_FORBID_SOURCES = True
|
58 | 57 |
|
58 |
+ # This plugin has been modified to avoid the use of Sandbox.get_directory
|
|
59 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
60 |
+ |
|
59 | 61 |
def configure(self, node):
|
60 | 62 |
self.node_validate(node, [
|
61 | 63 |
'integrate', 'include', 'exclude', 'include-orphans'
|
... | ... | @@ -104,7 +106,8 @@ class ComposeElement(Element): |
104 | 106 |
orphans=self.include_orphans)
|
105 | 107 |
manifest.update(files)
|
106 | 108 |
|
107 |
- basedir = sandbox.get_directory()
|
|
109 |
+ # Make a snapshot of all the files.
|
|
110 |
+ vbasedir = sandbox.get_virtual_directory()
|
|
108 | 111 |
modified_files = set()
|
109 | 112 |
removed_files = set()
|
110 | 113 |
added_files = set()
|
... | ... | @@ -116,38 +119,24 @@ class ComposeElement(Element): |
116 | 119 |
if require_split:
|
117 | 120 |
|
118 | 121 |
# Make a snapshot of all the files before integration-commands are run.
|
119 |
- snapshot = {
|
|
120 |
- f: getmtime(os.path.join(basedir, f))
|
|
121 |
- for f in utils.list_relative_paths(basedir)
|
|
122 |
- }
|
|
122 |
+ snapshot = set(vbasedir.list_relative_paths())
|
|
123 |
+ vbasedir.mark_unmodified()
|
|
123 | 124 |
|
124 | 125 |
for dep in self.dependencies(Scope.BUILD):
|
125 | 126 |
dep.integrate(sandbox)
|
126 | 127 |
|
127 | 128 |
if require_split:
|
128 |
- |
|
129 | 129 |
# Calculate added, modified and removed files
|
130 |
- basedir_contents = set(utils.list_relative_paths(basedir))
|
|
130 |
+ post_integration_snapshot = vbasedir.list_relative_paths()
|
|
131 |
+ modified_files = set(vbasedir.list_modified_paths())
|
|
132 |
+ basedir_contents = set(post_integration_snapshot)
|
|
131 | 133 |
for path in manifest:
|
132 |
- if path in basedir_contents:
|
|
133 |
- if path in snapshot:
|
|
134 |
- preintegration_mtime = snapshot[path]
|
|
135 |
- if preintegration_mtime != getmtime(os.path.join(basedir, path)):
|
|
136 |
- modified_files.add(path)
|
|
137 |
- else:
|
|
138 |
- # If the path appears in the manifest but not the initial snapshot,
|
|
139 |
- # it may be a file staged inside a directory symlink. In this case
|
|
140 |
- # the path we got from the manifest won't show up in the snapshot
|
|
141 |
- # because utils.list_relative_paths() doesn't recurse into symlink
|
|
142 |
- # directories.
|
|
143 |
- pass
|
|
144 |
- elif path in snapshot:
|
|
134 |
+ if path in snapshot and path not in basedir_contents:
|
|
145 | 135 |
removed_files.add(path)
|
146 | 136 |
|
147 | 137 |
for path in basedir_contents:
|
148 | 138 |
if path not in snapshot:
|
149 | 139 |
added_files.add(path)
|
150 |
- |
|
151 | 140 |
self.info("Integration modified {}, added {} and removed {} files"
|
152 | 141 |
.format(len(modified_files), len(added_files), len(removed_files)))
|
153 | 142 |
|
... | ... | @@ -166,8 +155,7 @@ class ComposeElement(Element): |
166 | 155 |
# instead of into a subdir. The element assemble() method should
|
167 | 156 |
# support this in some way.
|
168 | 157 |
#
|
169 |
- installdir = os.path.join(basedir, 'buildstream', 'install')
|
|
170 |
- os.makedirs(installdir, exist_ok=True)
|
|
158 |
+ installdir = vbasedir.descend(['buildstream', 'install'], create=True)
|
|
171 | 159 |
|
172 | 160 |
# We already saved the manifest for created files in the integration phase,
|
173 | 161 |
# now collect the rest of the manifest.
|
... | ... | @@ -191,19 +179,12 @@ class ComposeElement(Element): |
191 | 179 |
|
192 | 180 |
with self.timed_activity("Creating composition", detail=detail, silent_nested=True):
|
193 | 181 |
self.info("Composing {} files".format(len(manifest)))
|
194 |
- utils.link_files(basedir, installdir, files=manifest)
|
|
182 |
+ installdir.import_files(vbasedir, files=manifest, can_link=True)
|
|
195 | 183 |
|
196 | 184 |
# And we're done
|
197 | 185 |
return os.path.join(os.sep, 'buildstream', 'install')
|
198 | 186 |
|
199 | 187 |
|
200 |
-# Like os.path.getmtime(), but doesnt explode on symlinks
|
|
201 |
-#
|
|
202 |
-def getmtime(path):
|
|
203 |
- stat = os.lstat(path)
|
|
204 |
- return stat.st_mtime
|
|
205 |
- |
|
206 |
- |
|
207 | 188 |
# Plugin entry point
|
208 | 189 |
def setup():
|
209 | 190 |
return ComposeElement
|
... | ... | @@ -31,7 +31,6 @@ The empty configuration is as such: |
31 | 31 |
"""
|
32 | 32 |
|
33 | 33 |
import os
|
34 |
-import shutil
|
|
35 | 34 |
from buildstream import Element, BuildElement, ElementError
|
36 | 35 |
|
37 | 36 |
|
... | ... | @@ -39,6 +38,9 @@ from buildstream import Element, BuildElement, ElementError |
39 | 38 |
class ImportElement(BuildElement):
|
40 | 39 |
# pylint: disable=attribute-defined-outside-init
|
41 | 40 |
|
41 |
+ # This plugin has been modified to avoid the use of Sandbox.get_directory
|
|
42 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
43 |
+ |
|
42 | 44 |
def configure(self, node):
|
43 | 45 |
self.source = self.node_subst_member(node, 'source')
|
44 | 46 |
self.target = self.node_subst_member(node, 'target')
|
... | ... | @@ -68,27 +70,22 @@ class ImportElement(BuildElement): |
68 | 70 |
# Do not mount workspaces as the files are copied from outside the sandbox
|
69 | 71 |
self._stage_sources_in_sandbox(sandbox, 'input', mount_workspaces=False)
|
70 | 72 |
|
71 |
- rootdir = sandbox.get_directory()
|
|
72 |
- inputdir = os.path.join(rootdir, 'input')
|
|
73 |
- outputdir = os.path.join(rootdir, 'output')
|
|
73 |
+ rootdir = sandbox.get_virtual_directory()
|
|
74 |
+ inputdir = rootdir.descend(['input'])
|
|
75 |
+ outputdir = rootdir.descend(['output'], create=True)
|
|
74 | 76 |
|
75 | 77 |
# The directory to grab
|
76 |
- inputdir = os.path.join(inputdir, self.source.lstrip(os.sep))
|
|
77 |
- inputdir = inputdir.rstrip(os.sep)
|
|
78 |
+ inputdir = inputdir.descend(self.source.strip(os.sep).split(os.sep))
|
|
78 | 79 |
|
79 | 80 |
# The output target directory
|
80 |
- outputdir = os.path.join(outputdir, self.target.lstrip(os.sep))
|
|
81 |
- outputdir = outputdir.rstrip(os.sep)
|
|
82 |
- |
|
83 |
- # Ensure target directory parent
|
|
84 |
- os.makedirs(os.path.dirname(outputdir), exist_ok=True)
|
|
81 |
+ outputdir = outputdir.descend(self.target.strip(os.sep).split(os.sep), create=True)
|
|
85 | 82 |
|
86 |
- if not os.path.exists(inputdir):
|
|
83 |
+ if inputdir.is_empty():
|
|
87 | 84 |
raise ElementError("{}: No files were found inside directory '{}'"
|
88 | 85 |
.format(self, self.source))
|
89 | 86 |
|
90 | 87 |
# Move it over
|
91 |
- shutil.move(inputdir, outputdir)
|
|
88 |
+ outputdir.import_files(inputdir)
|
|
92 | 89 |
|
93 | 90 |
# And we're done
|
94 | 91 |
return '/output'
|
... | ... | @@ -24,13 +24,15 @@ Stack elements are simply a symbolic element used for representing |
24 | 24 |
a logical group of elements.
|
25 | 25 |
"""
|
26 | 26 |
|
27 |
-import os
|
|
28 | 27 |
from buildstream import Element
|
29 | 28 |
|
30 | 29 |
|
31 | 30 |
# Element implementation for the 'stack' kind.
|
32 | 31 |
class StackElement(Element):
|
33 | 32 |
|
33 |
+ # This plugin has been modified to avoid the use of Sandbox.get_directory
|
|
34 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
35 |
+ |
|
34 | 36 |
def configure(self, node):
|
35 | 37 |
pass
|
36 | 38 |
|
... | ... | @@ -52,7 +54,7 @@ class StackElement(Element): |
52 | 54 |
|
53 | 55 |
# Just create a dummy empty artifact, its existence is a statement
|
54 | 56 |
# that all this stack's dependencies are built.
|
55 |
- rootdir = sandbox.get_directory()
|
|
57 |
+ vrootdir = sandbox.get_virtual_directory()
|
|
56 | 58 |
|
57 | 59 |
# XXX FIXME: This is currently needed because the artifact
|
58 | 60 |
# cache wont let us commit an empty artifact.
|
... | ... | @@ -61,10 +63,7 @@ class StackElement(Element): |
61 | 63 |
# the actual artifact data in a subdirectory, then we
|
62 | 64 |
# will be able to store some additional state in the
|
63 | 65 |
# artifact cache, and we can also remove this hack.
|
64 |
- outputdir = os.path.join(rootdir, 'output', 'bst')
|
|
65 |
- |
|
66 |
- # Ensure target directory parent
|
|
67 |
- os.makedirs(os.path.dirname(outputdir), exist_ok=True)
|
|
66 |
+ vrootdir.descend(['output', 'bst'], create=True)
|
|
68 | 67 |
|
69 | 68 |
# And we're done
|
70 | 69 |
return '/output'
|
... | ... | @@ -363,6 +363,12 @@ class GitSource(Source): |
363 | 363 |
|
364 | 364 |
# If self.tracking is not specified it's not an error, just silently return
|
365 | 365 |
if not self.tracking:
|
366 |
+ # Is there a better way to check if a ref is given?
|
|
367 |
+ if self.mirror.ref is None:
|
|
368 |
+ detail = 'Without a tracking branch ref can not be updated. Please ' + \
|
|
369 |
+ 'provide a ref or a track.'
|
|
370 |
+ raise SourceError("{}: No track or ref".format(self),
|
|
371 |
+ detail=detail, reason="track-attempt-no-track")
|
|
366 | 372 |
return None
|
367 | 373 |
|
368 | 374 |
with self.timed_activity("Tracking {} from {}"
|
... | ... | @@ -32,7 +32,8 @@ from .._fuse import SafeHardlinks |
32 | 32 |
class Mount():
|
33 | 33 |
def __init__(self, sandbox, mount_point, safe_hardlinks):
|
34 | 34 |
scratch_directory = sandbox._get_scratch_directory()
|
35 |
- root_directory = sandbox.get_directory()
|
|
35 |
+ # Getting external_directory here is acceptable as we're part of the sandbox code.
|
|
36 |
+ root_directory = sandbox.get_virtual_directory().external_directory
|
|
36 | 37 |
|
37 | 38 |
self.mount_point = mount_point
|
38 | 39 |
self.safe_hardlinks = safe_hardlinks
|
... | ... | @@ -56,7 +56,9 @@ class SandboxBwrap(Sandbox): |
56 | 56 |
|
57 | 57 |
def run(self, command, flags, *, cwd=None, env=None):
|
58 | 58 |
stdout, stderr = self._get_output()
|
59 |
- root_directory = self.get_directory()
|
|
59 |
+ |
|
60 |
+ # Allowable access to underlying storage as we're part of the sandbox
|
|
61 |
+ root_directory = self.get_virtual_directory().external_directory
|
|
60 | 62 |
|
61 | 63 |
# Fallback to the sandbox default settings for
|
62 | 64 |
# the cwd and env.
|
... | ... | @@ -90,7 +90,7 @@ class SandboxChroot(Sandbox): |
90 | 90 |
# Nonetheless a better solution could perhaps be found.
|
91 | 91 |
|
92 | 92 |
rootfs = stack.enter_context(utils._tempdir(dir='/var/run/buildstream'))
|
93 |
- stack.enter_context(self.create_devices(self.get_directory(), flags))
|
|
93 |
+ stack.enter_context(self.create_devices(self._root, flags))
|
|
94 | 94 |
stack.enter_context(self.mount_dirs(rootfs, flags, stdout, stderr))
|
95 | 95 |
|
96 | 96 |
if flags & SandboxFlags.INTERACTIVE:
|
... | ... | @@ -29,7 +29,8 @@ See also: :ref:`sandboxing`. |
29 | 29 |
"""
|
30 | 30 |
|
31 | 31 |
import os
|
32 |
-from .._exceptions import ImplError
|
|
32 |
+from .._exceptions import ImplError, BstError
|
|
33 |
+from ..storage._filebaseddirectory import FileBasedDirectory
|
|
33 | 34 |
|
34 | 35 |
|
35 | 36 |
class SandboxFlags():
|
... | ... | @@ -90,28 +91,50 @@ class Sandbox(): |
90 | 91 |
self.__cwd = None
|
91 | 92 |
self.__env = None
|
92 | 93 |
self.__mount_sources = {}
|
94 |
+ self.__allow_real_directory = kwargs['allow_real_directory']
|
|
95 |
+ |
|
93 | 96 |
# Configuration from kwargs common to all subclasses
|
94 | 97 |
self.__config = kwargs['config']
|
95 | 98 |
self.__stdout = kwargs['stdout']
|
96 | 99 |
self.__stderr = kwargs['stderr']
|
97 | 100 |
|
98 |
- # Setup the directories
|
|
101 |
+ # Setup the directories. Root should be available to subclasses, hence
|
|
102 |
+ # being single-underscore. The others are private to this class.
|
|
103 |
+ self._root = os.path.join(directory, 'root')
|
|
99 | 104 |
self.__directory = directory
|
100 |
- self.__root = os.path.join(self.__directory, 'root')
|
|
101 | 105 |
self.__scratch = os.path.join(self.__directory, 'scratch')
|
102 |
- for directory_ in [self.__root, self.__scratch]:
|
|
106 |
+ for directory_ in [self._root, self.__scratch]:
|
|
103 | 107 |
os.makedirs(directory_, exist_ok=True)
|
104 | 108 |
|
105 | 109 |
def get_directory(self):
|
106 | 110 |
"""Fetches the sandbox root directory
|
107 | 111 |
|
108 | 112 |
The root directory is where artifacts for the base
|
109 |
- runtime environment should be staged.
|
|
113 |
+ runtime environment should be staged. Only works if
|
|
114 |
+ BST_VIRTUAL_DIRECTORY is not set.
|
|
110 | 115 |
|
111 | 116 |
Returns:
|
112 | 117 |
(str): The sandbox root directory
|
118 |
+ |
|
119 |
+ """
|
|
120 |
+ if self.__allow_real_directory:
|
|
121 |
+ return self._root
|
|
122 |
+ else:
|
|
123 |
+ raise BstError("You can't use get_directory")
|
|
124 |
+ |
|
125 |
+ def get_virtual_directory(self):
|
|
126 |
+ """Fetches the sandbox root directory
|
|
127 |
+ |
|
128 |
+ The root directory is where artifacts for the base
|
|
129 |
+ runtime environment should be staged. Unlike get_directory(),
|
|
130 |
+ this works regardless of whether BST_VIRTUAL_DIRECTORY is set.
|
|
131 |
+ |
|
132 |
+ Returns:
|
|
133 |
+ (Directory): A virtual Directory object for the sandbox root
|
|
134 |
+ |
|
113 | 135 |
"""
|
114 |
- return self.__root
|
|
136 |
+ # For now, just create a new Directory every time we're asked
|
|
137 |
+ return FileBasedDirectory(self._root)
|
|
115 | 138 |
|
116 | 139 |
def set_environment(self, environment):
|
117 | 140 |
"""Sets the environment variables for the sandbox
|
... | ... | @@ -293,11 +316,11 @@ class Sandbox(): |
293 | 316 |
def _has_command(self, command, env=None):
|
294 | 317 |
if os.path.isabs(command):
|
295 | 318 |
return os.path.exists(os.path.join(
|
296 |
- self.get_directory(), command.lstrip(os.sep)))
|
|
319 |
+ self._root, command.lstrip(os.sep)))
|
|
297 | 320 |
|
298 | 321 |
for path in env.get('PATH').split(':'):
|
299 | 322 |
if os.path.exists(os.path.join(
|
300 |
- self.get_directory(), path.lstrip(os.sep), command)):
|
|
323 |
+ self._root, path.lstrip(os.sep), command)):
|
|
301 | 324 |
return True
|
302 | 325 |
|
303 | 326 |
return False
|
... | ... | @@ -243,9 +243,8 @@ class ScriptElement(Element): |
243 | 243 |
with self.timed_activity("Staging {} at {}"
|
244 | 244 |
.format(element.name, item['destination']),
|
245 | 245 |
silent_nested=True):
|
246 |
- real_dstdir = os.path.join(sandbox.get_directory(),
|
|
247 |
- item['destination'].lstrip(os.sep))
|
|
248 |
- os.makedirs(os.path.dirname(real_dstdir), exist_ok=True)
|
|
246 |
+ virtual_dstdir = sandbox.get_virtual_directory()
|
|
247 |
+ virtual_dstdir.descend(item['destination'].lstrip(os.sep).split(os.sep), create=True)
|
|
249 | 248 |
element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item['destination'])
|
250 | 249 |
|
251 | 250 |
for item in self.__layout:
|
... | ... | @@ -263,8 +262,8 @@ class ScriptElement(Element): |
263 | 262 |
for dep in element.dependencies(Scope.RUN):
|
264 | 263 |
dep.integrate(sandbox)
|
265 | 264 |
|
266 |
- os.makedirs(os.path.join(sandbox.get_directory(), self.__install_root.lstrip(os.sep)),
|
|
267 |
- exist_ok=True)
|
|
265 |
+ install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
|
|
266 |
+ sandbox.get_virtual_directory().descend(install_root_path_components, create=True)
|
|
268 | 267 |
|
269 | 268 |
def assemble(self, sandbox):
|
270 | 269 |
|
1 |
+#!/usr/bin/env python3
|
|
2 |
+#
|
|
3 |
+# Copyright (C) 2018 Bloomberg Finance LP
|
|
4 |
+#
|
|
5 |
+# This program is free software; you can redistribute it and/or
|
|
6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
7 |
+# License as published by the Free Software Foundation; either
|
|
8 |
+# version 2 of the License, or (at your option) any later version.
|
|
9 |
+#
|
|
10 |
+# This library is distributed in the hope that it will be useful,
|
|
11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
13 |
+# Lesser General Public License for more details.
|
|
14 |
+#
|
|
15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
17 |
+#
|
|
18 |
+# Authors:
|
|
19 |
+# Jim MacArthur <jim macarthur codethink co uk>
|
|
20 |
+ |
|
21 |
+from ._filebaseddirectory import FileBasedDirectory
|
|
22 |
+from .directory import Directory
|
1 |
+#!/usr/bin/env python3
|
|
2 |
+#
|
|
3 |
+# Copyright (C) 2018 Bloomberg Finance LP
|
|
4 |
+#
|
|
5 |
+# This program is free software; you can redistribute it and/or
|
|
6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
7 |
+# License as published by the Free Software Foundation; either
|
|
8 |
+# version 2 of the License, or (at your option) any later version.
|
|
9 |
+#
|
|
10 |
+# This library is distributed in the hope that it will be useful,
|
|
11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
13 |
+# Lesser General Public License for more details.
|
|
14 |
+#
|
|
15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
17 |
+#
|
|
18 |
+# Authors:
|
|
19 |
+# Jim MacArthur <jim macarthur codethink co uk>
|
|
20 |
+ |
|
21 |
+"""
|
|
22 |
+FileBasedDirectory
|
|
23 |
+==================
|
|
24 |
+ |
|
25 |
+Implementation of the Directory class which backs onto a normal POSIX filing system.
|
|
26 |
+ |
|
27 |
+See also: :ref:`sandboxing`.
|
|
28 |
+"""
|
|
29 |
+ |
|
30 |
+import os
|
|
31 |
+import time
|
|
32 |
+from .._exceptions import BstError, ErrorDomain
|
|
33 |
+from .directory import Directory
|
|
34 |
+from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
|
|
35 |
+from ..utils import _set_deterministic_user, _set_deterministic_mtime
|
|
36 |
+ |
|
37 |
+ |
|
38 |
+class VirtualDirectoryError(BstError):
|
|
39 |
+ """Raised by Directory functions when system calls fail.
|
|
40 |
+ This will be handled internally by the BuildStream core,
|
|
41 |
+ if you need to handle this error, then it should be reraised,
|
|
42 |
+ or either of the :class:`.ElementError` or :class:`.SourceError`
|
|
43 |
+ exceptions should be raised from this error.
|
|
44 |
+ """
|
|
45 |
+ def __init__(self, message, reason=None):
|
|
46 |
+ super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
|
|
47 |
+ |
|
48 |
+ |
|
49 |
+# FileBasedDirectory intentionally doesn't call its superclass constructor,
|
|
50 |
+# which is meant to be unimplemented.
|
|
51 |
+# pylint: disable=super-init-not-called
|
|
52 |
+ |
|
53 |
+ |
|
54 |
+class _FileObject():
|
|
55 |
+ """A description of a file in a virtual directory. The contents of
|
|
56 |
+ this class are never used, but there needs to be something present
|
|
57 |
+ for files so is_empty() works correctly.
|
|
58 |
+ |
|
59 |
+ """
|
|
60 |
+ def __init__(self, virtual_directory: Directory, filename: str):
|
|
61 |
+ self.directory = virtual_directory
|
|
62 |
+ self.filename = filename
|
|
63 |
+ |
|
64 |
+ |
|
65 |
+class FileBasedDirectory(Directory):
|
|
66 |
+ def __init__(self, external_directory=None):
|
|
67 |
+ self.external_directory = external_directory
|
|
68 |
+ self.index = {}
|
|
69 |
+ self._directory_read = False
|
|
70 |
+ |
|
71 |
+ def _populate_index(self):
|
|
72 |
+ if self._directory_read:
|
|
73 |
+ return
|
|
74 |
+ for entry in os.listdir(self.external_directory):
|
|
75 |
+ if os.path.isdir(os.path.join(self.external_directory, entry)):
|
|
76 |
+ self.index[entry] = FileBasedDirectory(os.path.join(self.external_directory, entry))
|
|
77 |
+ else:
|
|
78 |
+ self.index[entry] = _FileObject(self, entry)
|
|
79 |
+ self._directory_read = True
|
|
80 |
+ |
|
81 |
+ def descend(self, subdirectory_spec, create=False):
|
|
82 |
+ """ See superclass Directory for arguments """
|
|
83 |
+ # It's very common to send a directory name instead of a list and this causes
|
|
84 |
+ # bizarre errors, so check for it here
|
|
85 |
+ if not isinstance(subdirectory_spec, list):
|
|
86 |
+ subdirectory_spec = [subdirectory_spec]
|
|
87 |
+ |
|
88 |
+ # Because of the way split works, it's common to get a list which begins with
|
|
89 |
+ # an empty string. Detect these and remove them.
|
|
90 |
+ while subdirectory_spec and subdirectory_spec[0] == "":
|
|
91 |
+ subdirectory_spec.pop(0)
|
|
92 |
+ |
|
93 |
+ if not subdirectory_spec:
|
|
94 |
+ return self
|
|
95 |
+ |
|
96 |
+ self._populate_index()
|
|
97 |
+ if subdirectory_spec[0] in self.index:
|
|
98 |
+ entry = self.index[subdirectory_spec[0]]
|
|
99 |
+ if isinstance(entry, FileBasedDirectory):
|
|
100 |
+ new_path = os.path.join(self.external_directory, subdirectory_spec[0])
|
|
101 |
+ return FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
|
|
102 |
+ else:
|
|
103 |
+ error = "Cannot descend into {}, which is a '{}' in the directory {}"
|
|
104 |
+ raise VirtualDirectoryError(error.format(subdirectory_spec[0],
|
|
105 |
+ type(entry).__name__,
|
|
106 |
+ self.external_directory))
|
|
107 |
+ else:
|
|
108 |
+ if create:
|
|
109 |
+ new_path = os.path.join(self.external_directory, subdirectory_spec[0])
|
|
110 |
+ os.makedirs(new_path, exist_ok=True)
|
|
111 |
+ return FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
|
|
112 |
+ else:
|
|
113 |
+ error = "No entry called '{}' found in the directory rooted at {}"
|
|
114 |
+ raise VirtualDirectoryError(error.format(subdirectory_spec[0], self.external_directory))
|
|
115 |
+ |
|
116 |
+ def import_files(self, external_pathspec, *, files=None,
|
|
117 |
+ report_written=True, update_utimes=False,
|
|
118 |
+ can_link=False):
|
|
119 |
+ """ See superclass Directory for arguments """
|
|
120 |
+ |
|
121 |
+ if isinstance(external_pathspec, Directory):
|
|
122 |
+ source_directory = external_pathspec.external_directory
|
|
123 |
+ else:
|
|
124 |
+ source_directory = external_pathspec
|
|
125 |
+ |
|
126 |
+ if can_link and not update_utimes:
|
|
127 |
+ import_result = link_files(source_directory, self.external_directory, files=files,
|
|
128 |
+ ignore_missing=False, report_written=report_written)
|
|
129 |
+ else:
|
|
130 |
+ import_result = copy_files(source_directory, self.external_directory, files=files,
|
|
131 |
+ ignore_missing=False, report_written=report_written)
|
|
132 |
+ if update_utimes:
|
|
133 |
+ cur_time = time.time()
|
|
134 |
+ |
|
135 |
+ for f in import_result.files_written:
|
|
136 |
+ os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
|
|
137 |
+ return import_result
|
|
138 |
+ |
|
139 |
+ def set_deterministic_mtime(self):
|
|
140 |
+ _set_deterministic_mtime(self.external_directory)
|
|
141 |
+ |
|
142 |
+ def set_deterministic_user(self):
|
|
143 |
+ _set_deterministic_user(self.external_directory)
|
|
144 |
+ |
|
145 |
+ def export_files(self, to_directory, *, can_link=False, can_destroy=False):
|
|
146 |
+ if can_destroy:
|
|
147 |
+ # Try a simple rename of the sandbox root; if that
|
|
148 |
+ # doesn't cut it, then do the regular link files code path
|
|
149 |
+ try:
|
|
150 |
+ os.rename(self.external_directory, to_directory)
|
|
151 |
+ return
|
|
152 |
+ except OSError:
|
|
153 |
+ # Proceed using normal link/copy
|
|
154 |
+ pass
|
|
155 |
+ |
|
156 |
+ os.makedirs(to_directory, exist_ok=True)
|
|
157 |
+ if can_link:
|
|
158 |
+ link_files(self.external_directory, to_directory)
|
|
159 |
+ else:
|
|
160 |
+ copy_files(self.external_directory, to_directory)
|
|
161 |
+ |
|
162 |
+ # Add a directory entry deterministically to a tar file
|
|
163 |
+ #
|
|
164 |
+ # This function takes extra steps to ensure the output is deterministic.
|
|
165 |
+ # First, it sorts the results of os.listdir() to ensure the ordering of
|
|
166 |
+ # the files in the archive is the same. Second, it sets a fixed
|
|
167 |
+ # timestamp for each entry. See also https://bugs.python.org/issue24465.
|
|
168 |
+ def export_to_tar(self, tf, dir_arcname, mtime=0):
|
|
169 |
+ # We need directories here, including non-empty ones,
|
|
170 |
+ # so list_relative_paths is not used.
|
|
171 |
+ for filename in sorted(os.listdir(self.external_directory)):
|
|
172 |
+ source_name = os.path.join(self.external_directory, filename)
|
|
173 |
+ arcname = os.path.join(dir_arcname, filename)
|
|
174 |
+ tarinfo = tf.gettarinfo(source_name, arcname)
|
|
175 |
+ tarinfo.mtime = mtime
|
|
176 |
+ |
|
177 |
+ if tarinfo.isreg():
|
|
178 |
+ with open(source_name, "rb") as f:
|
|
179 |
+ tf.addfile(tarinfo, f)
|
|
180 |
+ elif tarinfo.isdir():
|
|
181 |
+ tf.addfile(tarinfo)
|
|
182 |
+ self.descend(filename.split(os.path.sep)).export_to_tar(tf, arcname, mtime)
|
|
183 |
+ else:
|
|
184 |
+ tf.addfile(tarinfo)
|
|
185 |
+ |
|
186 |
+ def is_empty(self):
|
|
187 |
+ self._populate_index()
|
|
188 |
+ return len(self.index) == 0
|
|
189 |
+ |
|
190 |
+ def mark_unmodified(self):
|
|
191 |
+ """ Marks all files in this directory (recursively) as unmodified.
|
|
192 |
+ """
|
|
193 |
+ _set_deterministic_mtime(self.external_directory)
|
|
194 |
+ |
|
195 |
+ def list_modified_paths(self):
|
|
196 |
+ """Provide a list of relative paths which have been modified since the
|
|
197 |
+ last call to mark_unmodified.
|
|
198 |
+ |
|
199 |
+ Return value: List(str) - list of modified paths
|
|
200 |
+ """
|
|
201 |
+ return [f for f in list_relative_paths(self.external_directory)
|
|
202 |
+ if _get_link_mtime(os.path.join(self.external_directory, f)) != _magic_timestamp]
|
|
203 |
+ |
|
204 |
+ def list_relative_paths(self):
|
|
205 |
+ """Provide a list of all relative paths.
|
|
206 |
+ |
|
207 |
+ Return value: List(str) - list of all paths
|
|
208 |
+ """
|
|
209 |
+ |
|
210 |
+ return list_relative_paths(self.external_directory)
|
|
211 |
+ |
|
212 |
+ def __str__(self):
|
|
213 |
+ # This returns the whole path (since we don't know where the directory started)
|
|
214 |
+ # which exposes the sandbox directory; we will have to assume for the time being
|
|
215 |
+ # that people will not abuse __str__.
|
|
216 |
+ return self.external_directory
|
1 |
+#!/usr/bin/env python3
|
|
2 |
+#
|
|
3 |
+# Copyright (C) 2018 Bloomberg Finance LP
|
|
4 |
+#
|
|
5 |
+# This program is free software; you can redistribute it and/or
|
|
6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
7 |
+# License as published by the Free Software Foundation; either
|
|
8 |
+# version 2 of the License, or (at your option) any later version.
|
|
9 |
+#
|
|
10 |
+# This library is distributed in the hope that it will be useful,
|
|
11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
13 |
+# Lesser General Public License for more details.
|
|
14 |
+#
|
|
15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
17 |
+#
|
|
18 |
+# Authors:
|
|
19 |
+# Jim MacArthur <jim macarthur codethink co uk>
|
|
20 |
+ |
|
21 |
+"""
|
|
22 |
+Directory
|
|
23 |
+=========
|
|
24 |
+ |
|
25 |
+This is a virtual Directory class to isolate the rest of BuildStream
|
|
26 |
+from the backing store implementation. Sandboxes are allowed to read
|
|
27 |
+from and write to the underlying storage, but all others must use this
|
|
28 |
+Directory class to access files and directories in the sandbox.
|
|
29 |
+ |
|
30 |
+See also: :ref:`sandboxing`.
|
|
31 |
+ |
|
32 |
+"""
|
|
33 |
+ |
|
34 |
+ |
|
35 |
+class Directory():
|
|
36 |
+ def __init__(self, external_directory=None):
|
|
37 |
+ raise NotImplementedError()
|
|
38 |
+ |
|
39 |
+ def descend(self, subdirectory_spec, create=False):
|
|
40 |
+ """Descend one or more levels of directory hierarchy and return a new
|
|
41 |
+ Directory object for that directory.
|
|
42 |
+ |
|
43 |
+ Args:
|
|
44 |
+ subdirectory_spec (list of str): A list of strings which are all directory
|
|
45 |
+ names.
|
|
46 |
+ create (boolean): If this is true, the directories will be created if
|
|
47 |
+ they don't already exist.
|
|
48 |
+ |
|
49 |
+ Yields:
|
|
50 |
+ A Directory object representing the found directory.
|
|
51 |
+ |
|
52 |
+ Raises:
|
|
53 |
+ VirtualDirectoryError: if any of the components in subdirectory_spec
|
|
54 |
+ cannot be found, or are files, or symlinks to files.
|
|
55 |
+ |
|
56 |
+ """
|
|
57 |
+ raise NotImplementedError()
|
|
58 |
+ |
|
59 |
+ # Import and export of files and links
|
|
60 |
+ def import_files(self, external_pathspec, *, files=None,
|
|
61 |
+ report_written=True, update_utimes=False,
|
|
62 |
+ can_link=False):
|
|
63 |
+ """Imports some or all files from external_path into this directory.
|
|
64 |
+ |
|
65 |
+ Args:
|
|
66 |
+ external_pathspec: Either a string containing a pathname, or a
|
|
67 |
+ Directory object, to use as the source.
|
|
68 |
+ files (list of str): A list of all the files relative to
|
|
69 |
+ the external_pathspec to copy. If 'None' is supplied, all
|
|
70 |
+ files are copied.
|
|
71 |
+ report_written (bool): Return the full list of files
|
|
72 |
+ written. Defaults to true. If false, only a list of
|
|
73 |
+ overwritten files is returned.
|
|
74 |
+ update_utimes (bool): Update the access and modification time
|
|
75 |
+ of each file copied to the current time.
|
|
76 |
+ can_link (bool): Whether it's OK to create a hard link to the
|
|
77 |
+ original content, meaning the stored copy will change when the
|
|
78 |
+ original files change. Setting this doesn't guarantee hard
|
|
79 |
+ links will be made. can_link will never be used if
|
|
80 |
+ update_utimes is set.
|
|
81 |
+ |
|
82 |
+ Yields:
|
|
83 |
+ (FileListResult) - A report of files imported and overwritten.
|
|
84 |
+ |
|
85 |
+ """
|
|
86 |
+ |
|
87 |
+ raise NotImplementedError()
|
|
88 |
+ |
|
89 |
+ def export_files(self, to_directory, *, can_link=False, can_destroy=False):
|
|
90 |
+ """Copies everything from this into to_directory.
|
|
91 |
+ |
|
92 |
+ Args:
|
|
93 |
+ to_directory (string): a path outside this directory object
|
|
94 |
+ where the contents will be copied to.
|
|
95 |
+ can_link (bool): Whether we can create hard links in to_directory
|
|
96 |
+ instead of copying. Setting this does not guarantee hard links will be used.
|
|
97 |
+ can_destroy (bool): Can we destroy the data already in this
|
|
98 |
+ directory when exporting? If set, this may allow data to be
|
|
99 |
+ moved rather than copied which will be quicker.
|
|
100 |
+ """
|
|
101 |
+ |
|
102 |
+ raise NotImplementedError()
|
|
103 |
+ |
|
104 |
+ def export_to_tar(self, tarfile, destination_dir, mtime=0):
|
|
105 |
+ """ Exports this directory into the given tar file.
|
|
106 |
+ |
|
107 |
+ Args:
|
|
108 |
+ tarfile (TarFile): A Python TarFile object to export into.
|
|
109 |
+ destination_dir (str): The prefix for all filenames inside the archive.
|
|
110 |
+ mtime (int): mtimes of all files in the archive are set to this.
|
|
111 |
+ """
|
|
112 |
+ raise NotImplementedError()
|
|
113 |
+ |
|
114 |
+ # Convenience functions
|
|
115 |
+ def is_empty(self):
|
|
116 |
+ """ Return true if this directory has no files, subdirectories or links in it.
|
|
117 |
+ """
|
|
118 |
+ raise NotImplementedError()
|
|
119 |
+ |
|
120 |
+ def set_deterministic_mtime(self):
|
|
121 |
+ """ Sets a static modification time for all regular files in this directory.
|
|
122 |
+ The magic number for timestamps is 2011-11-11 11:11:11.
|
|
123 |
+ """
|
|
124 |
+ raise NotImplementedError()
|
|
125 |
+ |
|
126 |
+ def set_deterministic_user(self):
|
|
127 |
+ """ Sets all files in this directory to the current user's euid/egid.
|
|
128 |
+ """
|
|
129 |
+ raise NotImplementedError()
|
|
130 |
+ |
|
131 |
+ def mark_unmodified(self):
|
|
132 |
+ """ Marks all files in this directory (recursively) as unmodified.
|
|
133 |
+ """
|
|
134 |
+ raise NotImplementedError()
|
|
135 |
+ |
|
136 |
+ def list_modified_paths(self):
|
|
137 |
+ """Provide a list of relative paths which have been modified since the
|
|
138 |
+ last call to mark_unmodified. Includes directories only if
|
|
139 |
+ they are empty.
|
|
140 |
+ |
|
141 |
+ Yields:
|
|
142 |
+ (List(str)) - list of all modified files with relative paths.
|
|
143 |
+ |
|
144 |
+ """
|
|
145 |
+ raise NotImplementedError()
|
|
146 |
+ |
|
147 |
+ def list_relative_paths(self):
|
|
148 |
+ """Provide a list of all relative paths in this directory. Includes
|
|
149 |
+ directories only if they are empty.
|
|
150 |
+ |
|
151 |
+ Yields:
|
|
152 |
+ (List(str)) - list of all files with relative paths.
|
|
153 |
+ |
|
154 |
+ """
|
|
155 |
+ raise NotImplementedError()
|
... | ... | @@ -41,6 +41,9 @@ import psutil |
41 | 41 |
from . import _signals
|
42 | 42 |
from ._exceptions import BstError, ErrorDomain
|
43 | 43 |
|
44 |
+# The magic number for timestamps: 2011-11-11 11:11:11
|
|
45 |
+_magic_timestamp = calendar.timegm([2011, 11, 11, 11, 11, 11])
|
|
46 |
+ |
|
44 | 47 |
|
45 | 48 |
# The separator we use for user specified aliases
|
46 | 49 |
_ALIAS_SEPARATOR = ':'
|
... | ... | @@ -909,9 +912,6 @@ def _set_deterministic_user(directory): |
909 | 912 |
# directory (str): The directory to recursively set the mtime on
|
910 | 913 |
#
|
911 | 914 |
def _set_deterministic_mtime(directory):
|
912 |
- # The magic number for timestamps: 2011-11-11 11:11:11
|
|
913 |
- magic_timestamp = calendar.timegm([2011, 11, 11, 11, 11, 11])
|
|
914 |
- |
|
915 | 915 |
for dirname, _, filenames in os.walk(directory.encode("utf-8"), topdown=False):
|
916 | 916 |
for filename in filenames:
|
917 | 917 |
pathname = os.path.join(dirname, filename)
|
... | ... | @@ -930,9 +930,9 @@ def _set_deterministic_mtime(directory): |
930 | 930 |
# However, nowadays it is possible at least on gnuish systems
|
931 | 931 |
# with with the lutimes glibc function.
|
932 | 932 |
if not os.path.islink(pathname):
|
933 |
- os.utime(pathname, (magic_timestamp, magic_timestamp))
|
|
933 |
+ os.utime(pathname, (_magic_timestamp, _magic_timestamp))
|
|
934 | 934 |
|
935 |
- os.utime(dirname, (magic_timestamp, magic_timestamp))
|
|
935 |
+ os.utime(dirname, (_magic_timestamp, _magic_timestamp))
|
|
936 | 936 |
|
937 | 937 |
|
938 | 938 |
# _tempdir()
|
... | ... | @@ -1159,3 +1159,11 @@ def _deduplicate(iterable, key=None): |
1159 | 1159 |
if k not in seen:
|
1160 | 1160 |
seen_add(k)
|
1161 | 1161 |
yield element
|
1162 |
+ |
|
1163 |
+ |
|
1164 |
+# Like os.path.getmtime(), but returns the mtime of a link rather than
|
|
1165 |
+# the target, if the filesystem supports that.
|
|
1166 |
+#
|
|
1167 |
+def _get_link_mtime(path):
|
|
1168 |
+ path_stat = os.lstat(path)
|
|
1169 |
+ return path_stat.st_mtime
|
... | ... | @@ -212,6 +212,10 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir): |
212 | 212 |
# Also try multi arguments together
|
213 | 213 |
('no-element-path', 'bst --directory ../ checkout t ', 4, ['target.bst '], 'files'),
|
214 | 214 |
('no-element-path', 'bst --directory ../ checkout target.bst ', 5, ['bin-files/', 'dev-files/'], 'files'),
|
215 |
+ |
|
216 |
+ # When the element-path has sub-folders
|
|
217 |
+ ('sub-folders', 'bst show base', 2, ['base/wanted.bst '], None),
|
|
218 |
+ ('sub-folders', 'bst show base/', 2, ['base/wanted.bst '], None),
|
|
215 | 219 |
])
|
216 | 220 |
def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subdir):
|
217 | 221 |
cwd = os.path.join(str(datafiles), project)
|
1 |
+kind: autotools
|
|
2 |
+description: |
|
|
3 |
+ |
|
4 |
+ Not auto-completed element
|
1 |
+kind: stack
|
|
2 |
+description: Base stack
|
|
3 |
+ |
|
4 |
+depends:
|
|
5 |
+- base/wanted.bst
|
1 |
+kind: autotools
|
|
2 |
+description: |
|
|
3 |
+ |
|
4 |
+ Auto-completed element
|
1 |
+kind: autotools
|
|
2 |
+description: |
|
|
3 |
+ |
|
4 |
+ Hello world
|
1 |
+# Project config for frontend build test
|
|
2 |
+name: test
|
|
3 |
+ |
|
4 |
+element-path: elements
|
... | ... | @@ -359,3 +359,44 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles): |
359 | 359 |
|
360 | 360 |
# Assert that we are just fine without it, and emit a warning to the user.
|
361 | 361 |
assert "Ignoring inconsistent submodule" in result.stderr
|
362 |
+ |
|
363 |
+ |
|
364 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
365 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
|
366 |
+def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
|
|
367 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
368 |
+ |
|
369 |
+ # Create the repo from 'repofiles' subdir
|
|
370 |
+ repo = create_repo('git', str(tmpdir))
|
|
371 |
+ ref = repo.create(os.path.join(project, 'repofiles'))
|
|
372 |
+ |
|
373 |
+ # Write out our test target
|
|
374 |
+ gitsource = repo.source_config(ref=None)
|
|
375 |
+ gitsource.pop('track')
|
|
376 |
+ element = {
|
|
377 |
+ 'kind': 'import',
|
|
378 |
+ 'sources': [
|
|
379 |
+ gitsource
|
|
380 |
+ ]
|
|
381 |
+ }
|
|
382 |
+ |
|
383 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
384 |
+ |
|
385 |
+ # Track should fail, since the source has neither a ref nor a track
|
|
386 |
+ result = cli.run(project=project, args=['track', 'target.bst'])
|
|
387 |
+ result.assert_main_error(ErrorDomain.STREAM, None)
|
|
388 |
+ result.assert_task_error(ErrorDomain.SOURCE, 'track-attempt-no-track')
|
|
389 |
+ |
|
390 |
+ # Assert that the error asking the user for a ref or a track is emitted
|
|
391 |
+ assert "FAILURE git source at" in result.stderr
|
|
392 |
+ assert "Without a tracking branch ref can not be updated. Please " + \
|
|
393 |
+ "provide a ref or a track." in result.stderr
|
|
394 |
+ |
|
395 |
+ # Build should also fail, with an inconsistent pipeline due to the missing ref
|
|
396 |
+ result = cli.run(project=project, args=['build', 'target.bst'])
|
|
397 |
+ result.assert_main_error(ErrorDomain.PIPELINE, 'inconsistent-pipeline')
|
|
398 |
+ result.assert_task_error(None, None)
|
|
399 |
+ |
|
400 |
+ # Assert that the missing ref and track are reported to the user
|
|
401 |
+ assert "Exact versions are missing for the following elements" in result.stderr
|
|
402 |
+ assert ": Is missing ref and track, please add a branch to track or a ref." in result.stderr
|