Will Salmon pushed to branch willsalmon/defaultWorkspaces at BuildStream / buildstream
Commits:
-
7738f6df
by Daniel Silverstone at 2018-11-09T11:25:44Z
-
62f59eaa
by Valentin David at 2018-11-09T13:30:06Z
-
35ec6b6a
by Jim MacArthur at 2018-11-09T14:58:22Z
-
b93100ec
by Jim MacArthur at 2018-11-09T14:58:22Z
-
78691fa0
by Jim MacArthur at 2018-11-09T14:58:22Z
-
7ce6581b
by Jim MacArthur at 2018-11-09T15:25:46Z
-
6f4351ec
by Benjamin Schubert at 2018-11-09T16:53:42Z
-
e6e03451
by richardmaw-codethink at 2018-11-12T11:05:15Z
-
dcddd09d
by William Salmon at 2018-11-12T11:50:28Z
-
b2df769f
by William Salmon at 2018-11-12T11:50:28Z
-
3f636a75
by William Salmon at 2018-11-12T11:50:28Z
19 changed files:
- NEWS
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_stream.py
- buildstream/data/userconfig.yaml
- buildstream/plugin.py
- buildstream/source.py
- buildstream/storage/_casbaseddirectory.py
- buildstream/utils.py
- doc/sessions/developing.run
- tests/examples/developing.py
- tests/examples/junctions.py
- tests/frontend/buildcheckout.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/workspace.py
- tests/integration/shell.py
- tests/integration/workspace.py
- tests/plugins/filter.py
- + tests/storage/virtual_directory_import.py
Changes:
... | ... | @@ -45,6 +45,10 @@ buildstream 1.3.1 |
45 | 45 |
instead of just a specially-formatted build-root with a `root` and `scratch`
|
46 | 46 |
subdirectory.
|
47 | 47 |
|
48 |
+ o `bst workspace open` now supports opening workspaces for multiple elements and
|
|
49 |
+ allows the user to set a default location for their creation. As a result,
|
|
50 |
+ the new CLI is no longer backwards compatible with buildstream 1.2.
|
|
51 |
+ |
|
48 | 52 |
|
49 | 53 |
=================
|
50 | 54 |
buildstream 1.1.5
|
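The NEWS entry above is exercised by the frontend tests further down in this push. A minimal sketch of the two new invocation forms, written in the same `cli.run()` test-driver style those tests use (the element names here are hypothetical):

    # Open workspaces for several elements at once; each workspace lands
    # under the configured 'workspacedir' default, named after its element.
    result = cli.run(project=project, args=['workspace', 'open', 'foo.bst', 'bar.bst'])

    # Open a single workspace in an explicit location; --directory is only
    # accepted when exactly one element is given.
    result = cli.run(project=project, args=['workspace', 'open', '--directory', 'my_workspace', 'foo.bst'])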
... | ... | @@ -59,6 +59,9 @@ class Context(): |
59 | 59 |
# The directory where build sandboxes will be created
|
60 | 60 |
self.builddir = None
|
61 | 61 |
|
62 |
+ # Default root location for workspaces
|
|
63 |
+ self.workspacedir = None
|
|
64 |
+ |
|
62 | 65 |
# The local binary artifact cache directory
|
63 | 66 |
self.artifactdir = None
|
64 | 67 |
|
... | ... | @@ -160,10 +163,10 @@ class Context(): |
160 | 163 |
_yaml.node_validate(defaults, [
|
161 | 164 |
'sourcedir', 'builddir', 'artifactdir', 'logdir',
|
162 | 165 |
'scheduler', 'artifacts', 'logging', 'projects',
|
163 |
- 'cache'
|
|
166 |
+ 'cache', 'workspacedir',
|
|
164 | 167 |
])
|
165 | 168 |
|
166 |
- for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
|
|
169 |
+ for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
|
|
167 | 170 |
# Allow the ~ tilde expansion and any environment variables in
|
168 | 171 |
# path specification in the config files.
|
169 | 172 |
#
|
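For context, the loop changed above expands each configured directory before use; 'workspacedir' simply joins that list. A standalone sketch of the expansion it performs, with `defaults` standing in for the validated user-config node (not the actual Context code, which reads values through _yaml):

    import os

    def expand_directory_options(context, defaults):
        # Expand '~' and environment variables in each configured directory.
        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
            path = defaults[directory]           # e.g. '~/buildstream/${USER}'
            path = os.path.expanduser(path)      # expand the '~' tilde
            path = os.path.expandvars(path)      # expand environment variables
            setattr(context, directory, path)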
... | ... | @@ -6,6 +6,7 @@ from .. import _yaml |
6 | 6 |
from .._exceptions import BstError, LoadError, AppError
|
7 | 7 |
from .._versions import BST_FORMAT_VERSION
|
8 | 8 |
from .complete import main_bashcomplete, complete_path, CompleteUnhandled
|
9 |
+from ..utils import DirectoryDescription
|
|
9 | 10 |
|
10 | 11 |
|
11 | 12 |
##################################################################
|
... | ... | @@ -678,28 +679,36 @@ def workspace(): |
678 | 679 |
@click.option('--no-checkout', default=False, is_flag=True,
|
679 | 680 |
help="Do not checkout the source, only link to the given directory")
|
680 | 681 |
@click.option('--force', '-f', default=False, is_flag=True,
|
681 |
- help="Overwrite files existing in checkout directory")
|
|
682 |
+ help="The workspace will be created even if the directory in which it will be created is not empty " +
|
|
683 |
+ "or if a workspace for that element already exists")
|
|
682 | 684 |
@click.option('--track', 'track_', default=False, is_flag=True,
|
683 | 685 |
help="Track and fetch new source references before checking out the workspace")
|
684 |
-@click.argument('element',
|
|
685 |
- type=click.Path(readable=False))
|
|
686 |
-@click.argument('directory', type=click.Path(file_okay=False))
|
|
686 |
+@click.option('--directory', type=click.Path(file_okay=False), default=None,
|
|
687 |
+ help="Only for use when a single Element is given: Set the directory to use to create the workspace")
|
|
688 |
+@click.argument('elements', nargs=-1, type=click.Path(readable=False))
|
|
687 | 689 |
@click.pass_obj
|
688 |
-def workspace_open(app, no_checkout, force, track_, element, directory):
|
|
690 |
+def workspace_open(app, no_checkout, force, track_, directory, elements):
|
|
689 | 691 |
"""Open a workspace for manual source modification"""
|
690 |
- |
|
691 |
- if os.path.exists(directory):
|
|
692 |
- |
|
693 |
- if not os.path.isdir(directory):
|
|
694 |
- click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
|
|
692 |
+ directories = []
|
|
693 |
+ if directory is not None:
|
|
694 |
+ if len(elements) > 1:
|
|
695 |
+ click.echo("Directory option can only be used if a single element is given", err=True)
|
|
695 | 696 |
sys.exit(-1)
|
697 |
+ if os.path.exists(directory):
|
|
698 |
+ if not os.path.isdir(directory):
|
|
699 |
+ click.echo("Directory path is not a directory: {}".format(directory), err=True)
|
|
700 |
+ sys.exit(-1)
|
|
696 | 701 |
|
697 |
- if not (no_checkout or force) and os.listdir(directory):
|
|
698 |
- click.echo("Checkout directory is not empty: {}".format(directory), err=True)
|
|
699 |
- sys.exit(-1)
|
|
702 |
+ if not (no_checkout or force) and os.listdir(directory):
|
|
703 |
+ click.echo("Directory path is not empty: {}".format(directory), err=True)
|
|
704 |
+ sys.exit(-1)
|
|
705 |
+ directories.append(DirectoryDescription(directory, use_default=False))
|
|
706 |
+ else:
|
|
707 |
+ for element in elements:
|
|
708 |
+ directories.append(DirectoryDescription(element[:-len('.bst')] if element.endswith('.bst') else element))
|
|
700 | 709 |
|
701 | 710 |
with app.initialized():
|
702 |
- app.stream.workspace_open(element, directory,
|
|
711 |
+ app.stream.workspace_open(elements, directories,
|
|
703 | 712 |
no_checkout=no_checkout,
|
704 | 713 |
track_first=track_,
|
705 | 714 |
force=force)
|
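When no --directory is given, the loop above derives one DirectoryDescription per element from the element's name; Stream.workspace_open() later joins use_default entries under the configured workspacedir. A minimal sketch of that derivation (the helper name is hypothetical):

    import os

    def default_workspace_path(element_name, workspacedir):
        # Strip the '.bst' suffix explicitly; str.rstrip() strips a *set of
        # characters* and would mangle names such as 'test.bst' -> 'te'.
        if element_name.endswith('.bst'):
            element_name = element_name[:-len('.bst')]
        return os.path.abspath(os.path.join(workspacedir, element_name))

    default_workspace_path('hello.bst', '.')   # -> os.path.abspath('./hello')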
... | ... | @@ -448,44 +448,29 @@ class Stream(): |
448 | 448 |
# Open a project workspace
|
449 | 449 |
#
|
450 | 450 |
# Args:
|
451 |
- # target (str): The target element to open the workspace for
|
|
452 |
- # directory (str): The directory to stage the source in
|
|
451 |
+ # targets (list): List of target elements to open workspaces for
|
|
452 |
+ # directories (list): List of DirectoryDescription objects describing where to stage each source
|
|
453 | 453 |
# no_checkout (bool): Whether to skip checking out the source
|
454 | 454 |
# track_first (bool): Whether to track and fetch first
|
455 | 455 |
# force (bool): Whether to ignore contents in an existing directory
|
456 | 456 |
#
|
457 |
- def workspace_open(self, target, directory, *,
|
|
457 |
+ def workspace_open(self, targets, directories, *,
|
|
458 | 458 |
no_checkout,
|
459 | 459 |
track_first,
|
460 | 460 |
force):
|
461 |
+ # This function validates as much as it can up front, to be as atomic as possible.
|
|
461 | 462 |
|
462 | 463 |
if track_first:
|
463 |
- track_targets = (target,)
|
|
464 |
+ track_targets = targets
|
|
464 | 465 |
else:
|
465 | 466 |
track_targets = ()
|
466 | 467 |
|
467 |
- elements, track_elements = self._load((target,), track_targets,
|
|
468 |
+ elements, track_elements = self._load(targets, track_targets,
|
|
468 | 469 |
selection=PipelineSelection.REDIRECT,
|
469 | 470 |
track_selection=PipelineSelection.REDIRECT)
|
470 |
- target = elements[0]
|
|
471 |
- directory = os.path.abspath(directory)
|
|
472 |
- |
|
473 |
- if not list(target.sources()):
|
|
474 |
- build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
|
|
475 |
- if not build_depends:
|
|
476 |
- raise StreamError("The given element has no sources")
|
|
477 |
- detail = "Try opening a workspace on one of its dependencies instead:\n"
|
|
478 |
- detail += " \n".join(build_depends)
|
|
479 |
- raise StreamError("The given element has no sources", detail=detail)
|
|
480 | 471 |
|
481 | 472 |
workspaces = self._context.get_workspaces()
|
482 | 473 |
|
483 |
- # Check for workspace config
|
|
484 |
- workspace = workspaces.get_workspace(target._get_full_name())
|
|
485 |
- if workspace and not force:
|
|
486 |
- raise StreamError("Workspace '{}' is already defined at: {}"
|
|
487 |
- .format(target.name, workspace.get_absolute_path()))
|
|
488 |
- |
|
489 | 474 |
# If we're going to checkout, we need at least a fetch,
|
490 | 475 |
# if we were asked to track first, we're going to fetch anyway.
|
491 | 476 |
#
|
... | ... | @@ -495,29 +480,69 @@ class Stream(): |
495 | 480 |
track_elements = elements
|
496 | 481 |
self._fetch(elements, track_elements=track_elements)
|
497 | 482 |
|
498 |
- if not no_checkout and target._get_consistency() != Consistency.CACHED:
|
|
499 |
- raise StreamError("Could not stage uncached source. " +
|
|
500 |
- "Use `--track` to track and " +
|
|
501 |
- "fetch the latest version of the " +
|
|
502 |
- "source.")
|
|
483 |
+ expanded_directories = []
|
|
484 |
+ # To be as atomic as possible, loop through the elements and raise any detectable errors early
|
|
485 |
+ for target, directory_obj in zip(elements, directories):
|
|
486 |
+ |
|
487 |
+ if not list(target.sources()):
|
|
488 |
+ build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
|
|
489 |
+ if not build_depends:
|
|
490 |
+ raise StreamError("The element {} has no sources".format(target.name))
|
|
491 |
+ detail = "Try opening a workspace on one of its dependencies instead:\n"
|
|
492 |
+ detail += " \n".join(build_depends)
|
|
493 |
+ raise StreamError("The element {} has no sources".format(target.name), detail=detail)
|
|
494 |
+ |
|
495 |
+ # Check for workspace config
|
|
496 |
+ workspace = workspaces.get_workspace(target._get_full_name())
|
|
497 |
+ if workspace and not force:
|
|
498 |
+ raise StreamError("Workspace '{}' is already defined at: {}"
|
|
499 |
+ .format(target.name, workspace.get_absolute_path()))
|
|
500 |
+ |
|
501 |
+ if not no_checkout and target._get_consistency() != Consistency.CACHED:
|
|
502 |
+ raise StreamError("Could not stage uncached source. For {} ".format(target.name) +
|
|
503 |
+ "Use `--track` to track and " +
|
|
504 |
+ "fetch the latest version of the " +
|
|
505 |
+ "source.")
|
|
506 |
+ |
|
507 |
+ if directory_obj.use_default:
|
|
508 |
+ directory = os.path.abspath(os.path.join(self._context.workspacedir, directory_obj.directory))
|
|
509 |
+ else:
|
|
510 |
+ directory = directory_obj.directory
|
|
503 | 511 |
|
504 |
- if workspace:
|
|
505 |
- workspaces.delete_workspace(target._get_full_name())
|
|
506 |
- workspaces.save_config()
|
|
507 |
- shutil.rmtree(directory)
|
|
508 |
- try:
|
|
509 |
- os.makedirs(directory, exist_ok=True)
|
|
510 |
- except OSError as e:
|
|
511 |
- raise StreamError("Failed to create workspace directory: {}".format(e)) from e
|
|
512 |
+ expanded_directories.append(directory)
|
|
512 | 513 |
|
513 |
- workspaces.create_workspace(target._get_full_name(), directory)
|
|
514 |
+ # So far this function has tried to catch as many issues as possible without making any changes.
|
|
515 |
+ # Now it does the parts that cannot be made atomic.
|
|
516 |
+ targetGenerator = zip(elements, expanded_directories)
|
|
517 |
+ for target, directory in targetGenerator:
|
|
518 |
+ self._message(MessageType.INFO, "Creating workspace for element {}"
|
|
519 |
+ .format(target.name))
|
|
514 | 520 |
|
515 |
- if not no_checkout:
|
|
516 |
- with target.timed_activity("Staging sources to {}".format(directory)):
|
|
517 |
- target._open_workspace()
|
|
521 |
+ workspace = workspaces.get_workspace(target._get_full_name())
|
|
522 |
+ if workspace:
|
|
523 |
+ workspaces.delete_workspace(target._get_full_name())
|
|
524 |
+ workspaces.save_config()
|
|
525 |
+ shutil.rmtree(directory)
|
|
526 |
+ try:
|
|
527 |
+ os.makedirs(directory, exist_ok=True)
|
|
528 |
+ except OSError as e:
|
|
529 |
+ todo_elements = " ".join([str(target.name) for target, directory_dict in targetGenerator])
|
|
530 |
+ if todo_elements:
|
|
531 |
+ # This output should make creating the remaining workspaces as easy as possible.
|
|
532 |
+ todo_elements = "\nDid not try to create workspaces for " + todo_elements
|
|
533 |
+ raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
|
|
518 | 534 |
|
519 |
- workspaces.save_config()
|
|
520 |
- self._message(MessageType.INFO, "Saved workspace configuration")
|
|
535 |
+ workspaces.create_workspace(target._get_full_name(), directory)
|
|
536 |
+ |
|
537 |
+ if not no_checkout:
|
|
538 |
+ with target.timed_activity("Staging sources to {}".format(directory)):
|
|
539 |
+ target._open_workspace()
|
|
540 |
+ |
|
541 |
+ # Saving the configuration as soon as each workspace is set up means that if a later
|
|
542 |
+ # workspace fails to be created, the ones already created are still saved.
|
|
543 |
+ workspaces.save_config()
|
|
544 |
+ self._message(MessageType.INFO, "Added element {} to the workspace configuration"
|
|
545 |
+ .format(target._get_full_name()))
|
|
521 | 546 |
|
522 | 547 |
# workspace_close
|
523 | 548 |
#
|
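The rewritten workspace_open() above follows a validate-then-mutate shape: a first loop raises every error it can detect before anything is touched, and a second loop does the non-atomic work, saving the configuration after each workspace so earlier successes survive a later failure. A distilled sketch of the pattern (the callables are illustrative stand-ins, not BuildStream API):

    def open_all(targets, directories, *, validate, create_workspace, save_config):
        # Phase 1: fail fast without touching anything.
        for target, directory in zip(targets, directories):
            validate(target, directory)          # raises on any detectable problem

        # Phase 2: mutate, saving after every success so a failure part-way
        # through does not lose the workspaces already created.
        for target, directory in zip(targets, directories):
            create_workspace(target, directory)
            save_config()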
... | ... | @@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts |
22 | 22 |
# Location to store build logs
|
23 | 23 |
logdir: ${XDG_CACHE_HOME}/buildstream/logs
|
24 | 24 |
|
25 |
+# Default root location for workspaces; leave blank to set no default.
|
|
26 |
+workspacedir: .
|
|
27 |
+ |
|
25 | 28 |
#
|
26 | 29 |
# Cache
|
27 | 30 |
#
|
... | ... | @@ -111,6 +111,7 @@ Class Reference |
111 | 111 |
|
112 | 112 |
import os
|
113 | 113 |
import subprocess
|
114 |
+import sys
|
|
114 | 115 |
from contextlib import contextmanager
|
115 | 116 |
from weakref import WeakValueDictionary
|
116 | 117 |
|
... | ... | @@ -190,7 +191,7 @@ class Plugin(): |
190 | 191 |
# Dont send anything through the Message() pipeline at destruction time,
|
191 | 192 |
# any subsequent lookup of plugin by unique id would raise KeyError.
|
192 | 193 |
if self.__context.log_debug:
|
193 |
- print("DEBUG: Destroyed: {}".format(self))
|
|
194 |
+ sys.stderr.write("DEBUG: Destroyed: {}\n".format(self))
|
|
194 | 195 |
|
195 | 196 |
def __str__(self):
|
196 | 197 |
return "{kind} {typetag} at {provenance}".format(
|
... | ... | @@ -973,32 +973,34 @@ class Source(Plugin): |
973 | 973 |
# the items of source_fetchers, if it happens to be a generator.
|
974 | 974 |
#
|
975 | 975 |
source_fetchers = iter(source_fetchers)
|
976 |
- try:
|
|
977 | 976 |
|
978 |
- while True:
|
|
977 |
+ while True:
|
|
979 | 978 |
|
980 |
- with context.silence():
|
|
979 |
+ with context.silence():
|
|
980 |
+ try:
|
|
981 | 981 |
fetcher = next(source_fetchers)
|
982 |
- |
|
983 |
- alias = fetcher._get_alias()
|
|
984 |
- for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
985 |
- try:
|
|
986 |
- fetcher.fetch(uri)
|
|
987 |
- # FIXME: Need to consider temporary vs. permanent failures,
|
|
988 |
- # and how this works with retries.
|
|
989 |
- except BstError as e:
|
|
990 |
- last_error = e
|
|
991 |
- continue
|
|
992 |
- |
|
993 |
- # No error, we're done with this fetcher
|
|
982 |
+ except StopIteration:
|
|
983 |
+ # As per PEP 479, we must not let a StopIteration raised by
|
|
984 |
+ # next() escape through the surrounding context manager.
|
|
985 |
+ # Catch it here and break instead.
|
|
994 | 986 |
break
|
995 | 987 |
|
996 |
- else:
|
|
997 |
- # No break occurred, raise the last detected error
|
|
998 |
- raise last_error
|
|
988 |
+ alias = fetcher._get_alias()
|
|
989 |
+ for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
990 |
+ try:
|
|
991 |
+ fetcher.fetch(uri)
|
|
992 |
+ # FIXME: Need to consider temporary vs. permanent failures,
|
|
993 |
+ # and how this works with retries.
|
|
994 |
+ except BstError as e:
|
|
995 |
+ last_error = e
|
|
996 |
+ continue
|
|
999 | 997 |
|
1000 |
- except StopIteration:
|
|
1001 |
- pass
|
|
998 |
+ # No error, we're done with this fetcher
|
|
999 |
+ break
|
|
1000 |
+ |
|
1001 |
+ else:
|
|
1002 |
+ # No break occurred, raise the last detected error
|
|
1003 |
+ raise last_error
|
|
1002 | 1004 |
|
1003 | 1005 |
# Default codepath is to reinstantiate the Source
|
1004 | 1006 |
#
|
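The restructuring above is driven by PEP 479: on modern Python a StopIteration that bubbles out of a generator frame becomes a RuntimeError, so the next() call has to be guarded exactly where it happens rather than by one try block wrapped around the whole loop. The distilled pattern, as a minimal sketch:

    def fetch_all(fetchers):
        # 'fetchers' may be a generator; fetch() stands in for the real work.
        fetchers = iter(fetchers)
        while True:
            try:
                fetcher = next(fetchers)
            except StopIteration:
                # Exhausted: swallow StopIteration here instead of letting it
                # bubble out of the enclosing frame.
                break
            fetcher.fetch()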
... | ... | @@ -30,7 +30,6 @@ See also: :ref:`sandboxing`. |
30 | 30 |
from collections import OrderedDict
|
31 | 31 |
|
32 | 32 |
import os
|
33 |
-import tempfile
|
|
34 | 33 |
import stat
|
35 | 34 |
|
36 | 35 |
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
... | ... | @@ -51,6 +50,183 @@ class IndexEntry(): |
51 | 50 |
self.modified = modified
|
52 | 51 |
|
53 | 52 |
|
53 |
+class ResolutionException(VirtualDirectoryError):
|
|
54 |
+ """ Superclass of all exceptions that can be raised by
|
|
55 |
+ CasBasedDirectory._resolve. Should not be used outside this module. """
|
|
56 |
+ pass
|
|
57 |
+ |
|
58 |
+ |
|
59 |
+class InfiniteSymlinkException(ResolutionException):
|
|
60 |
+ """ Raised when an infinite symlink loop is found. """
|
|
61 |
+ pass
|
|
62 |
+ |
|
63 |
+ |
|
64 |
+class AbsoluteSymlinkException(ResolutionException):
|
|
65 |
+ """Raised if we try to follow an absolute symlink (i.e. one whose
|
|
66 |
+ target starts with the path separator) and we have disallowed
|
|
67 |
+ following such symlinks.
|
|
68 |
+ """
|
|
69 |
+ pass
|
|
70 |
+ |
|
71 |
+ |
|
72 |
+class UnexpectedFileException(ResolutionException):
|
|
73 |
+ """Raised if we were found a file where a directory or symlink was
|
|
74 |
+ expected, for example we try to resolve a symlink pointing to
|
|
75 |
+ /a/b/c but /a/b is a file.
|
|
76 |
+ """
|
|
77 |
+ def __init__(self, message=""):
|
|
78 |
+ """Allow constructor with no arguments, since this can be raised in
|
|
79 |
+ places where there isn't sufficient information to write the
|
|
80 |
+ message.
|
|
81 |
+ """
|
|
82 |
+ super().__init__(message)
|
|
83 |
+ |
|
84 |
+ |
|
85 |
+class _Resolver():
|
|
86 |
+ """A class for resolving symlinks inside CAS-based directories. As
|
|
87 |
+ well as providing a namespace for some functions, this also
|
|
88 |
+ contains two flags which are constant throughout one resolution
|
|
89 |
+ operation and the 'seen_objects' list used to detect infinite
|
|
90 |
+ symlink loops.
|
|
91 |
+ |
|
92 |
+ """
|
|
93 |
+ |
|
94 |
+ def __init__(self, absolute_symlinks_resolve=True, force_create=False):
|
|
95 |
+ self.absolute_symlinks_resolve = absolute_symlinks_resolve
|
|
96 |
+ self.force_create = force_create
|
|
97 |
+ self.seen_objects = []
|
|
98 |
+ |
|
99 |
+ def resolve(self, name, directory):
|
|
100 |
+ """Resolves any name to an object. If the name points to a symlink in
|
|
101 |
+ the directory, it returns the thing it points to,
|
|
102 |
+ recursively.
|
|
103 |
+ |
|
104 |
+ Returns a CasBasedDirectory, FileNode or None. None indicates
|
|
105 |
+ either that 'name' does not exist in this directory, or is a
|
|
106 |
+ symlink chain which points to a nonexistent name (broken
|
|
107 |
+ symlink).
|
|
108 |
+ |
|
109 |
+ Raises:
|
|
110 |
+ |
|
111 |
+ - InfiniteSymlinkException if 'name' points to an infinite
|
|
112 |
+ symlink loop.
|
|
113 |
+ - AbsoluteSymlinkException if 'name' points to an absolute
|
|
114 |
+ symlink and absolute_symlinks_resolve is False.
|
|
115 |
+ - UnexpectedFileException if at any point during resolution we
|
|
116 |
+ find a file which we expected to be a directory or symlink.
|
|
117 |
+ |
|
118 |
+ If force_create is set, this will attempt to create
|
|
119 |
+ directories to make symlinks and directories resolve. Files
|
|
120 |
+ present in symlink target paths will also be removed and
|
|
121 |
+ replaced with directories. If force_create is off, this will
|
|
122 |
+ never alter 'directory'.
|
|
123 |
+ |
|
124 |
+ """
|
|
125 |
+ |
|
126 |
+ # First check for nonexistent things or 'normal' objects and return them
|
|
127 |
+ if name not in directory.index:
|
|
128 |
+ return None
|
|
129 |
+ index_entry = directory.index[name]
|
|
130 |
+ if isinstance(index_entry.buildstream_object, Directory):
|
|
131 |
+ return index_entry.buildstream_object
|
|
132 |
+ elif isinstance(index_entry.pb_object, remote_execution_pb2.FileNode):
|
|
133 |
+ return index_entry.pb_object
|
|
134 |
+ |
|
135 |
+ # Now we must be dealing with a symlink.
|
|
136 |
+ assert isinstance(index_entry.pb_object, remote_execution_pb2.SymlinkNode)
|
|
137 |
+ |
|
138 |
+ symlink_object = index_entry.pb_object
|
|
139 |
+ if symlink_object in self.seen_objects:
|
|
140 |
+ # Infinite symlink loop detected
|
|
141 |
+ message = ("Infinite symlink loop found during resolution. " +
|
|
142 |
+ "First repeated element is {}".format(name))
|
|
143 |
+ raise InfiniteSymlinkException(message=message)
|
|
144 |
+ |
|
145 |
+ self.seen_objects.append(symlink_object)
|
|
146 |
+ |
|
147 |
+ components = symlink_object.target.split(CasBasedDirectory._pb2_path_sep)
|
|
148 |
+ absolute = symlink_object.target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
|
|
149 |
+ |
|
150 |
+ if absolute:
|
|
151 |
+ if self.absolute_symlinks_resolve:
|
|
152 |
+ directory = directory.find_root()
|
|
153 |
+ # Discard the first empty element
|
|
154 |
+ components.pop(0)
|
|
155 |
+ else:
|
|
156 |
+ # Unresolvable absolute symlink
|
|
157 |
+ message = "{} is an absolute symlink, which was disallowed during resolution".format(name)
|
|
158 |
+ raise AbsoluteSymlinkException(message=message)
|
|
159 |
+ |
|
160 |
+ resolution = directory
|
|
161 |
+ while components and isinstance(resolution, CasBasedDirectory):
|
|
162 |
+ c = components.pop(0)
|
|
163 |
+ directory = resolution
|
|
164 |
+ |
|
165 |
+ try:
|
|
166 |
+ resolution = self._resolve_path_component(c, directory, components)
|
|
167 |
+ except UnexpectedFileException as original:
|
|
168 |
+ errormsg = ("Reached a file called {} while trying to resolve a symlink; " +
|
|
169 |
+ "cannot proceed. The remaining path components are {}.")
|
|
170 |
+ raise UnexpectedFileException(errormsg.format(c, components)) from original
|
|
171 |
+ |
|
172 |
+ return resolution
|
|
173 |
+ |
|
174 |
+ def _resolve_path_component(self, c, directory, components_remaining):
|
|
175 |
+ if c == ".":
|
|
176 |
+ resolution = directory
|
|
177 |
+ elif c == "..":
|
|
178 |
+ if directory.parent is not None:
|
|
179 |
+ resolution = directory.parent
|
|
180 |
+ else:
|
|
181 |
+ # If directory.parent *is* None, this is an attempt to
|
|
182 |
+ # access '..' from the root, which is valid under
|
|
183 |
+ # POSIX; it just returns the root.
|
|
184 |
+ resolution = directory
|
|
185 |
+ elif c in directory.index:
|
|
186 |
+ try:
|
|
187 |
+ resolution = self._resolve_through_files(c, directory, components_remaining)
|
|
188 |
+ except UnexpectedFileException as original:
|
|
189 |
+ errormsg = ("Reached a file called {} while trying to resolve a symlink; " +
|
|
190 |
+ "cannot proceed. The remaining path components are {}.")
|
|
191 |
+ raise UnexpectedFileException(errormsg.format(c, components_remaining)) from original
|
|
192 |
+ else:
|
|
193 |
+ # c is not in our index
|
|
194 |
+ if self.force_create:
|
|
195 |
+ resolution = directory.descend(c, create=True)
|
|
196 |
+ else:
|
|
197 |
+ resolution = None
|
|
198 |
+ return resolution
|
|
199 |
+ |
|
200 |
+ def _resolve_through_files(self, c, directory, require_traversable):
|
|
201 |
+ """A wrapper to resolve() which deals with files being found
|
|
202 |
+ in the middle of paths, for example trying to resolve a symlink
|
|
203 |
+ which points to /usr/lib64/libfoo when 'lib64' is a file.
|
|
204 |
+ |
|
205 |
+ require_traversable: If this is True, never return a file
|
|
206 |
+ node. Instead, if force_create is set, destroy the file node,
|
|
207 |
+ then create and return a normal directory in its place. If
|
|
208 |
+ force_create is off, throws ResolutionException.
|
|
209 |
+ |
|
210 |
+ """
|
|
211 |
+ resolved_thing = self.resolve(c, directory)
|
|
212 |
+ |
|
213 |
+ if isinstance(resolved_thing, remote_execution_pb2.FileNode):
|
|
214 |
+ if require_traversable:
|
|
215 |
+ # We have components still to resolve, but one of the path components
|
|
216 |
+ # is a file.
|
|
217 |
+ if self.force_create:
|
|
218 |
+ directory.delete_entry(c)
|
|
219 |
+ resolved_thing = directory.descend(c, create=True)
|
|
220 |
+ else:
|
|
221 |
+ # This is a signal that we hit a file, but don't
|
|
222 |
+ # have the data to give a proper message, so the
|
|
223 |
+ # caller should reraise this with a proper
|
|
224 |
+ # description.
|
|
225 |
+ raise UnexpectedFileException()
|
|
226 |
+ |
|
227 |
+ return resolved_thing
|
|
228 |
+ |
|
229 |
+ |
|
54 | 230 |
# CasBasedDirectory intentionally doesn't call its superclass constructor,
|
55 | 231 |
# which is meant to be unimplemented.
|
56 | 232 |
# pylint: disable=super-init-not-called
|
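The _Resolver added above detects infinite loops by remembering every symlink node it has already followed: revisiting one proves the chain can never terminate. The same idea in a self-contained sketch over a plain dict mapping link names to targets (illustrative only, not the CAS data structures):

    def resolve(links, name):
        seen = set()
        while name in links:                 # 'name' is a symlink; follow it
            if name in seen:
                raise RuntimeError("infinite symlink loop at {}".format(name))
            seen.add(name)
            name = links[name]
        return name

    resolve({'a': 'b', 'b': 'c'}, 'a')       # -> 'c'
    # resolve({'a': 'b', 'b': 'a'}, 'a') raises RuntimeError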
... | ... | @@ -168,29 +344,34 @@ class CasBasedDirectory(Directory): |
168 | 344 |
self.index[name] = IndexEntry(dirnode, buildstream_object=newdir)
|
169 | 345 |
return newdir
|
170 | 346 |
|
171 |
- def _add_new_file(self, basename, filename):
|
|
347 |
+ def _add_file(self, basename, filename, modified=False):
|
|
172 | 348 |
filenode = self.pb2_directory.files.add()
|
173 | 349 |
filenode.name = filename
|
174 | 350 |
self.cas_cache.add_object(digest=filenode.digest, path=os.path.join(basename, filename))
|
175 | 351 |
is_executable = os.access(os.path.join(basename, filename), os.X_OK)
|
176 | 352 |
filenode.is_executable = is_executable
|
177 |
- self.index[filename] = IndexEntry(filenode, modified=(filename in self.index))
|
|
353 |
+ self.index[filename] = IndexEntry(filenode, modified=modified or filename in self.index)
|
|
178 | 354 |
|
179 |
- def _add_new_link(self, basename, filename):
|
|
180 |
- existing_link = self._find_pb2_entry(filename)
|
|
355 |
+ def _copy_link_from_filesystem(self, basename, filename):
|
|
356 |
+ self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
|
|
357 |
+ |
|
358 |
+ def _add_new_link_direct(self, name, target):
|
|
359 |
+ existing_link = self._find_pb2_entry(name)
|
|
181 | 360 |
if existing_link:
|
182 | 361 |
symlinknode = existing_link
|
183 | 362 |
else:
|
184 | 363 |
symlinknode = self.pb2_directory.symlinks.add()
|
185 |
- symlinknode.name = filename
|
|
364 |
+ assert isinstance(symlinknode, remote_execution_pb2.SymlinkNode)
|
|
365 |
+ symlinknode.name = name
|
|
186 | 366 |
# A symlink node has no digest.
|
187 |
- symlinknode.target = os.readlink(os.path.join(basename, filename))
|
|
188 |
- self.index[filename] = IndexEntry(symlinknode, modified=(existing_link is not None))
|
|
367 |
+ symlinknode.target = target
|
|
368 |
+ self.index[name] = IndexEntry(symlinknode, modified=(existing_link is not None))
|
|
189 | 369 |
|
190 | 370 |
def delete_entry(self, name):
|
191 | 371 |
for collection in [self.pb2_directory.files, self.pb2_directory.symlinks, self.pb2_directory.directories]:
|
192 |
- if name in collection:
|
|
193 |
- collection.remove(name)
|
|
372 |
+ for thing in collection:
|
|
373 |
+ if thing.name == name:
|
|
374 |
+ collection.remove(thing)
|
|
194 | 375 |
if name in self.index:
|
195 | 376 |
del self.index[name]
|
196 | 377 |
|
... | ... | @@ -231,9 +412,13 @@ class CasBasedDirectory(Directory): |
231 | 412 |
if isinstance(entry, CasBasedDirectory):
|
232 | 413 |
return entry.descend(subdirectory_spec[1:], create)
|
233 | 414 |
else:
|
415 |
+ # May be a symlink
|
|
416 |
+ target = self._resolve(subdirectory_spec[0], force_create=create)
|
|
417 |
+ if isinstance(target, CasBasedDirectory):
|
|
418 |
+ return target
|
|
234 | 419 |
error = "Cannot descend into {}, which is a '{}' in the directory {}"
|
235 | 420 |
raise VirtualDirectoryError(error.format(subdirectory_spec[0],
|
236 |
- type(entry).__name__,
|
|
421 |
+ type(self.index[subdirectory_spec[0]].pb_object).__name__,
|
|
237 | 422 |
self))
|
238 | 423 |
else:
|
239 | 424 |
if create:
|
... | ... | @@ -254,36 +439,9 @@ class CasBasedDirectory(Directory): |
254 | 439 |
else:
|
255 | 440 |
return self
|
256 | 441 |
|
257 |
- def _resolve_symlink_or_directory(self, name):
|
|
258 |
- """Used only by _import_files_from_directory. Tries to resolve a
|
|
259 |
- directory name or symlink name. 'name' must be an entry in this
|
|
260 |
- directory. It must be a single symlink or directory name, not a path
|
|
261 |
- separated by path separators. If it's an existing directory name, it
|
|
262 |
- just returns the Directory object for that. If it's a symlink, it will
|
|
263 |
- attempt to find the target of the symlink and return that as a
|
|
264 |
- Directory object.
|
|
265 |
- |
|
266 |
- If a symlink target doesn't exist, it will attempt to create it
|
|
267 |
- as a directory as long as it's within this directory tree.
|
|
268 |
- """
|
|
269 |
- |
|
270 |
- if isinstance(self.index[name].buildstream_object, Directory):
|
|
271 |
- return self.index[name].buildstream_object
|
|
272 |
- # OK then, it's a symlink
|
|
273 |
- symlink = self._find_pb2_entry(name)
|
|
274 |
- absolute = symlink.target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
|
|
275 |
- if absolute:
|
|
276 |
- root = self.find_root()
|
|
277 |
- else:
|
|
278 |
- root = self
|
|
279 |
- directory = root
|
|
280 |
- components = symlink.target.split(CasBasedDirectory._pb2_path_sep)
|
|
281 |
- for c in components:
|
|
282 |
- if c == "..":
|
|
283 |
- directory = directory.parent
|
|
284 |
- else:
|
|
285 |
- directory = directory.descend(c, create=True)
|
|
286 |
- return directory
|
|
442 |
+ def _resolve(self, name, absolute_symlinks_resolve=True, force_create=False):
|
|
443 |
+ resolver = _Resolver(absolute_symlinks_resolve, force_create)
|
|
444 |
+ return resolver.resolve(name, self)
|
|
287 | 445 |
|
288 | 446 |
def _check_replacement(self, name, path_prefix, fileListResult):
|
289 | 447 |
""" Checks whether 'name' exists, and if so, whether we can overwrite it.
|
... | ... | @@ -297,6 +455,7 @@ class CasBasedDirectory(Directory): |
297 | 455 |
return True
|
298 | 456 |
if (isinstance(existing_entry,
|
299 | 457 |
(remote_execution_pb2.FileNode, remote_execution_pb2.SymlinkNode))):
|
458 |
+ self.delete_entry(name)
|
|
300 | 459 |
fileListResult.overwritten.append(relative_pathname)
|
301 | 460 |
return True
|
302 | 461 |
elif isinstance(existing_entry, remote_execution_pb2.DirectoryNode):
|
... | ... | @@ -314,23 +473,44 @@ class CasBasedDirectory(Directory): |
314 | 473 |
.format(name, type(existing_entry)))
|
315 | 474 |
return False # In case asserts are disabled
|
316 | 475 |
|
317 |
- def _import_directory_recursively(self, directory_name, source_directory, remaining_path, path_prefix):
|
|
318 |
- """ _import_directory_recursively and _import_files_from_directory will be called alternately
|
|
319 |
- as a directory tree is descended. """
|
|
320 |
- if directory_name in self.index:
|
|
321 |
- subdir = self._resolve_symlink_or_directory(directory_name)
|
|
322 |
- else:
|
|
323 |
- subdir = self._add_directory(directory_name)
|
|
324 |
- new_path_prefix = os.path.join(path_prefix, directory_name)
|
|
325 |
- subdir_result = subdir._import_files_from_directory(os.path.join(source_directory, directory_name),
|
|
326 |
- [os.path.sep.join(remaining_path)],
|
|
327 |
- path_prefix=new_path_prefix)
|
|
328 |
- return subdir_result
|
|
476 |
+ def _replace_anything_with_dir(self, name, path_prefix, overwritten_files_list):
|
|
477 |
+ self.delete_entry(name)
|
|
478 |
+ subdir = self._add_directory(name)
|
|
479 |
+ overwritten_files_list.append(os.path.join(path_prefix, name))
|
|
480 |
+ return subdir
|
|
329 | 481 |
|
330 | 482 |
def _import_files_from_directory(self, source_directory, files, path_prefix=""):
|
331 |
- """ Imports files from a traditional directory """
|
|
483 |
+ """ Imports files from a traditional directory. """
|
|
484 |
+ |
|
485 |
+ def _ensure_followable(name, path_prefix):
|
|
486 |
+ """ Makes sure 'name' is a directory or symlink to a directory which can be descended into. """
|
|
487 |
+ if isinstance(self.index[name].buildstream_object, Directory):
|
|
488 |
+ return self.descend(name)
|
|
489 |
+ try:
|
|
490 |
+ target = self._resolve(name, force_create=True)
|
|
491 |
+ except InfiniteSymlinkException:
|
|
492 |
+ return self._replace_anything_with_dir(name, path_prefix, result.overwritten)
|
|
493 |
+ if isinstance(target, CasBasedDirectory):
|
|
494 |
+ return target
|
|
495 |
+ elif isinstance(target, remote_execution_pb2.FileNode):
|
|
496 |
+ return self._replace_anything_with_dir(name, path_prefix, result.overwritten)
|
|
497 |
+ return target
|
|
498 |
+ |
|
499 |
+ def _import_directory_recursively(directory_name, source_directory, remaining_path, path_prefix):
|
|
500 |
+ """ _import_directory_recursively and _import_files_from_directory will be called alternately
|
|
501 |
+ as a directory tree is descended. """
|
|
502 |
+ if directory_name in self.index:
|
|
503 |
+ subdir = _ensure_followable(directory_name, path_prefix)
|
|
504 |
+ else:
|
|
505 |
+ subdir = self._add_directory(directory_name)
|
|
506 |
+ new_path_prefix = os.path.join(path_prefix, directory_name)
|
|
507 |
+ subdir_result = subdir._import_files_from_directory(os.path.join(source_directory, directory_name),
|
|
508 |
+ [os.path.sep.join(remaining_path)],
|
|
509 |
+ path_prefix=new_path_prefix)
|
|
510 |
+ return subdir_result
|
|
511 |
+ |
|
332 | 512 |
result = FileListResult()
|
333 |
- for entry in sorted(files):
|
|
513 |
+ for entry in files:
|
|
334 | 514 |
split_path = entry.split(os.path.sep)
|
335 | 515 |
# The actual file on the FS we're importing
|
336 | 516 |
import_file = os.path.join(source_directory, entry)
|
... | ... | @@ -338,14 +518,18 @@ class CasBasedDirectory(Directory): |
338 | 518 |
relative_pathname = os.path.join(path_prefix, entry)
|
339 | 519 |
if len(split_path) > 1:
|
340 | 520 |
directory_name = split_path[0]
|
341 |
- # Hand this off to the importer for that subdir. This will only do one file -
|
|
342 |
- # a better way would be to hand off all the files in this subdir at once.
|
|
343 |
- subdir_result = self._import_directory_recursively(directory_name, source_directory,
|
|
344 |
- split_path[1:], path_prefix)
|
|
521 |
+ # Hand this off to the importer for that subdir.
|
|
522 |
+ |
|
523 |
+ # It would be advantageous to batch these together by
|
|
524 |
+ # directory_name. However, we can't do it out of
|
|
525 |
+ # order, since importing symlinks affects the results
|
|
526 |
+ # of other imports.
|
|
527 |
+ subdir_result = _import_directory_recursively(directory_name, source_directory,
|
|
528 |
+ split_path[1:], path_prefix)
|
|
345 | 529 |
result.combine(subdir_result)
|
346 | 530 |
elif os.path.islink(import_file):
|
347 | 531 |
if self._check_replacement(entry, path_prefix, result):
|
348 |
- self._add_new_link(source_directory, entry)
|
|
532 |
+ self._copy_link_from_filesystem(source_directory, entry)
|
|
349 | 533 |
result.files_written.append(relative_pathname)
|
350 | 534 |
elif os.path.isdir(import_file):
|
351 | 535 |
# A plain directory which already exists isn't a problem; just ignore it.
|
... | ... | @@ -353,10 +537,78 @@ class CasBasedDirectory(Directory): |
353 | 537 |
self._add_directory(entry)
|
354 | 538 |
elif os.path.isfile(import_file):
|
355 | 539 |
if self._check_replacement(entry, path_prefix, result):
|
356 |
- self._add_new_file(source_directory, entry)
|
|
540 |
+ self._add_file(source_directory, entry, modified=relative_pathname in result.overwritten)
|
|
357 | 541 |
result.files_written.append(relative_pathname)
|
358 | 542 |
return result
|
359 | 543 |
|
544 |
+ @staticmethod
|
|
545 |
+ def _files_in_subdir(sorted_files, dirname):
|
|
546 |
+ """Filters sorted_files and returns only the ones which have
|
|
547 |
+ 'dirname' as a prefix, with that prefix removed.
|
|
548 |
+ |
|
549 |
+ """
|
|
550 |
+ if not dirname.endswith(os.path.sep):
|
|
551 |
+ dirname += os.path.sep
|
|
552 |
+ return [f[len(dirname):] for f in sorted_files if f.startswith(dirname)]
|
|
553 |
+ |
|
554 |
+ def _partial_import_cas_into_cas(self, source_directory, files, path_prefix="", file_list_required=True):
|
|
555 |
+ """ Import only the files and symlinks listed in 'files' from source_directory to this one.
|
|
556 |
+ Args:
|
|
557 |
+ source_directory (:class:`.CasBasedDirectory`): The directory to import from
|
|
558 |
+ files ([str]): List of pathnames to import. Must be a list, not a generator.
|
|
559 |
+ path_prefix (str): Prefix used to add entries to the file list result.
|
|
560 |
+ file_list_required: Whether to update the file list while processing.
|
|
561 |
+ """
|
|
562 |
+ result = FileListResult()
|
|
563 |
+ processed_directories = set()
|
|
564 |
+ for f in files:
|
|
565 |
+ fullname = os.path.join(path_prefix, f)
|
|
566 |
+ components = f.split(os.path.sep)
|
|
567 |
+ if len(components) > 1:
|
|
568 |
+ # We are importing a thing which is in a subdirectory. We may have already seen this dirname
|
|
569 |
+ # for a previous file.
|
|
570 |
+ dirname = components[0]
|
|
571 |
+ if dirname not in processed_directories:
|
|
572 |
+ # Now strip off the first directory name and import files recursively.
|
|
573 |
+ subcomponents = CasBasedDirectory._files_in_subdir(files, dirname)
|
|
574 |
+ # We will fail at this point if there is a file or symlink to file called 'dirname'.
|
|
575 |
+ if dirname in self.index:
|
|
576 |
+ resolved_component = self._resolve(dirname, force_create=True)
|
|
577 |
+ if isinstance(resolved_component, remote_execution_pb2.FileNode):
|
|
578 |
+ dest_subdir = self._replace_anything_with_dir(dirname, path_prefix, result.overwritten)
|
|
579 |
+ else:
|
|
580 |
+ dest_subdir = resolved_component
|
|
581 |
+ else:
|
|
582 |
+ dest_subdir = self.descend(dirname, create=True)
|
|
583 |
+ src_subdir = source_directory.descend(dirname)
|
|
584 |
+ import_result = dest_subdir._partial_import_cas_into_cas(src_subdir, subcomponents,
|
|
585 |
+ path_prefix=fullname,
|
|
586 |
+ file_list_required=file_list_required)
|
|
587 |
+ result.combine(import_result)
|
|
588 |
+ processed_directories.add(dirname)
|
|
589 |
+ elif isinstance(source_directory.index[f].buildstream_object, CasBasedDirectory):
|
|
590 |
+ # The thing in the input file list is a directory on
|
|
591 |
+ # its own. We don't need to do anything other than create it if it doesn't exist.
|
|
592 |
+ # If we already have an entry with the same name that isn't a directory, that
|
|
593 |
+ # will be dealt with when importing files in this directory.
|
|
594 |
+ if f not in self.index:
|
|
595 |
+ self.descend(f, create=True)
|
|
596 |
+ else:
|
|
597 |
+ # We're importing a file or symlink - replace anything with the same name.
|
|
598 |
+ importable = self._check_replacement(f, path_prefix, result)
|
|
599 |
+ if importable:
|
|
600 |
+ item = source_directory.index[f].pb_object
|
|
601 |
+ if isinstance(item, remote_execution_pb2.FileNode):
|
|
602 |
+ filenode = self.pb2_directory.files.add(digest=item.digest, name=f,
|
|
603 |
+ is_executable=item.is_executable)
|
|
604 |
+ self.index[f] = IndexEntry(filenode, modified=True)
|
|
605 |
+ else:
|
|
606 |
+ assert isinstance(item, remote_execution_pb2.SymlinkNode)
|
|
607 |
+ self._add_new_link_direct(name=f, target=item.target)
|
|
608 |
+ else:
|
|
609 |
+ result.ignored.append(os.path.join(path_prefix, f))
|
|
610 |
+ return result
|
|
611 |
+ |
|
360 | 612 |
def import_files(self, external_pathspec, *, files=None,
|
361 | 613 |
report_written=True, update_utimes=False,
|
362 | 614 |
can_link=False):
|
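_files_in_subdir() above is a plain prefix filter, and _partial_import_cas_into_cas() uses it to hand each subdirectory exactly the subset of paths that belong to it. Its behaviour is easy to pin down in isolation:

    import os

    def files_in_subdir(sorted_files, dirname):
        # Keep only paths under 'dirname', with that prefix removed.
        if not dirname.endswith(os.path.sep):
            dirname += os.path.sep
        return [f[len(dirname):] for f in sorted_files if f.startswith(dirname)]

    files_in_subdir(['a/x', 'a/y/z', 'b/w'], 'a')   # -> ['x', 'y/z']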
... | ... | @@ -378,28 +630,27 @@ class CasBasedDirectory(Directory): |
378 | 630 |
|
379 | 631 |
can_link (bool): Ignored, since hard links do not have any meaning within CAS.
|
380 | 632 |
"""
|
381 |
- if isinstance(external_pathspec, FileBasedDirectory):
|
|
382 |
- source_directory = external_pathspec._get_underlying_directory()
|
|
383 |
- elif isinstance(external_pathspec, CasBasedDirectory):
|
|
384 |
- # TODO: This transfers from one CAS to another via the
|
|
385 |
- # filesystem, which is very inefficient. Alter this so it
|
|
386 |
- # transfers refs across directly.
|
|
387 |
- with tempfile.TemporaryDirectory(prefix="roundtrip") as tmpdir:
|
|
388 |
- external_pathspec.export_files(tmpdir)
|
|
389 |
- if files is None:
|
|
390 |
- files = list_relative_paths(tmpdir)
|
|
391 |
- result = self._import_files_from_directory(tmpdir, files=files)
|
|
392 |
- return result
|
|
393 |
- else:
|
|
394 |
- source_directory = external_pathspec
|
|
395 | 633 |
|
396 | 634 |
if files is None:
|
397 |
- files = list_relative_paths(source_directory)
|
|
635 |
+ if isinstance(external_pathspec, str):
|
|
636 |
+ files = list_relative_paths(external_pathspec)
|
|
637 |
+ else:
|
|
638 |
+ assert isinstance(external_pathspec, Directory)
|
|
639 |
+ files = external_pathspec.list_relative_paths()
|
|
640 |
+ |
|
641 |
+ if isinstance(external_pathspec, FileBasedDirectory):
|
|
642 |
+ source_directory = external_pathspec.get_underlying_directory()
|
|
643 |
+ result = self._import_files_from_directory(source_directory, files=files)
|
|
644 |
+ elif isinstance(external_pathspec, str):
|
|
645 |
+ source_directory = external_pathspec
|
|
646 |
+ result = self._import_files_from_directory(source_directory, files=files)
|
|
647 |
+ else:
|
|
648 |
+ assert isinstance(external_pathspec, CasBasedDirectory)
|
|
649 |
+ result = self._partial_import_cas_into_cas(external_pathspec, files=list(files))
|
|
398 | 650 |
|
399 | 651 |
# TODO: No notice is taken of report_written, update_utimes or can_link.
|
400 | 652 |
# Current behaviour is to fully populate the report, which is inefficient,
|
401 | 653 |
# but still correct.
|
402 |
- result = self._import_files_from_directory(source_directory, files=files)
|
|
403 | 654 |
|
404 | 655 |
# We need to recalculate and store the hashes of all directories both
|
405 | 656 |
# up and down the tree; we have changed our directory by importing files
|
... | ... | @@ -511,6 +762,28 @@ class CasBasedDirectory(Directory): |
511 | 762 |
else:
|
512 | 763 |
self._mark_directory_unmodified()
|
513 | 764 |
|
765 |
+ def _lightweight_resolve_to_index(self, path):
|
|
766 |
+ """A lightweight function for transforming paths into IndexEntry
|
|
767 |
+ objects. This does not follow symlinks.
|
|
768 |
+ |
|
769 |
+ path: The string to resolve. This should be a series of path
|
|
770 |
+ components separated by the protocol buffer path separator
|
|
771 |
+ _pb2_path_sep.
|
|
772 |
+ |
|
773 |
+ Returns: the IndexEntry found, or None if any of the path components were not present.
|
|
774 |
+ |
|
775 |
+ """
|
|
776 |
+ directory = self
|
|
777 |
+ path_components = path.split(CasBasedDirectory._pb2_path_sep)
|
|
778 |
+ for component in path_components[:-1]:
|
|
779 |
+ if component not in directory.index:
|
|
780 |
+ return None
|
|
781 |
+ if isinstance(directory.index[component].buildstream_object, CasBasedDirectory):
|
|
782 |
+ directory = directory.index[component].buildstream_object
|
|
783 |
+ else:
|
|
784 |
+ return None
|
|
785 |
+ return directory.index.get(path_components[-1], None)
|
|
786 |
+ |
|
514 | 787 |
def list_modified_paths(self):
|
515 | 788 |
"""Provide a list of relative paths which have been modified since the
|
516 | 789 |
last call to mark_unmodified.
|
... | ... | @@ -518,29 +791,43 @@ class CasBasedDirectory(Directory): |
518 | 791 |
Return value: List(str) - list of modified paths
|
519 | 792 |
"""
|
520 | 793 |
|
521 |
- filelist = []
|
|
522 |
- for (k, v) in self.index.items():
|
|
523 |
- if isinstance(v.buildstream_object, CasBasedDirectory):
|
|
524 |
- filelist.extend([k + os.path.sep + x for x in v.buildstream_object.list_modified_paths()])
|
|
525 |
- elif isinstance(v.pb_object, remote_execution_pb2.FileNode) and v.modified:
|
|
526 |
- filelist.append(k)
|
|
527 |
- return filelist
|
|
794 |
+ for p in self.list_relative_paths():
|
|
795 |
+ i = self._lightweight_resolve_to_index(p)
|
|
796 |
+ if i and i.modified:
|
|
797 |
+ yield p
|
|
528 | 798 |
|
529 |
- def list_relative_paths(self):
|
|
799 |
+ def list_relative_paths(self, relpath=""):
|
|
530 | 800 |
"""Provide a list of all relative paths.
|
531 | 801 |
|
532 |
- NOTE: This list is not in the same order as utils.list_relative_paths.
|
|
533 |
- |
|
534 | 802 |
Return value: List(str) - list of all paths
|
535 | 803 |
"""
|
536 | 804 |
|
537 |
- filelist = []
|
|
538 |
- for (k, v) in self.index.items():
|
|
539 |
- if isinstance(v.buildstream_object, CasBasedDirectory):
|
|
540 |
- filelist.extend([k + os.path.sep + x for x in v.buildstream_object.list_relative_paths()])
|
|
541 |
- elif isinstance(v.pb_object, remote_execution_pb2.FileNode):
|
|
542 |
- filelist.append(k)
|
|
543 |
- return filelist
|
|
805 |
+ symlink_list = filter(lambda i: isinstance(i[1].pb_object, remote_execution_pb2.SymlinkNode),
|
|
806 |
+ self.index.items())
|
|
807 |
+ file_list = list(filter(lambda i: isinstance(i[1].pb_object, remote_execution_pb2.FileNode),
|
|
808 |
+ self.index.items()))
|
|
809 |
+ directory_list = filter(lambda i: isinstance(i[1].buildstream_object, CasBasedDirectory),
|
|
810 |
+ self.index.items())
|
|
811 |
+ |
|
812 |
+ # We need to mimic the behaviour of os.walk, in which symlinks
|
|
813 |
+ # to directories count as directories and symlinks to file or
|
|
814 |
+ # broken symlinks count as files. os.walk doesn't follow
|
|
815 |
+ # symlinks, so we don't recurse.
|
|
816 |
+ for (k, v) in sorted(symlink_list):
|
|
817 |
+ target = self._resolve(k, absolute_symlinks_resolve=True)
|
|
818 |
+ if isinstance(target, CasBasedDirectory):
|
|
819 |
+ yield os.path.join(relpath, k)
|
|
820 |
+ else:
|
|
821 |
+ file_list.append((k, v))
|
|
822 |
+ |
|
823 |
+ if file_list == [] and relpath != "":
|
|
824 |
+ yield relpath
|
|
825 |
+ else:
|
|
826 |
+ for (k, v) in sorted(file_list):
|
|
827 |
+ yield os.path.join(relpath, k)
|
|
828 |
+ |
|
829 |
+ for (k, v) in sorted(directory_list):
|
|
830 |
+ yield from v.buildstream_object.list_relative_paths(relpath=os.path.join(relpath, k))
|
|
544 | 831 |
|
545 | 832 |
def recalculate_hash(self):
|
546 | 833 |
""" Recalcuates the hash for this directory and store the results in
|
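The generator rewrite above makes list_relative_paths() mimic os.walk ordering: symlinks to directories count as directories, everything else counts as a file, and a directory with no files yields its own relative path. A distilled sketch of the same traversal over a nested dict, where None marks a file (illustrative only):

    import os

    def list_relative_paths(tree, relpath=""):
        files = sorted(k for k, v in tree.items() if v is None)
        dirs = sorted(k for k, v in tree.items() if v is not None)
        if not files and relpath:
            yield relpath                      # an empty directory lists itself
        else:
            for k in files:
                yield os.path.join(relpath, k)
        for k in dirs:
            yield from list_relative_paths(tree[k], os.path.join(relpath, k))

    list(list_relative_paths({'a': None, 'sub': {'b': None}}))   # -> ['a', 'sub/b']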
... | ... | @@ -105,6 +105,20 @@ class FileListResult(): |
105 | 105 |
return ret
|
106 | 106 |
|
107 | 107 |
|
108 |
+class DirectoryDescription():
|
|
109 |
+ """
|
|
110 |
+ This object keeps the description of a single workspace directory in one place.
|
|
111 |
+ """
|
|
112 |
+ def __init__(self, directory, *, use_default=True):
|
|
113 |
+ """
|
|
114 |
+ Args:
|
|
115 |
+ directory (str): The path to the directory this object describes
|
|
116 |
+ use_default (bool): Whether to resolve the directory relative to the default workspace location.
|
|
117 |
+ """
|
|
118 |
+ self.directory = directory
|
|
119 |
+ self.use_default = use_default
|
|
120 |
+ |
|
121 |
+ |
|
108 | 122 |
def list_relative_paths(directory, *, list_dirs=True):
|
109 | 123 |
"""A generator for walking directory relative paths
|
110 | 124 |
|
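DirectoryDescription above is a small value object: the CLI constructs one per element, and Stream.workspace_open() resolves use_default entries under the configured workspacedir. A hypothetical usage sketch:

    # Explicit location given on the command line:
    explicit = DirectoryDescription('/src/hello', use_default=False)

    # No location given; the path is the element name minus '.bst', to be
    # joined onto context.workspacedir later:
    implicit = DirectoryDescription('hello')
    assert implicit.use_default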
... | ... | @@ -7,7 +7,7 @@ commands: |
7 | 7 |
# Capture workspace open output
|
8 | 8 |
- directory: ../examples/developing/
|
9 | 9 |
output: ../source/sessions/developing-workspace-open.html
|
10 |
- command: workspace open hello.bst workspace_hello
|
|
10 |
+ command: workspace open hello.bst --directory workspace_hello
|
|
11 | 11 |
|
12 | 12 |
# Capture output from workspace list
|
13 | 13 |
- directory: ../examples/developing/
|
... | ... | @@ -55,7 +55,7 @@ def test_open_workspace(cli, tmpdir, datafiles): |
55 | 55 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
56 | 56 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
|
57 | 57 |
|
58 |
- result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
|
|
58 |
+ result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
|
|
59 | 59 |
result.assert_success()
|
60 | 60 |
|
61 | 61 |
result = cli.run(project=project, args=['workspace', 'list'])
|
... | ... | @@ -72,7 +72,7 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles): |
72 | 72 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
73 | 73 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
|
74 | 74 |
|
75 |
- result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
|
|
75 |
+ result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
|
|
76 | 76 |
result.assert_success()
|
77 | 77 |
|
78 | 78 |
result = cli.run(project=project, args=['workspace', 'list'])
|
... | ... | @@ -44,7 +44,7 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles): |
44 | 44 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
|
45 | 45 |
|
46 | 46 |
result = cli.run(project=project,
|
47 |
- args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
|
|
47 |
+ args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
|
|
48 | 48 |
result.assert_success()
|
49 | 49 |
|
50 | 50 |
result = cli.run(project=project,
|
... | ... | @@ -509,7 +509,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles): |
509 | 509 |
|
510 | 510 |
# Now open a workspace on the junction
|
511 | 511 |
#
|
512 |
- result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
|
|
512 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
|
|
513 | 513 |
result.assert_success()
|
514 | 514 |
filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
|
515 | 515 |
|
... | ... | @@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir): |
47 | 47 |
workspace = tmpdir.join("workspace")
|
48 | 48 |
|
49 | 49 |
element = 'sub.bst:data.bst'
|
50 |
- args = ['workspace', 'open', element, str(workspace)]
|
|
50 |
+ args = ['workspace', 'open', '--directory', str(workspace), element]
|
|
51 | 51 |
result = cli.run(project=project, args=args)
|
52 | 52 |
result.assert_success()
|
53 | 53 |
|
... | ... | @@ -21,9 +21,11 @@ |
21 | 21 |
# Phillip Smyth <phillip smyth codethink co uk>
|
22 | 22 |
# Jonathan Maw <jonathan maw codethink co uk>
|
23 | 23 |
# Richard Maw <richard maw codethink co uk>
|
24 |
+# William Salmon <will salmon codethink co uk>
|
|
24 | 25 |
#
|
25 | 26 |
|
26 | 27 |
import os
|
28 |
+import stat
|
|
27 | 29 |
import pytest
|
28 | 30 |
import shutil
|
29 | 31 |
import subprocess
|
... | ... | @@ -43,65 +45,118 @@ DATA_DIR = os.path.join( |
43 | 45 |
)
|
44 | 46 |
|
45 | 47 |
|
46 |
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
|
47 |
- project_path=None, element_attrs=None):
|
|
48 |
- if not workspace_dir:
|
|
49 |
- workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
|
50 |
- if not project_path:
|
|
51 |
- project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
52 |
- else:
|
|
53 |
- shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
|
|
54 |
- bin_files_path = os.path.join(project_path, 'files', 'bin-files')
|
|
55 |
- element_path = os.path.join(project_path, 'elements')
|
|
56 |
- element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
|
|
48 |
+class WorkspaceCreater():
|
|
49 |
+ def __init__(self, cli, tmpdir, datafiles, project_path=None):
|
|
50 |
+ self.cli = cli
|
|
51 |
+ self.tmpdir = tmpdir
|
|
52 |
+ self.datafiles = datafiles
|
|
53 |
+ |
|
54 |
+ if not project_path:
|
|
55 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
56 |
+ else:
|
|
57 |
+ shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
|
|
58 |
+ |
|
59 |
+ self.project_path = project_path
|
|
60 |
+ self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
|
|
61 |
+ |
|
62 |
+ self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
|
|
63 |
+ |
|
64 |
+ def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
|
|
65 |
+ element_attrs=None):
|
|
66 |
+ element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
|
|
67 |
+ element_path = os.path.join(self.project_path, 'elements')
|
|
68 |
+ if not workspace_dir:
|
|
69 |
+ workspace_dir = os.path.join(self.workspace_cmd, element_name[:-len('.bst')])
|
|
70 |
+ |
|
71 |
+ # Create our repo object of the given source type with
|
|
72 |
+ # the bin files, and then collect the initial ref.
|
|
73 |
+ repo = create_repo(kind, str(self.tmpdir))
|
|
74 |
+ ref = repo.create(self.bin_files_path)
|
|
75 |
+ if track:
|
|
76 |
+ ref = None
|
|
77 |
+ |
|
78 |
+ # Write out our test target
|
|
79 |
+ element = {
|
|
80 |
+ 'kind': 'import',
|
|
81 |
+ 'sources': [
|
|
82 |
+ repo.source_config(ref=ref)
|
|
83 |
+ ]
|
|
84 |
+ }
|
|
85 |
+ if element_attrs:
|
|
86 |
+ element = {**element, **element_attrs}
|
|
87 |
+ _yaml.dump(element,
|
|
88 |
+ os.path.join(element_path,
|
|
89 |
+ element_name))
|
|
90 |
+ return element_name, element_path, workspace_dir
|
|
57 | 91 |
|
58 |
- # Create our repo object of the given source type with
|
|
59 |
- # the bin files, and then collect the initial ref.
|
|
60 |
- #
|
|
61 |
- repo = create_repo(kind, str(tmpdir))
|
|
62 |
- ref = repo.create(bin_files_path)
|
|
63 |
- if track:
|
|
64 |
- ref = None
|
|
92 |
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
|
|
93 |
+ element_attrs=None):
|
|
65 | 94 |
|
66 |
- # Write out our test target
|
|
67 |
- element = {
|
|
68 |
- 'kind': 'import',
|
|
69 |
- 'sources': [
|
|
70 |
- repo.source_config(ref=ref)
|
|
71 |
- ]
|
|
72 |
- }
|
|
73 |
- if element_attrs:
|
|
74 |
- element = {**element, **element_attrs}
|
|
75 |
- _yaml.dump(element,
|
|
76 |
- os.path.join(element_path,
|
|
77 |
- element_name))
|
|
95 |
+ element_tuples = []
|
|
78 | 96 |
|
79 |
- # Assert that there is no reference, a track & fetch is needed
|
|
80 |
- state = cli.get_element_state(project_path, element_name)
|
|
81 |
- if track:
|
|
82 |
- assert state == 'no reference'
|
|
83 |
- else:
|
|
84 |
- assert state == 'fetch needed'
|
|
97 |
+ if suffixs is None:
|
|
98 |
+ suffixs = ['', ] * len(kinds)
|
|
99 |
+ else:
|
|
100 |
+ if len(suffixs) != len(kinds):
|
|
101 |
+ raise "terable error"
|
|
85 | 102 |
|
86 |
- # Now open the workspace, this should have the effect of automatically
|
|
87 |
- # tracking & fetching the source from the repo.
|
|
88 |
- args = ['workspace', 'open']
|
|
89 |
- if track:
|
|
90 |
- args.append('--track')
|
|
91 |
- args.extend([element_name, workspace_dir])
|
|
92 |
- result = cli.run(project=project_path, args=args)
|
|
103 |
+ for suffix, kind in zip(suffixs, kinds):
|
|
104 |
+ element_name, element_path, workspace_dir = \
|
|
105 |
+ self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
|
|
106 |
+ element_attrs)
|
|
93 | 107 |
|
94 |
- result.assert_success()
|
|
108 |
+ # Assert that there is no reference, a track & fetch is needed
|
|
109 |
+ state = self.cli.get_element_state(self.project_path, element_name)
|
|
110 |
+ if track:
|
|
111 |
+ assert state == 'no reference'
|
|
112 |
+ else:
|
|
113 |
+ assert state == 'fetch needed'
|
|
114 |
+ element_tuples.append((element_name, workspace_dir))
|
|
95 | 115 |
|
96 |
- # Assert that we are now buildable because the source is
|
|
97 |
- # now cached.
|
|
98 |
- assert cli.get_element_state(project_path, element_name) == 'buildable'
|
|
116 |
+ return element_tuples
|
|
99 | 117 |
|
100 |
- # Check that the executable hello file is found in the workspace
|
|
101 |
- filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
|
102 |
- assert os.path.exists(filename)
|
|
118 |
+ def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
|
|
119 |
+ element_attrs=None):
|
|
120 |
+ |
|
121 |
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
|
|
122 |
+ element_attrs)
|
|
123 |
+ os.makedirs(self.workspace_cmd, exist_ok=True)
|
|
124 |
+ |
|
125 |
+ # Now open the workspace, this should have the effect of automatically
|
|
126 |
+ # tracking & fetching the source from the repo.
|
|
127 |
+ args = ['workspace', 'open']
|
|
128 |
+ if track:
|
|
129 |
+ args.append('--track')
|
|
130 |
+ if workspace_dir is not None:
|
|
131 |
+ assert len(element_tuples) == 1, "test logic error"
|
|
132 |
+ _, workspace_dir = element_tuples[0]
|
|
133 |
+ args.extend(['--directory', workspace_dir])
|
|
134 |
+ |
|
135 |
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
|
|
136 |
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
|
|
103 | 137 |
|
104 |
- return (element_name, project_path, workspace_dir)
|
|
138 |
+ result.assert_success()
|
|
139 |
+ |
|
140 |
+ for element_name, workspace_dir in element_tuples:
|
|
141 |
+ # Assert that we are now buildable because the source is
|
|
142 |
+ # now cached.
|
|
143 |
+ assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
|
|
144 |
+ |
|
145 |
+ # Check that the executable hello file is found in the workspace
|
|
146 |
+ filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
|
147 |
+ assert os.path.exists(filename)
|
|
148 |
+ |
|
149 |
+ return element_tuples
|
|
150 |
+ |
|
151 |
+ |
|
152 |
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
|
153 |
+ project_path=None, element_attrs=None):
|
|
154 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
|
|
155 |
+ workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
|
|
156 |
+ element_attrs)
|
|
157 |
+ assert len(workspaces) == 1
|
|
158 |
+ element_name, workspace = workspaces[0]
|
|
159 |
+ return element_name, workspace_object.project_path, workspace
|
|
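For orientation, the refactored helpers above can be exercised like this (a minimal
sketch, assuming the usual cli, tmpdir and datafiles pytest fixtures and the
repo_kinds list used throughout these tests):

    # Open one workspace per source kind in a single 'bst workspace open'
    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    for element_name, workspace_dir in workspace_object.open_workspaces(repo_kinds, False):
        assert os.path.exists(workspace_dir)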
105 | 160 |
|
106 | 161 |
|
107 | 162 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -128,6 +183,103 @@ def test_open_bzr_customize(cli, tmpdir, datafiles): |
128 | 183 |
assert(expected_output_str in str(output))
|
129 | 184 |
|
130 | 185 |
|
186 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
187 |
+def test_open_multi(cli, tmpdir, datafiles):
|
|
188 |
+ |
|
189 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
190 |
+ workspaces = workspace_object.open_workspaces(repo_kinds, False)
|
|
191 |
+ |
|
192 |
+ for (elname, workspace), kind in zip(workspaces, repo_kinds):
|
|
193 |
+ assert kind in elname
|
|
194 |
+ workspace_lsdir = os.listdir(workspace)
|
|
195 |
+ if kind == 'git':
|
|
196 |
+ assert('.git' in workspace_lsdir)
|
|
197 |
+ elif kind == 'bzr':
|
|
198 |
+ assert('.bzr' in workspace_lsdir)
|
|
199 |
+ else:
|
|
200 |
+ assert not ('.git' in workspace_lsdir)
|
|
201 |
+ assert not ('.bzr' in workspace_lsdir)
|
|
202 |
+ |
|
203 |
+ |
|
204 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
205 |
+def test_open_multi_unwritable(cli, tmpdir, datafiles):
|
|
206 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
207 |
+ |
|
208 |
+ element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
|
|
209 |
+ os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
|
|
210 |
+ |
|
211 |
+ # Now attempt to open the workspaces; with the default workspace
|
|
212 |
+ # directory made unwritable below, the command should fail.
|
|
213 |
+ args = ['workspace', 'open']
|
|
214 |
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
|
|
215 |
+ cli.configure({'workspacedir': workspace_object.workspace_cmd})
|
|
216 |
+ |
|
217 |
+ cwdstat = os.stat(workspace_object.workspace_cmd)
|
|
218 |
+ try:
|
|
219 |
+ os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode - stat.S_IWRITE)
|
|
220 |
+ result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
|
|
221 |
+ finally:
|
|
222 |
+ # Use this finally block to make sure we always put things back how they should be.
|
|
223 |
+ os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode)
|
|
224 |
+ |
|
225 |
+ result.assert_main_error(ErrorDomain.STREAM, None)
|
|
226 |
+ # Normally we avoid checking stderr in favour of using the machine-readable result.assert_main_error,
|
|
227 |
+ # but Tristan was very keen that the names of the elements still needing workspaces be present in the output.
|
|
228 |
+ assert (" ".join([element_name for element_name, workspace_dir_suffix in element_tuples[1:]]) in result.stderr)
|
|
229 |
+ |
|
230 |
+ |
|
231 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
232 |
+def test_open_multi_with_directory(cli, tmpdir, datafiles):
|
|
233 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
234 |
+ |
|
235 |
+ element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
|
|
236 |
+ os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
|
|
237 |
+ |
|
238 |
+ # Now attempt to open the workspaces; this should fail because
|
|
239 |
+ # --directory cannot be used when more than one element is given.
|
|
240 |
+ args = ['workspace', 'open']
|
|
241 |
+ args.extend(['--directory', 'any/dir/should/fail'])
|
|
242 |
+ |
|
243 |
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
|
|
244 |
+ result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
|
|
245 |
+ args=args)
|
|
246 |
+ |
|
247 |
+ result.assert_main_error(ErrorDomain.CAS, None)
|
|
248 |
+ assert ("Directory option can only be used if a single element is given" in result.stderr)
|
|
249 |
+ |
|
250 |
+ |
|
251 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
252 |
+def test_open_defaultlocation(cli, tmpdir, datafiles):
|
|
253 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
254 |
+ |
|
255 |
+ ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
|
|
256 |
+ os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
|
|
257 |
+ |
|
258 |
+ # Now open the workspace; this should fetch the source from the repo
|
|
259 |
+ # and create the workspace under the default workspace location.
|
|
260 |
+ args = ['workspace', 'open']
|
|
261 |
+ args.append(element_name)
|
|
262 |
+ |
|
263 |
+ # In the other tests we run workspace_object.cli.run with the optional cwd
|
|
264 |
+ # argument set to workspace_object.workspace_cmd. But here we set the default
|
|
265 |
+ # workspace location to workspace_object.workspace_cmd and call cli.run with
|
|
266 |
+ # no cwd option, so that it runs in the project directory.
|
|
267 |
+ cli.configure({'workspacedir': workspace_object.workspace_cmd})
|
|
268 |
+ result = workspace_object.cli.run(project=workspace_object.project_path,
|
|
269 |
+ args=args)
|
|
270 |
+ |
|
271 |
+ result.assert_success()
|
|
272 |
+ |
|
273 |
+ assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
|
|
274 |
+ |
|
275 |
+ # Check that the executable hello file is found in the workspace.
|
|
276 |
+ # Even though the cli.run function was not run with cwd = workspace_object.workspace_cmd,
|
|
277 |
+ # the workspace should have been created in there because we used the 'workspacedir'
|
|
278 |
+ # configuration option.
|
|
279 |
+ filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
|
280 |
+ assert os.path.exists(filename)
|
|
281 |
+ |
|
282 |
+ |
|
131 | 283 |
@pytest.mark.datafiles(DATA_DIR)
|
132 | 284 |
@pytest.mark.parametrize("kind", repo_kinds)
|
133 | 285 |
def test_open_track(cli, tmpdir, datafiles, kind):
|
... | ... | @@ -150,7 +302,7 @@ def test_open_force(cli, tmpdir, datafiles, kind): |
150 | 302 |
|
151 | 303 |
# Now open the workspace again with --force, this should happily succeed
|
152 | 304 |
result = cli.run(project=project, args=[
|
153 |
- 'workspace', 'open', '--force', element_name, workspace
|
|
305 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name
|
|
154 | 306 |
])
|
155 | 307 |
result.assert_success()
|
156 | 308 |
|
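The same mechanical change recurs throughout the hunks below: the workspace
directory moves from a trailing positional argument to the new --directory
option, leaving the positional arguments free for one or more element names.
Schematically, using the hunk above:

    # Old CLI: the directory is a positional argument after the element name
    ['workspace', 'open', '--force', element_name, workspace]
    # New CLI: the directory is passed with --directory; positional arguments
    # are now element names only
    ['workspace', 'open', '--force', '--directory', workspace, element_name]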
... | ... | @@ -165,7 +317,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind): |
165 | 317 |
|
166 | 318 |
# Now open the workspace again with --force, this should happily succeed
|
167 | 319 |
result = cli.run(project=project, args=[
|
168 |
- 'workspace', 'open', '--force', element_name, workspace
|
|
320 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name
|
|
169 | 321 |
])
|
170 | 322 |
result.assert_success()
|
171 | 323 |
|
... | ... | @@ -196,7 +348,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind): |
196 | 348 |
|
197 | 349 |
# Now open the workspace again with --force, this should happily succeed
|
198 | 350 |
result = cli.run(project=project, args=[
|
199 |
- 'workspace', 'open', '--force', element_name2, workspace
|
|
351 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name2
|
|
200 | 352 |
])
|
201 | 353 |
|
202 | 354 |
# Assert that the file in workspace 1 has been replaced
|
... | ... | @@ -504,7 +656,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles): |
504 | 656 |
# Now open the workspace. We don't need to checkout the source though.
|
505 | 657 |
workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
|
506 | 658 |
os.makedirs(workspace)
|
507 |
- args = ['workspace', 'open', '--no-checkout', element_name, workspace]
|
|
659 |
+ args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
|
|
508 | 660 |
result = cli.run(project=project, args=args)
|
509 | 661 |
result.assert_success()
|
510 | 662 |
|
... | ... | @@ -766,7 +918,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte |
766 | 918 |
element_name))
|
767 | 919 |
|
768 | 920 |
# Make a change to the workspaces file
|
769 |
- result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
921 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
770 | 922 |
result.assert_success()
|
771 | 923 |
result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
|
772 | 924 |
result.assert_success()
|
... | ... | @@ -278,7 +278,7 @@ def test_workspace_visible(cli, tmpdir, datafiles): |
278 | 278 |
|
279 | 279 |
# Open a workspace on our build failing element
|
280 | 280 |
#
|
281 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
281 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
282 | 282 |
assert res.exit_code == 0
|
283 | 283 |
|
284 | 284 |
# Ensure the dependencies of our build failing element are built
|
... | ... | @@ -23,7 +23,7 @@ def test_workspace_mount(cli, tmpdir, datafiles): |
23 | 23 |
workspace = os.path.join(cli.directory, 'workspace')
|
24 | 24 |
element_name = 'workspace/workspace-mount.bst'
|
25 | 25 |
|
26 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
26 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
27 | 27 |
assert res.exit_code == 0
|
28 | 28 |
|
29 | 29 |
res = cli.run(project=project, args=['build', element_name])
|
... | ... | @@ -39,7 +39,7 @@ def test_workspace_commanddir(cli, tmpdir, datafiles): |
39 | 39 |
workspace = os.path.join(cli.directory, 'workspace')
|
40 | 40 |
element_name = 'workspace/workspace-commanddir.bst'
|
41 | 41 |
|
42 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
42 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
43 | 43 |
assert res.exit_code == 0
|
44 | 44 |
|
45 | 45 |
res = cli.run(project=project, args=['build', element_name])
|
... | ... | @@ -75,7 +75,7 @@ def test_workspace_updated_dependency(cli, tmpdir, datafiles): |
75 | 75 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
76 | 76 |
|
77 | 77 |
# First open the workspace
|
78 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
78 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
79 | 79 |
assert res.exit_code == 0
|
80 | 80 |
|
81 | 81 |
# We build the workspaced element, so that we have an artifact
|
... | ... | @@ -130,7 +130,7 @@ def test_workspace_update_dependency_failed(cli, tmpdir, datafiles): |
130 | 130 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
131 | 131 |
|
132 | 132 |
# First open the workspace
|
133 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
133 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
134 | 134 |
assert res.exit_code == 0
|
135 | 135 |
|
136 | 136 |
# We build the workspaced element, so that we have an artifact
|
... | ... | @@ -205,7 +205,7 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles): |
205 | 205 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
206 | 206 |
|
207 | 207 |
# First open the workspace
|
208 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
208 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
209 | 209 |
assert res.exit_code == 0
|
210 | 210 |
|
211 | 211 |
# We build the workspaced element, so that we have an artifact
|
... | ... | @@ -258,7 +258,7 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles): |
258 | 258 |
_yaml.dump(element, os.path.join(element_path, element_name))
|
259 | 259 |
|
260 | 260 |
# We open a workspace on the above element
|
261 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
261 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
262 | 262 |
res.assert_success()
|
263 | 263 |
|
264 | 264 |
# Then we build, and check whether the configure step succeeded
|
... | ... | @@ -108,7 +108,7 @@ def test_filter_forbid_also_rdep(datafiles, cli): |
108 | 108 |
def test_filter_workspace_open(datafiles, cli, tmpdir):
|
109 | 109 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
110 | 110 |
workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
|
111 |
- result = cli.run(project=project, args=['workspace', 'open', 'deps-permitted.bst', workspace_dir])
|
|
111 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
|
|
112 | 112 |
result.assert_success()
|
113 | 113 |
assert os.path.exists(os.path.join(workspace_dir, "foo"))
|
114 | 114 |
assert os.path.exists(os.path.join(workspace_dir, "bar"))
|
... | ... | @@ -120,7 +120,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir): |
120 | 120 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
121 | 121 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
122 | 122 |
workspace_dir = os.path.join(tempdir, "workspace")
|
123 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
123 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
124 | 124 |
result.assert_success()
|
125 | 125 |
src = os.path.join(workspace_dir, "foo")
|
126 | 126 |
dst = os.path.join(workspace_dir, "quux")
|
... | ... | @@ -138,7 +138,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir): |
138 | 138 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
139 | 139 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
140 | 140 |
workspace_dir = os.path.join(tempdir, "workspace")
|
141 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
141 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
142 | 142 |
result.assert_success()
|
143 | 143 |
src = os.path.join(workspace_dir, "foo")
|
144 | 144 |
dst = os.path.join(workspace_dir, "quux")
|
... | ... | @@ -158,7 +158,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir): |
158 | 158 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
159 | 159 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
160 | 160 |
workspace_dir = os.path.join(tempdir, "workspace")
|
161 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
161 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
162 | 162 |
result.assert_success()
|
163 | 163 |
src = os.path.join(workspace_dir, "foo")
|
164 | 164 |
dst = os.path.join(workspace_dir, "quux")
|
1 |
+from hashlib import sha256
|
|
2 |
+import os
|
|
3 |
+import pytest
|
|
4 |
+import random
|
|
5 |
+import tempfile
|
|
6 |
+from tests.testutils import cli
|
|
7 |
+ |
|
8 |
+from buildstream.storage._casbaseddirectory import CasBasedDirectory
|
|
9 |
+from buildstream.storage._filebaseddirectory import FileBasedDirectory
|
|
10 |
+from buildstream._artifactcache import ArtifactCache
|
|
11 |
+from buildstream._artifactcache.cascache import CASCache
|
|
12 |
+from buildstream import utils
|
|
13 |
+ |
|
14 |
+ |
|
15 |
+# These are comparative tests that check that FileBasedDirectory and
|
|
16 |
+# CasBasedDirectory act identically.
|
|
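In outline, each comparison builds the same content twice and checks that the
two implementations agree (a minimal sketch of what the helpers below do):

    file_dir = create_new_filedir(root, tmpdir)               # FileBasedDirectory
    cas_dir = create_new_casdir(root, fake_context, tmpdir)   # CasBasedDirectory
    assert list(file_dir.list_relative_paths()) == list(cas_dir.list_relative_paths())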
17 |
+ |
|
18 |
+ |
|
19 |
+class FakeArtifactCache():
|
|
20 |
+ def __init__(self):
|
|
21 |
+ self.cas = None
|
|
22 |
+ |
|
23 |
+ |
|
24 |
+class FakeContext():
|
|
25 |
+ def __init__(self):
|
|
26 |
+ self.artifactdir = ''
|
|
27 |
+ self.artifactcache = FakeArtifactCache()
|
|
28 |
+ |
|
29 |
+ |
|
30 |
+# This is a set of example file system contents. It's a set of trees
|
|
31 |
+# which are either expected to be problematic or were found to be
|
|
32 |
+# problematic during random testing.
|
|
33 |
+ |
|
34 |
+# The test attempts to import each on top of each other to test
|
|
35 |
+# importing works consistently. Each tuple is defined as (<filename>,
|
|
36 |
+# <type>, <content>). Type can be 'F' (file), 'S' (symlink) or 'D'
|
|
37 |
+# (directory) with content being the contents for a file or the
|
|
38 |
+# destination for a symlink.
|
|
39 |
+root_filesets = [
|
|
40 |
+ [('a/b/c/textfile1', 'F', 'This is textfile 1\n')],
|
|
41 |
+ [('a/b/c/textfile1', 'F', 'This is the replacement textfile 1\n')],
|
|
42 |
+ [('a/b/d', 'D', '')],
|
|
43 |
+ [('a/b/c', 'S', '/a/b/d')],
|
|
44 |
+ [('a/b/d', 'S', '/a/b/c')],
|
|
45 |
+ [('a/b/d', 'D', ''), ('a/b/c', 'S', '/a/b/d')],
|
|
46 |
+ [('a/b/c', 'D', ''), ('a/b/d', 'S', '/a/b/c')],
|
|
47 |
+ [('a/b', 'F', 'This is textfile 1\n')],
|
|
48 |
+ [('a/b/c', 'F', 'This is textfile 1\n')],
|
|
49 |
+ [('a/b/c', 'D', '')]
|
|
50 |
+]
|
|
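For example, the fileset [('a/b/d', 'D', ''), ('a/b/c', 'S', '/a/b/d')] describes
a tree containing an empty directory a/b/d and a symlink a/b/c pointing at
/a/b/d; importing it over [('a/b/c', 'D', '')] exercises the
symlink-over-directory case that _import_test() checks below.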
51 |
+ |
|
52 |
+empty_hash_ref = sha256().hexdigest()
|
|
53 |
+RANDOM_SEED = 69105
|
|
54 |
+NUM_RANDOM_TESTS = 10
|
|
55 |
+ |
|
56 |
+ |
|
57 |
+def generate_import_roots(rootno, directory):
|
|
58 |
+ rootname = "root{}".format(rootno)
|
|
59 |
+ rootdir = os.path.join(directory, "content", rootname)
|
|
60 |
+ if os.path.exists(rootdir):
|
|
61 |
+ return
|
|
62 |
+ for (path, typesymbol, content) in root_filesets[rootno - 1]:
|
|
63 |
+ if typesymbol == 'F':
|
|
64 |
+ (dirnames, filename) = os.path.split(path)
|
|
65 |
+ os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
|
|
66 |
+ with open(os.path.join(rootdir, dirnames, filename), "wt") as f:
|
|
67 |
+ f.write(content)
|
|
68 |
+ elif typesymbol == 'D':
|
|
69 |
+ os.makedirs(os.path.join(rootdir, path), exist_ok=True)
|
|
70 |
+ elif typesymbol == 'S':
|
|
71 |
+ (dirnames, filename) = os.path.split(path)
|
|
72 |
+ os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
|
|
73 |
+ os.symlink(content, os.path.join(rootdir, path))
|
|
74 |
+ |
|
75 |
+ |
|
76 |
+def generate_random_root(rootno, directory):
|
|
77 |
+ # By seeding the random number generator, we ensure these tests
|
|
78 |
+ # will be repeatable, at least until Python changes the random
|
|
79 |
+ # number algorithm.
|
|
80 |
+ random.seed(RANDOM_SEED + rootno)
|
|
81 |
+ rootname = "root{}".format(rootno)
|
|
82 |
+ rootdir = os.path.join(directory, "content", rootname)
|
|
83 |
+ if os.path.exists(rootdir):
|
|
84 |
+ return
|
|
85 |
+ things = []
|
|
86 |
+ locations = ['.']
|
|
87 |
+ os.makedirs(rootdir)
|
|
88 |
+ for i in range(0, 100):
|
|
89 |
+ location = random.choice(locations)
|
|
90 |
+ thingname = "node{}".format(i)
|
|
91 |
+ thing = random.choice(['dir', 'link', 'file'])
|
|
92 |
+ target = os.path.join(rootdir, location, thingname)
|
|
93 |
+ if thing == 'dir':
|
|
94 |
+ os.makedirs(target)
|
|
95 |
+ locations.append(os.path.join(location, thingname))
|
|
96 |
+ elif thing == 'file':
|
|
97 |
+ with open(target, "wt") as f:
|
|
98 |
+ f.write("This is node {}\n".format(i))
|
|
99 |
+ elif thing == 'link':
|
|
100 |
+ symlink_type = random.choice(['absolute', 'relative', 'broken'])
|
|
101 |
+ if symlink_type == 'broken' or not things:
|
|
102 |
+ os.symlink("/broken", target)
|
|
103 |
+ elif symlink_type == 'absolute':
|
|
104 |
+ symlink_destination = random.choice(things)
|
|
105 |
+ os.symlink(symlink_destination, target)
|
|
106 |
+ else:
|
|
107 |
+ symlink_destination = random.choice(things)
|
|
108 |
+ relative_link = os.path.relpath(symlink_destination, start=location)
|
|
109 |
+ os.symlink(relative_link, target)
|
|
110 |
+ things.append(os.path.join(location, thingname))
|
|
111 |
+ |
|
112 |
+ |
|
113 |
+def file_contents(path):
|
|
114 |
+ with open(path, "r") as f:
|
|
115 |
+ result = f.read()
|
|
116 |
+ return result
|
|
117 |
+ |
|
118 |
+ |
|
119 |
+def file_contents_are(path, contents):
|
|
120 |
+ return file_contents(path) == contents
|
|
121 |
+ |
|
122 |
+ |
|
123 |
+def create_new_casdir(root_number, fake_context, tmpdir):
|
|
124 |
+ d = CasBasedDirectory(fake_context)
|
|
125 |
+ d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
|
|
126 |
+ assert d.ref.hash != empty_hash_ref
|
|
127 |
+ return d
|
|
128 |
+ |
|
129 |
+ |
|
130 |
+def create_new_filedir(root_number, tmpdir):
|
|
131 |
+ root = os.path.join(tmpdir, "vdir")
|
|
132 |
+ os.makedirs(root)
|
|
133 |
+ d = FileBasedDirectory(root)
|
|
134 |
+ d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
|
|
135 |
+ return d
|
|
136 |
+ |
|
137 |
+ |
|
138 |
+def combinations(integer_range):
|
|
139 |
+ for x in integer_range:
|
|
140 |
+ for y in integer_range:
|
|
141 |
+ yield (x, y)
|
|
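For illustration, the generator enumerates the Cartesian product of the range
with itself:

    list(combinations(range(1, 3)))   # [(1, 1), (1, 2), (2, 1), (2, 2)]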
142 |
+ |
|
143 |
+ |
|
144 |
+def resolve_symlinks(path, root):
|
|
145 |
+ """ A function to resolve symlinks inside 'path' components apart from the last one.
|
|
146 |
+ For example, resolve_symlinks('/a/b/c/d', '/a/b')
|
|
147 |
+ will return '/a/b/f/d' if /a/b/c is a symlink to /a/b/f. The final component of
|
|
148 |
+ 'path' is not resolved, because we typically want to inspect the symlink found
|
|
149 |
+ at that path, not its target.
|
|
150 |
+ |
|
151 |
+ """
|
|
152 |
+ components = path.split(os.path.sep)
|
|
153 |
+ location = root
|
|
154 |
+ for i in range(0, len(components) - 1):
|
|
155 |
+ location = os.path.join(location, components[i])
|
|
156 |
+ if os.path.islink(location):
|
|
157 |
+ # Resolve the link, add on all the remaining components
|
|
158 |
+ target = os.readlink(location)
|
|
159 |
+ tail = os.path.sep.join(components[i + 1:])
|
|
160 |
+ |
|
161 |
+ if target.startswith(os.path.sep):
|
|
162 |
+ # Absolute link - relative to root
|
|
163 |
+ location = os.path.join(root, target, tail)
|
|
164 |
+ else:
|
|
165 |
+ # Relative link - relative to symlink location
|
|
166 |
+ location = os.path.join(location, target)
|
|
167 |
+ return resolve_symlinks(location, root)
|
|
168 |
+ # If we got here, no symlinks were found. Add on the final component and return.
|
|
169 |
+ location = os.path.join(location, components[-1])
|
|
170 |
+ return location
|
|
171 |
+ |
|
172 |
+ |
|
173 |
+def directory_not_empty(path):
|
|
174 |
+ return os.listdir(path)
|
|
175 |
+ |
|
176 |
+ |
|
177 |
+def _import_test(tmpdir, original, overlay, generator_function, verify_contents=False):
|
|
178 |
+ fake_context = FakeContext()
|
|
179 |
+ fake_context.artifactcache.cas = CASCache(tmpdir)
|
|
180 |
+ # Create some fake content
|
|
181 |
+ generator_function(original, tmpdir)
|
|
182 |
+ if original != overlay:
|
|
183 |
+ generator_function(overlay, tmpdir)
|
|
184 |
+ |
|
185 |
+ d = create_new_casdir(original, fake_context, tmpdir)
|
|
186 |
+ |
|
187 |
+ duplicate_cas = create_new_casdir(original, fake_context, tmpdir)
|
|
188 |
+ |
|
189 |
+ assert duplicate_cas.ref.hash == d.ref.hash
|
|
190 |
+ |
|
191 |
+ d2 = create_new_casdir(overlay, fake_context, tmpdir)
|
|
192 |
+ d.import_files(d2)
|
|
193 |
+ export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
|
|
194 |
+ roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
|
|
195 |
+ d2.export_files(roundtrip_dir)
|
|
196 |
+ d.export_files(export_dir)
|
|
197 |
+ |
|
198 |
+ if verify_contents:
|
|
199 |
+ for item in root_filesets[overlay - 1]:
|
|
200 |
+ (path, typename, content) = item
|
|
201 |
+ realpath = resolve_symlinks(path, export_dir)
|
|
202 |
+ if typename == 'F':
|
|
203 |
+ if os.path.isdir(realpath) and directory_not_empty(realpath):
|
|
204 |
+ # The file should not have overwritten the directory in this case.
|
|
205 |
+ pass
|
|
206 |
+ else:
|
|
207 |
+ assert os.path.isfile(realpath), "{} did not exist in the combined virtual directory".format(path)
|
|
208 |
+ assert file_contents_are(realpath, content)
|
|
209 |
+ elif typename == 'S':
|
|
210 |
+ if os.path.isdir(realpath) and directory_not_empty(realpath):
|
|
211 |
+ # The symlink should not have overwritten the directory in this case.
|
|
212 |
+ pass
|
|
213 |
+ else:
|
|
214 |
+ assert os.path.islink(realpath)
|
|
215 |
+ assert os.readlink(realpath) == content
|
|
216 |
+ elif typename == 'D':
|
|
217 |
+ # We can't do any more tests than this because it
|
|
218 |
+ # depends on things present in the original. Blank
|
|
219 |
+ # directories here will be ignored and the original
|
|
220 |
+ # left in place.
|
|
221 |
+ assert os.path.lexists(realpath)
|
|
222 |
+ |
|
223 |
+ # Now do the same thing with filebaseddirectories and check the contents match
|
|
224 |
+ |
|
225 |
+ files = list(utils.list_relative_paths(roundtrip_dir))
|
|
226 |
+ duplicate_cas._import_files_from_directory(roundtrip_dir, files=files)
|
|
227 |
+ duplicate_cas._recalculate_recursing_down()
|
|
228 |
+ if duplicate_cas.parent:
|
|
229 |
+ duplicate_cas.parent._recalculate_recursing_up(duplicate_cas)
|
|
230 |
+ |
|
231 |
+ assert duplicate_cas.ref.hash == d.ref.hash
|
|
232 |
+ |
|
233 |
+ |
|
234 |
+# It's possible to parameterize on both original and overlay values,
|
|
235 |
+# but this leads to more tests being listed in the output than are
|
|
236 |
+# comfortable.
|
|
237 |
+@pytest.mark.parametrize("original", range(1, len(root_filesets) + 1))
|
|
238 |
+def test_fixed_cas_import(cli, tmpdir, original):
|
|
239 |
+ for overlay in range(1, len(root_filesets) + 1):
|
|
240 |
+ _import_test(str(tmpdir), original, overlay, generate_import_roots, verify_contents=True)
|
|
241 |
+ |
|
242 |
+ |
|
243 |
+@pytest.mark.parametrize("original", range(1, NUM_RANDOM_TESTS + 1))
|
|
244 |
+def test_random_cas_import(cli, tmpdir, original):
|
|
245 |
+ for overlay in range(1, NUM_RANDOM_TESTS + 1):
|
|
246 |
+ _import_test(str(tmpdir), original, overlay, generate_random_root, verify_contents=False)
|
|
247 |
+ |
|
248 |
+ |
|
249 |
+def _listing_test(tmpdir, root, generator_function):
|
|
250 |
+ fake_context = FakeContext()
|
|
251 |
+ fake_context.artifactcache.cas = CASCache(tmpdir)
|
|
252 |
+ # Create some fake content
|
|
253 |
+ generator_function(root, tmpdir)
|
|
254 |
+ |
|
255 |
+ d = create_new_filedir(root, tmpdir)
|
|
256 |
+ filelist = list(d.list_relative_paths())
|
|
257 |
+ |
|
258 |
+ d2 = create_new_casdir(root, fake_context, tmpdir)
|
|
259 |
+ filelist2 = list(d2.list_relative_paths())
|
|
260 |
+ |
|
261 |
+ assert filelist == filelist2
|
|
262 |
+ |
|
263 |
+ |
|
264 |
+@pytest.mark.parametrize("root", range(1, 11))
|
|
265 |
+def test_random_directory_listing(cli, tmpdir, root):
|
|
266 |
+ _listing_test(str(tmpdir), root, generate_random_root)
|
|
267 |
+ |
|
268 |
+ |
|
269 |
+@pytest.mark.parametrize("root", [1, 2, 3, 4, 5])
|
|
270 |
+def test_fixed_directory_listing(cli, tmpdir, root):
|
|
271 |
+ _listing_test(str(tmpdir), root, generate_import_roots)
|