[Notes] [Git][BuildStream/buildstream][valentindavid/rmtree_oserror] 28 commits: _platform/linux.py: Accept all configs for dummy sandbox



Title: GitLab

Valentin David pushed to branch valentindavid/rmtree_oserror at BuildStream / buildstream

Commits:

15 changed files:

Changes:

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -506,7 +506,7 @@ class CASCache(ArtifactCache):
    506 506
         def set_ref(self, ref, tree):
    
    507 507
             refpath = self._refpath(ref)
    
    508 508
             os.makedirs(os.path.dirname(refpath), exist_ok=True)
    
    509
    -        with utils.save_file_atomic(refpath, 'wb') as f:
    
    509
    +        with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
    
    510 510
                 f.write(tree.SerializeToString())
    
    511 511
     
    
    512 512
         # resolve_ref():
    

  • buildstream/_platform/darwin.py
    ... ... @@ -34,6 +34,9 @@ class Darwin(Platform):
    34 34
             super().__init__()
    
    35 35
     
    
    36 36
         def create_sandbox(self, *args, **kwargs):
    
    37
    +        kwargs['dummy_reason'] = \
    
    38
    +            "OSXFUSE is not supported and there are no supported sandbox" + \
    
    39
    +            "technologies for OSX at this time"
    
    37 40
             return SandboxDummy(*args, **kwargs)
    
    38 41
     
    
    39 42
         def check_sandbox_config(self, config):
    

  • buildstream/_platform/linux.py
    ... ... @@ -37,24 +37,30 @@ class Linux(Platform):
    37 37
             self._uid = os.geteuid()
    
    38 38
             self._gid = os.getegid()
    
    39 39
     
    
    40
    +        self._have_fuse = os.path.exists("/dev/fuse")
    
    41
    +        self._bwrap_exists = _site.check_bwrap_version(0, 0, 0)
    
    42
    +        self._have_good_bwrap = _site.check_bwrap_version(0, 1, 2)
    
    43
    +
    
    44
    +        self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
    
    45
    +
    
    40 46
             self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
    
    41 47
     
    
    42
    -        if self._local_sandbox_available():
    
    48
    +        if self._local_sandbox_available:
    
    43 49
                 self._user_ns_available = self._check_user_ns_available()
    
    44 50
             else:
    
    45 51
                 self._user_ns_available = False
    
    46 52
     
    
    47 53
         def create_sandbox(self, *args, **kwargs):
    
    48
    -        if not self._local_sandbox_available():
    
    49
    -            return SandboxDummy(*args, **kwargs)
    
    54
    +        if not self._local_sandbox_available:
    
    55
    +            return self._create_dummy_sandbox(*args, **kwargs)
    
    50 56
             else:
    
    51
    -            from ..sandbox._sandboxbwrap import SandboxBwrap
    
    52
    -            # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    
    53
    -            kwargs['user_ns_available'] = self._user_ns_available
    
    54
    -            kwargs['die_with_parent_available'] = self._die_with_parent_available
    
    55
    -            return SandboxBwrap(*args, **kwargs)
    
    57
    +            return self._create_bwrap_sandbox(*args, **kwargs)
    
    56 58
     
    
    57 59
         def check_sandbox_config(self, config):
    
    60
    +        if not self._local_sandbox_available:
    
    61
    +            # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
    
    62
    +            return True
    
    63
    +
    
    58 64
             if self._user_ns_available:
    
    59 65
                 # User namespace support allows arbitrary build UID/GID settings.
    
    60 66
                 return True
    
    ... ... @@ -66,11 +72,26 @@ class Linux(Platform):
    66 72
         ################################################
    
    67 73
         #              Private Methods                 #
    
    68 74
         ################################################
    
    69
    -    def _local_sandbox_available(self):
    
    70
    -        try:
    
    71
    -            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
    
    72
    -        except utils.ProgramNotFoundError:
    
    73
    -            return False
    
    75
    +
    
    76
    +    def _create_dummy_sandbox(self, *args, **kwargs):
    
    77
    +        reasons = []
    
    78
    +        if not self._have_fuse:
    
    79
    +            reasons.append("FUSE is unavailable")
    
    80
    +        if not self._have_good_bwrap:
    
    81
    +            if self._bwrap_exists:
    
    82
    +                reasons.append("`bwrap` is too old (bst needs at least 0.1.2)")
    
    83
    +            else:
    
    84
    +                reasons.append("`bwrap` executable not found")
    
    85
    +
    
    86
    +        kwargs['dummy_reason'] = " and ".join(reasons)
    
    87
    +        return SandboxDummy(*args, **kwargs)
    
    88
    +
    
    89
    +    def _create_bwrap_sandbox(self, *args, **kwargs):
    
    90
    +        from ..sandbox._sandboxbwrap import SandboxBwrap
    
    91
    +        # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    
    92
    +        kwargs['user_ns_available'] = self._user_ns_available
    
    93
    +        kwargs['die_with_parent_available'] = self._die_with_parent_available
    
    94
    +        return SandboxBwrap(*args, **kwargs)
    
    74 95
     
    
    75 96
         def _check_user_ns_available(self):
    
    76 97
             # Here, lets check if bwrap is able to create user namespaces,
    

  • buildstream/_scheduler/jobs/job.py
    ... ... @@ -119,6 +119,8 @@ class Job():
    119 119
             self._result = None                    # Return value of child action in the parent
    
    120 120
             self._tries = 0                        # Try count, for retryable jobs
    
    121 121
             self._skipped_flag = False             # Indicate whether the job was skipped.
    
    122
    +        self._terminated = False               # Whether this job has been explicitly terminated
    
    123
    +
    
    122 124
             # If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
    
    123 125
             #
    
    124 126
             self._retry_flag = True
    
    ... ... @@ -190,6 +192,8 @@ class Job():
    190 192
             # Terminate the process using multiprocessing API pathway
    
    191 193
             self._process.terminate()
    
    192 194
     
    
    195
    +        self._terminated = True
    
    196
    +
    
    193 197
         # terminate_wait()
    
    194 198
         #
    
    195 199
         # Wait for terminated jobs to complete
    
    ... ... @@ -273,18 +277,22 @@ class Job():
    273 277
         # running the integration commands).
    
    274 278
         #
    
    275 279
         # Args:
    
    276
    -    #     (int): The plugin identifier for this task
    
    280
    +    #     task_id (int): The plugin identifier for this task
    
    277 281
         #
    
    278 282
         def set_task_id(self, task_id):
    
    279 283
             self._task_id = task_id
    
    280 284
     
    
    281 285
         # skipped
    
    282 286
         #
    
    287
    +    # This will evaluate to True if the job was skipped
    
    288
    +    # during processing, or if it was forcefully terminated.
    
    289
    +    #
    
    283 290
         # Returns:
    
    284
    -    #    bool: True if the job was skipped while processing.
    
    291
    +    #    (bool): Whether the job should appear as skipped
    
    292
    +    #
    
    285 293
         @property
    
    286 294
         def skipped(self):
    
    287
    -        return self._skipped_flag
    
    295
    +        return self._skipped_flag or self._terminated
    
    288 296
     
    
    289 297
         #######################################################
    
    290 298
         #                  Abstract Methods                   #
    

  • buildstream/_scheduler/queues/queue.py
    ... ... @@ -326,16 +326,20 @@ class Queue():
    326 326
                               detail=traceback.format_exc())
    
    327 327
                 self.failed_elements.append(element)
    
    328 328
             else:
    
    329
    -
    
    330
    -            # No exception occured, handle the success/failure state in the normal way
    
    331 329
                 #
    
    330
    +            # No exception occurred in post processing
    
    331
    +            #
    
    332
    +
    
    333
    +            # All jobs get placed on the done queue for later processing.
    
    332 334
                 self._done_queue.append(job)
    
    333 335
     
    
    334
    -            if success:
    
    335
    -                if not job.skipped:
    
    336
    -                    self.processed_elements.append(element)
    
    337
    -                else:
    
    338
    -                    self.skipped_elements.append(element)
    
    336
    +            # A Job can be skipped whether or not it has failed,
    
    337
    +            # we want to only bookkeep them as processed or failed
    
    338
    +            # if they are not skipped.
    
    339
    +            if job.skipped:
    
    340
    +                self.skipped_elements.append(element)
    
    341
    +            elif success:
    
    342
    +                self.processed_elements.append(element)
    
    339 343
                 else:
    
    340 344
                     self.failed_elements.append(element)
    
    341 345
     
    

  • buildstream/_scheduler/scheduler.py
    ... ... @@ -387,6 +387,15 @@ class Scheduler():
    387 387
         # A loop registered event callback for keyboard interrupts
    
    388 388
         #
    
    389 389
         def _interrupt_event(self):
    
    390
    +
    
    391
    +        # FIXME: This should not be needed, but for some reason we receive an
    
    392
    +        #        additional SIGINT event when the user hits ^C a second time
    
    393
    +        #        to inform us that they really intend to terminate; even though
    
    394
    +        #        we have disconnected our handlers at this time.
    
    395
    +        #
    
    396
    +        if self.terminated:
    
    397
    +            return
    
    398
    +
    
    390 399
             # Leave this to the frontend to decide, if no
    
    391 400
             # interrupt callback was specified, then just terminate.
    
    392 401
             if self._interrupt_callback:
    

  • buildstream/_site.py
    ... ... @@ -78,18 +78,12 @@ def check_bwrap_version(major, minor, patch):
    78 78
             if not bwrap_path:
    
    79 79
                 return False
    
    80 80
             cmd = [bwrap_path, "--version"]
    
    81
    -        version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    
    81
    +        try:
    
    82
    +            version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    
    83
    +        except subprocess.CalledProcessError:
    
    84
    +            # Failure trying to run bubblewrap
    
    85
    +            return False
    
    82 86
             _bwrap_major, _bwrap_minor, _bwrap_patch = map(int, version.split("."))
    
    83 87
     
    
    84 88
         # Check whether the installed version meets the requirements
    
    85
    -    if _bwrap_major > major:
    
    86
    -        return True
    
    87
    -    elif _bwrap_major < major:
    
    88
    -        return False
    
    89
    -    else:
    
    90
    -        if _bwrap_minor > minor:
    
    91
    -            return True
    
    92
    -        elif _bwrap_minor < minor:
    
    93
    -            return False
    
    94
    -        else:
    
    95
    -            return _bwrap_patch >= patch
    89
    +    return (_bwrap_major, _bwrap_minor, _bwrap_patch) >= (major, minor, patch)

  • buildstream/element.py
    ... ... @@ -212,7 +212,7 @@ class Element(Plugin):
    212 212
             self.__staged_sources_directory = None  # Location where Element.stage_sources() was called
    
    213 213
             self.__tainted = None                   # Whether the artifact is tainted and should not be shared
    
    214 214
             self.__required = False                 # Whether the artifact is required in the current session
    
    215
    -        self.__build_result = None              # The result of assembling this Element
    
    215
    +        self.__build_result = None              # The result of assembling this Element (success, description, detail)
    
    216 216
             self._build_log_path = None            # The path of the build log for this Element
    
    217 217
     
    
    218 218
             # hash tables of loaded artifact metadata, hashed by key
    
    ... ... @@ -1379,10 +1379,10 @@ class Element(Plugin):
    1379 1379
                 if not vdirectory.is_empty():
    
    1380 1380
                     raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
    
    1381 1381
     
    
    1382
    -            # While mkdtemp is advertised as using the TMP environment variable, it
    
    1383
    -            # doesn't, so this explicit extraction is necesasry.
    
    1384
    -            tmp_prefix = os.environ.get("TMP", None)
    
    1385
    -            temp_staging_directory = tempfile.mkdtemp(prefix=tmp_prefix)
    
    1382
    +            # It's advantageous to have this temporary directory on
    
    1383
    +            # the same filing system as the rest of our cache.
    
    1384
    +            temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
    
    1385
    +            temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
    
    1386 1386
     
    
    1387 1387
                 try:
    
    1388 1388
                     workspace = self._get_workspace()
    
    ... ... @@ -1479,11 +1479,13 @@ class Element(Plugin):
    1479 1479
     
    
    1480 1480
             self._update_state()
    
    1481 1481
     
    
    1482
    -        if self._get_workspace() and self._cached():
    
    1482
    +        if self._get_workspace() and self._cached_success():
    
    1483
    +            assert utils._is_main_process(), \
    
    1484
    +                "Attempted to save workspace configuration from child process"
    
    1483 1485
                 #
    
    1484 1486
                 # Note that this block can only happen in the
    
    1485
    -            # main process, since `self._cached()` cannot
    
    1486
    -            # be true when assembly is completed in the task.
    
    1487
    +            # main process, since `self._cached_success()` cannot
    
    1488
    +            # be true when assembly is successful in the task.
    
    1487 1489
                 #
    
    1488 1490
                 # For this reason, it is safe to update and
    
    1489 1491
                 # save the workspaces configuration
    

  • buildstream/plugins/sources/git.py
    ... ... @@ -184,10 +184,18 @@ class GitMirror(SourceFetcher):
    184 184
                              cwd=self.mirror)
    
    185 185
     
    
    186 186
         def fetch(self, alias_override=None):
    
    187
    -        self.ensure(alias_override)
    
    188
    -        if not self.has_ref():
    
    189
    -            self._fetch(alias_override)
    
    190
    -        self.assert_ref()
    
    187
    +        # Resolve the URL for the message
    
    188
    +        resolved_url = self.source.translate_url(self.url,
    
    189
    +                                                 alias_override=alias_override,
    
    190
    +                                                 primary=self.primary)
    
    191
    +
    
    192
    +        with self.source.timed_activity("Fetching from {}"
    
    193
    +                                        .format(resolved_url),
    
    194
    +                                        silent_nested=True):
    
    195
    +            self.ensure(alias_override)
    
    196
    +            if not self.has_ref():
    
    197
    +                self._fetch(alias_override)
    
    198
    +            self.assert_ref()
    
    191 199
     
    
    192 200
         def has_ref(self):
    
    193 201
             if not self.ref:
    

  • buildstream/sandbox/_sandboxdummy.py
    ... ... @@ -23,6 +23,7 @@ from . import Sandbox
    23 23
     class SandboxDummy(Sandbox):
    
    24 24
         def __init__(self, *args, **kwargs):
    
    25 25
             super().__init__(*args, **kwargs)
    
    26
    +        self._reason = kwargs.get("dummy_reason", "no reason given")
    
    26 27
     
    
    27 28
         def run(self, command, flags, *, cwd=None, env=None):
    
    28 29
     
    
    ... ... @@ -37,4 +38,4 @@ class SandboxDummy(Sandbox):
    37 38
                                    "'{}'".format(command[0]),
    
    38 39
                                    reason='missing-command')
    
    39 40
     
    
    40
    -        raise SandboxError("This platform does not support local builds")
    41
    +        raise SandboxError("This platform does not support local builds: {}".format(self._reason))

  • buildstream/source.py
    ... ... @@ -965,28 +965,48 @@ class Source(Plugin):
    965 965
         # Tries to call fetch for every mirror, stopping once it succeeds
    
    966 966
         def __do_fetch(self, **kwargs):
    
    967 967
             project = self._get_project()
    
    968
    -        source_fetchers = self.get_source_fetchers()
    
    968
    +        context = self._get_context()
    
    969
    +
    
    970
    +        # Silence the STATUS messages which might happen as a result
    
    971
    +        # of checking the source fetchers.
    
    972
    +        with context.silence():
    
    973
    +            source_fetchers = self.get_source_fetchers()
    
    969 974
     
    
    970 975
             # Use the source fetchers if they are provided
    
    971 976
             #
    
    972 977
             if source_fetchers:
    
    973
    -            for fetcher in source_fetchers:
    
    974
    -                alias = fetcher._get_alias()
    
    975
    -                for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
    
    976
    -                    try:
    
    977
    -                        fetcher.fetch(uri)
    
    978
    -                    # FIXME: Need to consider temporary vs. permanent failures,
    
    979
    -                    #        and how this works with retries.
    
    980
    -                    except BstError as e:
    
    981
    -                        last_error = e
    
    982
    -                        continue
    
    983
    -
    
    984
    -                    # No error, we're done with this fetcher
    
    985
    -                    break
    
    986 978
     
    
    987
    -                else:
    
    988
    -                    # No break occurred, raise the last detected error
    
    989
    -                    raise last_error
    
    979
    +            # Use a contorted loop here, this is to allow us to
    
    980
    +            # silence the messages which can result from consuming
    
    981
    +            # the items of source_fetchers, if it happens to be a generator.
    
    982
    +            #
    
    983
    +            source_fetchers = iter(source_fetchers)
    
    984
    +            try:
    
    985
    +
    
    986
    +                while True:
    
    987
    +
    
    988
    +                    with context.silence():
    
    989
    +                        fetcher = next(source_fetchers)
    
    990
    +
    
    991
    +                    alias = fetcher._get_alias()
    
    992
    +                    for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
    
    993
    +                        try:
    
    994
    +                            fetcher.fetch(uri)
    
    995
    +                        # FIXME: Need to consider temporary vs. permanent failures,
    
    996
    +                        #        and how this works with retries.
    
    997
    +                        except BstError as e:
    
    998
    +                            last_error = e
    
    999
    +                            continue
    
    1000
    +
    
    1001
    +                        # No error, we're done with this fetcher
    
    1002
    +                        break
    
    1003
    +
    
    1004
    +                    else:
    
    1005
    +                        # No break occurred, raise the last detected error
    
    1006
    +                        raise last_error
    
    1007
    +
    
    1008
    +            except StopIteration:
    
    1009
    +                pass
    
    990 1010
     
    
    991 1011
             # Default codepath is to reinstantiate the Source
    
    992 1012
             #
    

  • buildstream/utils.py
    ... ... @@ -502,7 +502,7 @@ def get_bst_version():
    502 502
     
    
    503 503
     @contextmanager
    
    504 504
     def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
    
    505
    -                     errors=None, newline=None, closefd=True, opener=None):
    
    505
    +                     errors=None, newline=None, closefd=True, opener=None, tempdir=None):
    
    506 506
         """Save a file with a temporary name and rename it into place when ready.
    
    507 507
     
    
    508 508
         This is a context manager which is meant for saving data to files.
    
    ... ... @@ -529,8 +529,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
    529 529
         # https://bugs.python.org/issue8604
    
    530 530
     
    
    531 531
         assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
    
    532
    -    dirname = os.path.dirname(filename)
    
    533
    -    fd, tempname = tempfile.mkstemp(dir=dirname)
    
    532
    +    if tempdir is None:
    
    533
    +        tempdir = os.path.dirname(filename)
    
    534
    +    fd, tempname = tempfile.mkstemp(dir=tempdir)
    
    534 535
         os.close(fd)
    
    535 536
     
    
    536 537
         f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,
    
    ... ... @@ -562,6 +563,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
    562 563
     #
    
    563 564
     # Get the disk usage of a given directory in bytes.
    
    564 565
     #
    
    566
    +# This function assumes that files do not inadvertantly
    
    567
    +# disappear while this function is running.
    
    568
    +#
    
    565 569
     # Arguments:
    
    566 570
     #     (str) The path whose size to check.
    
    567 571
     #
    
    ... ... @@ -682,7 +686,7 @@ def _force_rmtree(rootpath, **kwargs):
    682 686
     
    
    683 687
         try:
    
    684 688
             shutil.rmtree(rootpath, **kwargs)
    
    685
    -    except shutil.Error as e:
    
    689
    +    except OSError as e:
    
    686 690
             raise UtilError("Failed to remove cache directory '{}': {}"
    
    687 691
                             .format(rootpath, e))
    
    688 692
     
    

  • setup.py
    ... ... @@ -54,12 +54,13 @@ REQUIRED_BWRAP_MINOR = 1
    54 54
     REQUIRED_BWRAP_PATCH = 2
    
    55 55
     
    
    56 56
     
    
    57
    -def exit_bwrap(reason):
    
    57
    +def warn_bwrap(reason):
    
    58 58
         print(reason +
    
    59
    -          "\nBuildStream requires Bubblewrap (bwrap) for"
    
    60
    -          " sandboxing the build environment. Install it using your package manager"
    
    61
    -          " (usually bwrap or bubblewrap)")
    
    62
    -    sys.exit(1)
    
    59
    +          "\nBuildStream requires Bubblewrap (bwrap {}.{}.{} or better),"
    
    60
    +          " during local builds, for"
    
    61
    +          " sandboxing the build environment.\nInstall it using your package manager"
    
    62
    +          " (usually bwrap or bubblewrap) otherwise you will be limited to"
    
    63
    +          " remote builds only.".format(REQUIRED_BWRAP_MAJOR, REQUIRED_BWRAP_MINOR, REQUIRED_BWRAP_PATCH))
    
    63 64
     
    
    64 65
     
    
    65 66
     def bwrap_too_old(major, minor, patch):
    
    ... ... @@ -76,18 +77,19 @@ def bwrap_too_old(major, minor, patch):
    76 77
             return False
    
    77 78
     
    
    78 79
     
    
    79
    -def assert_bwrap():
    
    80
    +def check_for_bwrap():
    
    80 81
         platform = os.environ.get('BST_FORCE_BACKEND', '') or sys.platform
    
    81 82
         if platform.startswith('linux'):
    
    82 83
             bwrap_path = shutil.which('bwrap')
    
    83 84
             if not bwrap_path:
    
    84
    -            exit_bwrap("Bubblewrap not found")
    
    85
    +            warn_bwrap("Bubblewrap not found")
    
    86
    +            return
    
    85 87
     
    
    86 88
             version_bytes = subprocess.check_output([bwrap_path, "--version"]).split()[1]
    
    87 89
             version_string = str(version_bytes, "utf-8")
    
    88 90
             major, minor, patch = map(int, version_string.split("."))
    
    89 91
             if bwrap_too_old(major, minor, patch):
    
    90
    -            exit_bwrap("Bubblewrap too old")
    
    92
    +            warn_bwrap("Bubblewrap too old")
    
    91 93
     
    
    92 94
     
    
    93 95
     ###########################################
    
    ... ... @@ -126,7 +128,7 @@ bst_install_entry_points = {
    126 128
     }
    
    127 129
     
    
    128 130
     if not os.environ.get('BST_ARTIFACTS_ONLY', ''):
    
    129
    -    assert_bwrap()
    
    131
    +    check_for_bwrap()
    
    130 132
         bst_install_entry_points['console_scripts'] += [
    
    131 133
             'bst = buildstream._frontend:cli'
    
    132 134
         ]
    

  • tests/frontend/mirror.py
    ... ... @@ -139,6 +139,82 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
    139 139
         result.assert_success()
    
    140 140
     
    
    141 141
     
    
    142
    +@pytest.mark.datafiles(DATA_DIR)
    
    143
    +@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
    
    144
    +@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
    
    145
    +def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
    
    146
    +    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
    
    147
    +    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
    
    148
    +    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
    
    149
    +    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
    
    150
    +    project_dir = os.path.join(str(tmpdir), 'project')
    
    151
    +    os.makedirs(project_dir)
    
    152
    +    element_dir = os.path.join(project_dir, 'elements')
    
    153
    +
    
    154
    +    # Create repo objects of the upstream and mirror
    
    155
    +    upstream_repo = create_repo('tar', upstream_repodir)
    
    156
    +    upstream_ref = upstream_repo.create(bin_files_path)
    
    157
    +    mirror_repo = upstream_repo.copy(mirror_repodir)
    
    158
    +    mirror_ref = upstream_ref
    
    159
    +    upstream_ref = upstream_repo.create(dev_files_path)
    
    160
    +
    
    161
    +    element = {
    
    162
    +        'kind': 'import',
    
    163
    +        'sources': [
    
    164
    +            upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
    
    165
    +        ]
    
    166
    +    }
    
    167
    +    element_name = 'test.bst'
    
    168
    +    element_path = os.path.join(element_dir, element_name)
    
    169
    +    full_repo = element['sources'][0]['url']
    
    170
    +    upstream_map, repo_name = os.path.split(full_repo)
    
    171
    +    alias = 'foo'
    
    172
    +    aliased_repo = alias + ':' + repo_name
    
    173
    +    element['sources'][0]['url'] = aliased_repo
    
    174
    +    full_mirror = mirror_repo.source_config()['url']
    
    175
    +    mirror_map, _ = os.path.split(full_mirror)
    
    176
    +    os.makedirs(element_dir)
    
    177
    +    _yaml.dump(element, element_path)
    
    178
    +
    
    179
    +    if ref_storage == 'project.refs':
    
    180
    +        # Manually set project.refs to avoid caching the repo prematurely
    
    181
    +        project_refs = {'projects': {
    
    182
    +            'test': {
    
    183
    +                element_name: [
    
    184
    +                    {'ref': upstream_ref}
    
    185
    +                ]
    
    186
    +            }
    
    187
    +        }}
    
    188
    +        project_refs_path = os.path.join(project_dir, 'project.refs')
    
    189
    +        _yaml.dump(project_refs, project_refs_path)
    
    190
    +
    
    191
    +    project = {
    
    192
    +        'name': 'test',
    
    193
    +        'element-path': 'elements',
    
    194
    +        'aliases': {
    
    195
    +            alias: upstream_map + "/"
    
    196
    +        },
    
    197
    +        'ref-storage': ref_storage
    
    198
    +    }
    
    199
    +    if mirror != 'no-mirror':
    
    200
    +        mirror_data = [{
    
    201
    +            'name': 'middle-earth',
    
    202
    +            'aliases': {alias: [mirror_map + '/']}
    
    203
    +        }]
    
    204
    +        if mirror == 'unrelated-mirror':
    
    205
    +            mirror_data.insert(0, {
    
    206
    +                'name': 'narnia',
    
    207
    +                'aliases': {'frob': ['http://www.example.com/repo']}
    
    208
    +            })
    
    209
    +        project['mirrors'] = mirror_data
    
    210
    +
    
    211
    +    project_file = os.path.join(project_dir, 'project.conf')
    
    212
    +    _yaml.dump(project, project_file)
    
    213
    +
    
    214
    +    result = cli.run(project=project_dir, args=['fetch', element_name])
    
    215
    +    result.assert_success()
    
    216
    +
    
    217
    +
    
    142 218
     @pytest.mark.datafiles(DATA_DIR)
    
    143 219
     @pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
    
    144 220
     def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
    

  • tests/frontend/workspace.py
    ... ... @@ -43,7 +43,8 @@ DATA_DIR = os.path.join(
    43 43
     )
    
    44 44
     
    
    45 45
     
    
    46
    -def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None, project_path=None):
    
    46
    +def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    47
    +                   project_path=None, element_attrs=None):
    
    47 48
         if not workspace_dir:
    
    48 49
             workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
    
    49 50
         if not project_path:
    
    ... ... @@ -69,6 +70,8 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
    69 70
                 repo.source_config(ref=ref)
    
    70 71
             ]
    
    71 72
         }
    
    73
    +    if element_attrs:
    
    74
    +        element = {**element, **element_attrs}
    
    72 75
         _yaml.dump(element,
    
    73 76
                    os.path.join(element_path,
    
    74 77
                                 element_name))
    
    ... ... @@ -854,3 +857,22 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
    854 857
     
    
    855 858
         # Check that the original /usr/bin/hello is not in the checkout
    
    856 859
         assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
    
    860
    +
    
    861
    +
    
    862
    +@pytest.mark.datafiles(DATA_DIR)
    
    863
    +def test_multiple_failed_builds(cli, tmpdir, datafiles):
    
    864
    +    element_config = {
    
    865
    +        "kind": "manual",
    
    866
    +        "config": {
    
    867
    +            "configure-commands": [
    
    868
    +                "unknown_command_that_will_fail"
    
    869
    +            ]
    
    870
    +        }
    
    871
    +    }
    
    872
    +    element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
    
    873
    +                                              "git", False, element_attrs=element_config)
    
    874
    +
    
    875
    +    for _ in range(2):
    
    876
    +        result = cli.run(project=project, args=["build", element_name])
    
    877
    +        assert "BUG" not in result.stderr
    
    878
    +        assert cli.get_element_state(project, element_name) != "cached"



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]