#
#  Copyright (C) 2016-2018 Codethink Limited
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2 of the License, or (at your option) any later version.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
#  Authors:
#        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>

"""
Element - Base element class
============================


.. _core_element_abstract_methods:

Abstract Methods
----------------
For loading and configuration purposes, Elements must implement the
:ref:`Plugin base class abstract methods <core_plugin_abstract_methods>`.


.. _core_element_build_phase:

Build Phase
~~~~~~~~~~~
The following methods are the foundation of the element's *build
phase*; they must be implemented by all Element classes, unless
explicitly stated otherwise. A sketch of a minimal element follows
the list of methods below.

* :func:`Element.configure_sandbox() <buildstream.element.Element.configure_sandbox>`

  Configures the :class:`.Sandbox`. This is called before anything else

* :func:`Element.stage() <buildstream.element.Element.stage>`

  Stage dependencies and :class:`Sources <buildstream.source.Source>` into
  the sandbox.

* :func:`Element.prepare() <buildstream.element.Element.prepare>`

  Call preparation methods that should only be performed once in the
  lifetime of a build directory (e.g. autotools' ./configure).

  **Optional**: If left unimplemented, this step will be skipped.

* :func:`Element.assemble() <buildstream.element.Element.assemble>`

  Perform the actual assembly of the element
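
The following is a sketch of how these methods might fit together in a
minimal element; the ``command`` configuration key and the variable names
are illustrative, and the other
:ref:`Plugin abstract methods <core_plugin_abstract_methods>` are omitted:

.. code:: python

  from buildstream import Element, ElementError, Scope

  class MyElement(Element):

      def configure(self, node):
          # Read our configured (illustrative) 'command' string
          self.command = self.node_subst_member(node, 'command')

      def configure_sandbox(self, sandbox):
          # Mark the directories we intend to use in the sandbox
          sandbox.mark_directory(self.get_variable('build_root'))
          sandbox.mark_directory(self.get_variable('install_root'))

      def stage(self, sandbox):
          # Stage our build dependencies, then our own sources
          with self.timed_activity("Staging dependencies", silent_nested=True):
              self.stage_dependency_artifacts(sandbox, Scope.BUILD)
          self.stage_sources(sandbox, self.get_variable('build_root'))

      def assemble(self, sandbox):
          # Run the configured command and collect the install root
          exitcode = sandbox.run(['sh', '-e', '-c', self.command], 0,
                                 env=self.get_environment(),
                                 cwd=self.get_variable('build_root'))
          if exitcode != 0:
              raise ElementError("Command failed with exitcode {}".format(exitcode))
          return self.get_variable('install_root')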


Miscellaneous
~~~~~~~~~~~~~
Miscellaneous abstract methods also exist:

* :func:`Element.generate_script() <buildstream.element.Element.generate_script>`

  For the purpose of ``bst source bundle``, an Element may optionally implement this.


Class Reference
---------------
"""

import os
import re
import stat
import copy
from collections import Mapping, OrderedDict
from contextlib import contextmanager
from enum import Enum
import tempfile
import time
import shutil

from . import _yaml
from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, ErrorDomain
from .utils import UtilError
from . import Plugin, Consistency
from . import SandboxFlags
from . import utils
from . import _cachekey
from . import _signals
from . import _site
from ._platform import Platform
from .sandbox._config import SandboxConfig


# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):

    # Includes strong cache keys of all build dependencies and their
    # runtime dependencies.
    STRONG = 1

    # Includes names of direct build dependencies but does not include
    # cache keys of dependencies.
    WEAK = 2


class Scope(Enum):
    """Types of scope for a given element"""

    ALL = 1
    """All elements which the given element depends on, following
    all elements required for building. Including the element itself.
    """

    BUILD = 2
    """All elements required for building the element, including their
    respective run dependencies. Not including the given element itself.
    """

    RUN = 3
    """All elements required for running the element. Including the element
    itself.
    """


class ElementError(BstError):
    """This exception should be raised by :class:`.Element` implementations
    to report errors to the user.

    Args:
       message (str): The error message to report to the user
       detail (str): A possibly multiline, more detailed error message
       reason (str): An optional machine readable reason string, used for test cases
    """
    def __init__(self, message, *, detail=None, reason=None):
        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)


class Element(Plugin):
    """Element()

    Base Element class.

    All elements derive from this class; this interface defines how
    the core will interact with Elements.
    """
    __defaults = {}               # The defaults from the yaml file and project
    __defaults_set = False        # Flag, in case there are no defaults at all
    __instantiated_elements = {}  # A hash of Element by MetaElement
    __redundant_source_refs = []  # A list of (source, ref) tuples which were redundantly specified

    BST_ARTIFACT_VERSION = 0
    """The element plugin's artifact version

    Elements must first set this to 1 if they change their unique key
    structure in a way that would produce a different key for the
    same input, or introduce a change in the build output for the
    same unique key. Further changes of this nature require bumping the
    artifact version.
    """

    BST_STRICT_REBUILD = False
    """Whether to rebuild this element in non strict mode if
    any of the dependencies have changed.
    """

    BST_FORBID_RDEPENDS = False
    """Whether to raise exceptions if an element has runtime dependencies.

    *Since: 1.2*
    """

    BST_FORBID_BDEPENDS = False
    """Whether to raise exceptions if an element has build dependencies.

    *Since: 1.2*
    """

    BST_FORBID_SOURCES = False
    """Whether to raise exceptions if an element has sources.

    *Since: 1.2*
    """

    def __init__(self, context, project, artifacts, meta, plugin_conf):

        super().__init__(meta.name, context, project, meta.provenance, "element")

        self.normal_name = os.path.splitext(self.name.replace(os.sep, '-'))[0]
        """A normalized element name

        This is the original element name with path separators replaced
        and the file extension stripped; it is used mainly for composing
        log file names and creating directory names and such.
        """

        self.__runtime_dependencies = []        # Direct runtime dependency Elements
        self.__build_dependencies = []          # Direct build dependency Elements
        self.__sources = []                     # List of Sources
        self.__cache_key_dict = None            # Dict for cache key calculation
        self.__cache_key = None                 # Our cached cache key
        self.__weak_cache_key = None            # Our cached weak cache key
        self.__strict_cache_key = None          # Our cached cache key for strict builds
        self.__artifacts = artifacts            # Artifact cache
        self.__consistency = Consistency.INCONSISTENT  # Cached overall consistency state
        self.__cached = None                    # Whether we have a cached artifact
        self.__strong_cached = None             # Whether we have a cached artifact matching the strict cache key
        self.__assemble_scheduled = False       # Element is scheduled to be assembled
        self.__assemble_done = False            # Element is assembled
        self.__tracking_scheduled = False       # Sources are scheduled to be tracked
        self.__tracking_done = False            # Sources have been tracked
        self.__pull_done = False                # Whether pull was attempted
        self.__log_path = None                  # Path to dedicated log file or None
        self.__splits = None                    # Resolved regex objects for computing split domains
        self.__whitelist_regex = None           # Resolved regex object to check if file is allowed to overlap
        self.__staged_sources_directory = None  # Location where Element.stage_sources() was called
        self.__tainted = None                   # Whether the artifact is tainted and should not be shared
        self.__required = False                 # Whether the artifact is required in the current session

        # hash tables of loaded artifact metadata, hashed by key
        self.__metadata_keys = {}                     # Strong and weak keys for this key
        self.__metadata_dependencies = {}             # Dictionary of dependency strong keys
        self.__metadata_workspaced = {}               # Boolean of whether it's workspaced
        self.__metadata_workspaced_dependencies = {}  # List of which dependencies are workspaced

        # Ensure we have loaded this class's defaults
        self.__init_defaults(plugin_conf)

        # Collect the composited variables and resolve them
        variables = self.__extract_variables(meta)
        variables['element-name'] = self.name
        self.__variables = Variables(variables)

        # Collect the composited environment now that we have variables
        env = self.__extract_environment(meta)
        self.__environment = env

        # Collect the environment nocache blacklist list
        nocache = self.__extract_env_nocache(meta)
        self.__env_nocache = nocache

        # Grab public domain data declared for this instance
        self.__public = self.__extract_public(meta)
        self.__dynamic_public = None

        # Collect the composited element configuration and
        # ask the element to configure itself.
        self.__config = self.__extract_config(meta)
        self.configure(self.__config)

        # Extract Sandbox config
        self.__sandbox_config = self.__extract_sandbox_config(meta)

    def __lt__(self, other):
        return self.name < other.name

    #############################################################
    #                      Abstract Methods                     #
    #############################################################
    def configure_sandbox(self, sandbox):
        """Configures the the sandbox for execution

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox

        Raises:
           (:class:`.ElementError`): When the element raises an error

        Elements must implement this method to configure the sandbox object
        for execution.
        """
        raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(
            kind=self.get_kind()))

    def stage(self, sandbox):
        """Stage inputs into the sandbox directories

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox

        Raises:
           (:class:`.ElementError`): When the element raises an error

        Elements must implement this method to populate the sandbox
        directory with data. This is done either by staging :class:`.Source`
        objects, by staging the artifacts of the elements this element depends
        on, or both.
        """
        raise ImplError("element plugin '{kind}' does not implement stage()".format(
            kind=self.get_kind()))

    def prepare(self, sandbox):
        """Run one-off preparation commands.

        This is run before assemble(), but is guaranteed to run only
        the first time if we build incrementally - this makes it
        possible to run configure-like commands without causing the
        entire element to rebuild.

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox

        Raises:
           (:class:`.ElementError`): When the element raises an error

        By default, this method does nothing, but may be overridden to
        allow configure-like commands.
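
        **Example:**

        .. code:: python

          # A sketch of a possible prepare() implementation; the
          # ``./configure`` command shown here is illustrative
          def prepare(self, sandbox):
              exitcode = sandbox.run(['sh', '-e', '-c', './configure'], 0,
                                     cwd=self.get_variable('build_root'))
              if exitcode != 0:
                  raise ElementError("Command failed with exitcode {}".format(exitcode))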

        *Since: 1.2*
        """
        pass

    def assemble(self, sandbox):
        """Assemble the output artifact

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox

        Returns:
           (str): An absolute path within the sandbox to collect the artifact from

        Raises:
           (:class:`.ElementError`): When the element raises an error

        Elements must implement this method to create an output
        artifact from its sources and dependencies.
        """
        raise ImplError("element plugin '{kind}' does not implement assemble()".format(
            kind=self.get_kind()))

    def generate_script(self):
        """Generate a build (sh) script to build this element

        Returns:
           (str): A string containing the shell commands required to build the element

        BuildStream guarantees the following environment when the
        generated script is run:

        - All element variables have been exported.
        - The cwd is `self.get_variable('build_root')/self.normal_name`.
        - $PREFIX is set to `self.get_variable('install_root')`.
        - The directory indicated by $PREFIX is an empty directory.

        Files are expected to be installed to $PREFIX.

        If the script fails, it is expected to return with an exit
        code != 0.
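
        **Example:**

        .. code:: python

          # A sketch of a possible implementation; the install
          # command shown here is illustrative
          def generate_script(self):
              return "make install PREFIX=\"${PREFIX}\"\n"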
        """
        raise ImplError("element plugin '{kind}' does not implement write_script()".format(
            kind=self.get_kind()))

    #############################################################
    #                       Public Methods                      #
    #############################################################
    def sources(self):
        """A generator function to enumerate the element sources

        Yields:
           (:class:`.Source`): The sources of this element
        """
        for source in self.__sources:
            yield source

    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False):
        """dependencies(scope, *, recurse=True)

        A generator function which yields the dependencies of the given element.

        If `recurse` is true (the default), the full dependencies will be listed
        in deterministic staging order, starting with the basemost elements in the
        given `scope`. Otherwise, only the direct dependencies in the given `scope`
        will be traversed, and the element itself will be omitted.

        Args:
           scope (:class:`.Scope`): The scope to iterate in
           recurse (bool): Whether to recurse

        Yields:
           (:class:`.Element`): The dependencies in `scope`, in deterministic staging order
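
        **Example:**

        .. code:: python

          # Report the names of our direct runtime dependencies
          for dep in self.dependencies(Scope.RUN, recurse=False):
              self.status("Runtime dependency", detail=dep.name)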
        """
        if visited is None:
            visited = {}

        full_name = self._get_full_name()

        scope_set = set((Scope.BUILD, Scope.RUN)) if scope == Scope.ALL else set((scope,))

        if full_name in visited and scope_set.issubset(visited[full_name]):
            return

        should_yield = False
        if full_name not in visited:
            visited[full_name] = scope_set
            should_yield = True
        else:
            visited[full_name] |= scope_set

        if recurse or not recursed:
            if scope == Scope.ALL:
                for dep in self.__build_dependencies:
                    yield from dep.dependencies(Scope.ALL, recurse=recurse,
                                                visited=visited, recursed=True)

                for dep in self.__runtime_dependencies:
                    if dep not in self.__build_dependencies:
                        yield from dep.dependencies(Scope.ALL, recurse=recurse,
                                                    visited=visited, recursed=True)

            elif scope == Scope.BUILD:
                for dep in self.__build_dependencies:
                    yield from dep.dependencies(Scope.RUN, recurse=recurse,
                                                visited=visited, recursed=True)

            elif scope == Scope.RUN:
                for dep in self.__runtime_dependencies:
                    yield from dep.dependencies(Scope.RUN, recurse=recurse,
                                                visited=visited, recursed=True)

        # Yield self only at the end, after anything needed has been traversed
        if should_yield and (recurse or recursed) and (scope == Scope.ALL or scope == Scope.RUN):
            yield self

    def search(self, scope, name):
        """Search for a dependency by name

        Args:
           scope (:class:`.Scope`): The scope to search
           name (str): The dependency to search for

        Returns:
           (:class:`.Element`): The dependency element, or None if not found.
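
        **Example:**

        .. code:: python

          # Search for a hypothetical 'base.bst' in our build scope
          dep = self.search(Scope.BUILD, 'base.bst')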
        """
        for dep in self.dependencies(scope):
            if dep.name == name:
                return dep

        return None

    def node_subst_member(self, node, member_name, default=utils._sentinel):
        """Fetch the value of a string node member, substituting any variables
        in the loaded value with the element contextual variables.

        Args:
           node (dict): A dictionary loaded from YAML
           member_name (str): The name of the member to fetch
           default (str): A value to return when *member_name* is not specified in *node*

        Returns:
           The value of *member_name* in *node*, otherwise *default*

        Raises:
           :class:`.LoadError`: When *member_name* is not found and no *default* was provided

        This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_member`
        except that it assumes the expected type is a string and will also perform variable
        substitutions.

        **Example:**

        .. code:: python

          # Expect a string 'name' in 'node', substituting any
          # variables in the returned string
          name = self.node_subst_member(node, 'name')
        """
        value = self.node_get_member(node, str, member_name, default)
        try:
            return self.__variables.subst(value)
        except LoadError as e:
            provenance = _yaml.node_get_provenance(node, key=member_name)
            raise LoadError(e.reason, '{}: {}'.format(provenance, str(e))) from e

    def node_subst_list(self, node, member_name):
        """Fetch a list from a node member, substituting any variables in the list

        Args:
          node (dict): A dictionary loaded from YAML
          member_name (str): The name of the member to fetch (a list)

        Returns:
          The list in *member_name*

        Raises:
          :class:`.LoadError`

        This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_member`
        except that it assumes the expected type is a list of strings and will also
        perform variable substitutions.
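
        **Example:**

        .. code:: python

          # Fetch a list of strings 'commands' from 'node', substituting
          # any variables in each list element
          commands = self.node_subst_list(node, 'commands')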
        """
        value = self.node_get_member(node, list, member_name)
        ret = []
        for index, x in enumerate(value):
            try:
                ret.append(self.__variables.subst(x))
            except LoadError as e:
                provenance = _yaml.node_get_provenance(node, key=member_name, indices=[index])
                raise LoadError(e.reason, '{}: {}'.format(provenance, str(e))) from e
        return ret

    def node_subst_list_element(self, node, member_name, indices):
        """Fetch the value of a list element from a node member, substituting any variables
        in the loaded value with the element contextual variables.

        Args:
           node (dict): A dictionary loaded from YAML
           member_name (str): The name of the member to fetch
           indices (list of int): List of indices to search, in case of nested lists

        Returns:
           The value of the list element in *member_name* at the specified *indices*

        Raises:
           :class:`.LoadError`

        This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_list_element`
        except that it assumes the expected type is a string and will also perform variable
        substitutions.

        **Example:**

        .. code:: python

          # Fetch the list itself
          strings = self.node_get_member(node, list, 'strings')

          # Iterate over the list indices
          for i in range(len(strings)):

              # Fetch the strings in this list, substituting content
              # with our element's variables if needed
              string = self.node_subst_list_element(
                  node, 'strings', [ i ])
        """
        value = self.node_get_list_element(node, str, member_name, indices)
        try:
            return self.__variables.subst(value)
        except LoadError as e:
            provenance = _yaml.node_get_provenance(node, key=member_name, indices=indices)
            raise LoadError(e.reason, '{}: {}'.format(provenance, str(e))) from e

    def compute_manifest(self, *, include=None, exclude=None, orphans=True):
        """Compute and return this element's selective manifest

        The manifest consists of the list of file paths in the
        artifact. The files in the manifest are selected according to
        the `include`, `exclude` and `orphans` parameters. If `include` is
        not specified then all files spoken for by any domain are
        included unless explicitly excluded with an `exclude` domain.

        Args:
           include (list): An optional list of domains to include files from
           exclude (list): An optional list of domains to exclude files from
           orphans (bool): Whether to include files not spoken for by split domains

        Yields:
           (str): The paths of the files in manifest
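
        **Example:**

        .. code:: python

          # Print the manifest of an (illustrative) 'runtime' split
          # domain, leaving out orphaned files
          for path in self.compute_manifest(include=['runtime'], orphans=False):
              self.status("Manifest entry", detail=path)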
        """
        self.__assert_cached()
        return self.__compute_splits(include, exclude, orphans)

    def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
        """Stage this element's output artifact in the sandbox

        This will stage the files from the artifact to the sandbox at the specified location.
        The files are selected for staging according to the `include`, `exclude` and `orphans`
        parameters; if `include` is not specified then all files spoken for by any domain
        are included unless explicitly excluded with an `exclude` domain.

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox
           path (str): An optional sandbox relative path
           include (list): An optional list of domains to include files from
           exclude (list): An optional list of domains to exclude files from
           orphans (bool): Whether to include files not spoken for by split domains
           update_mtimes (list): An optional list of files whose mtimes to set to the current time.

        Raises:
           (:class:`.ElementError`): If the element has not yet produced an artifact.

        Returns:
           (:class:`~.utils.FileListResult`): The result describing what happened while staging

        .. note::

           Directories in `dest` are replaced with files from `src`,
           unless the existing directory in `dest` is not empty in
           which case the path will be reported in the return value.

        **Example:**

        .. code:: python

          # Stage the dependencies for a build of 'self'
          for dep in self.dependencies(Scope.BUILD):
              dep.stage_artifact(sandbox)
        """

        if update_mtimes is None:
            update_mtimes = []

        # Time to use the artifact, check once more that it's there
        self.__assert_cached()

        with self.timed_activity("Staging {}/{}".format(self.name, self.__get_brief_display_key())):
            # Get the extracted artifact
            artifact_base, _ = self.__extract()
            artifact = os.path.join(artifact_base, 'files')

            # Hard link it into the staging area
            #
            basedir = sandbox.get_directory()
            stagedir = basedir \
                if path is None \
                else os.path.join(basedir, path.lstrip(os.sep))

            files = list(self.__compute_splits(include, exclude, orphans))

            # We must not hardlink files whose mtimes we want to update
            if update_mtimes:
                link_files = [f for f in files if f not in update_mtimes]
                copy_files = [f for f in files if f in update_mtimes]
            else:
                link_files = files
                copy_files = []

            link_result = utils.link_files(artifact, stagedir, files=link_files,
                                           report_written=True)
            copy_result = utils.copy_files(artifact, stagedir, files=copy_files,
                                           report_written=True)

            cur_time = time.time()

            for f in copy_result.files_written:
                os.utime(os.path.join(stagedir, f), times=(cur_time, cur_time))

        return link_result.combine(copy_result)

    def stage_dependency_artifacts(self, sandbox, scope, *, path=None,
                                   include=None, exclude=None, orphans=True):
        """Stage element dependencies in scope

        This is primarily a convenience wrapper around
        :func:`Element.stage_artifact() <buildstream.element.Element.stage_artifact>`
        which takes care of staging all the dependencies in `scope` and issuing the
        appropriate warnings.

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox
           scope (:class:`.Scope`): The scope to stage dependencies in
           path (str): An optional sandbox relative path
           include (list): An optional list of domains to include files from
           exclude (list): An optional list of domains to exclude files from
           orphans (bool): Whether to include files not spoken for by split domains

        Raises:
           (:class:`.ElementError`): If any of the dependencies in `scope` have not
                                     yet produced artifacts, or if forbidden overlaps
                                     occur.
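
        **Example:**

        .. code:: python

          # Stage the build dependencies into the sandbox root
          self.stage_dependency_artifacts(sandbox, Scope.BUILD)

          # Or stage runtime dependencies at an (illustrative) subdirectory
          self.stage_dependency_artifacts(sandbox, Scope.RUN,
                                          path='/buildstream-install')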
        """
        ignored = {}
        overlaps = OrderedDict()
        files_written = {}
        old_dep_keys = {}
        workspace = self._get_workspace()

        if self.__can_build_incrementally() and workspace.last_successful:
            old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)

        for dep in self.dependencies(scope):
            # If we are workspaced, and we therefore perform an
            # incremental build, we must ensure that we update the mtimes
            # of any files created by our dependencies since the last
            # successful build.
            to_update = None
            if workspace and old_dep_keys:
                dep.__assert_cached()

                if dep.name in old_dep_keys:
                    key_new = dep._get_cache_key()
                    key_old = old_dep_keys[dep.name]

                    # We only need to worry about modified and added
                    # files, since removed files will be picked up by
                    # build systems anyway.
                    to_update, _, added = self.__artifacts.diff(dep, key_old, key_new, subdir='files')
                    workspace.add_running_files(dep.name, to_update + added)
                    to_update.extend(workspace.running_files[dep.name])

                    # In case we are running `bst shell`, this happens in the
                    # main process and we need to update the workspace config
                    if utils._is_main_process():
                        self._get_context().get_workspaces().save_config()

            result = dep.stage_artifact(sandbox,
                                        path=path,
                                        include=include,
                                        exclude=exclude,
                                        orphans=orphans,
                                        update_mtimes=to_update)
            if result.overwritten:
                for overwrite in result.overwritten:
                    # Completely new overwrite
                    if overwrite not in overlaps:
                        # Find the overwritten element by checking where we've
                        # written the element before
                        for elm, contents in files_written.items():
                            if overwrite in contents:
                                overlaps[overwrite] = [elm, dep.name]
                    else:
                        overlaps[overwrite].append(dep.name)
            files_written[dep.name] = result.files_written

            if result.ignored:
                ignored[dep.name] = result.ignored

        if overlaps:
            overlap_error = overlap_warning = False
            error_detail = warning_detail = "Staged files overwrite existing files in staging area:\n"
            for f, elements in overlaps.items():
                overlap_error_elements = []
                overlap_warning_elements = []
                # The bottom item overlaps nothing
                overlapping_elements = elements[1:]
                for elm in overlapping_elements:
                    element = self.search(scope, elm)
                    element_project = element._get_project()
                    if not element.__file_is_whitelisted(f):
                        if element_project.fail_on_overlap:
                            overlap_error_elements.append(elm)
                            overlap_error = True
                        else:
                            overlap_warning_elements.append(elm)
                            overlap_warning = True

                warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
                error_detail += _overlap_error_detail(f, overlap_error_elements, elements)

            if overlap_warning:
                self.warn("Non-whitelisted overlaps detected", detail=warning_detail)
            if overlap_error:
                raise ElementError("Non-whitelisted overlaps detected and fail-on-overlaps is set",
                                   detail=error_detail, reason="overlap-error")

        if ignored:
            detail = "Not staging files which would replace non-empty directories:\n"
            for key, value in ignored.items():
                detail += "\nFrom {}:\n".format(key)
                detail += "  " + "  ".join(["/" + f + "\n" for f in value])
            self.warn("Ignored files", detail=detail)

    def integrate(self, sandbox):
        """Integrate currently staged filesystem against this artifact.

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox

        This modifies the sysroot staged inside the sandbox so that
        the sysroot is *integrated*. Only an *integrated* sandbox
        may be trusted for running the software therein, as the integration
        commands will create and update important system cache files
        required for running the installed software (such as the ld.so.cache).
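
        **Example:**

        .. code:: python

          # Stage and then integrate the runtime dependencies,
          # before running anything in the sandbox
          self.stage_dependency_artifacts(sandbox, Scope.RUN)
          for dep in self.dependencies(Scope.RUN):
              dep.integrate(sandbox)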
        """
        bstdata = self.get_public_data('bst')
        environment = self.get_environment()

        if bstdata is not None:
            commands = self.node_get_member(bstdata, list, 'integration-commands', [])
            for i in range(len(commands)):
                cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
                self.status("Running integration command", detail=cmd)
                exitcode = sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/')
                if exitcode != 0:
                    raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))

    def stage_sources(self, sandbox, directory):
        """Stage this element's sources to a directory in the sandbox

        Args:
           sandbox (:class:`.Sandbox`): The build sandbox
           directory (str): An absolute path within the sandbox to stage the sources at
        """

        # Hold on to the location where a plugin decided to stage sources,
        # this will be used to reconstruct the failed sysroot properly
        # after a failed build.
        #
        assert self.__staged_sources_directory is None
        self.__staged_sources_directory = directory

        self._stage_sources_in_sandbox(sandbox, directory)

    def get_public_data(self, domain):
        """Fetch public data on this element

        Args:
           domain (str): A public domain name to fetch data for

        Returns:
           (dict): The public data dictionary for the given domain

        .. note::

           This can only be called in the abstract methods which are
           called as a part of the :ref:`build phase <core_element_build_phase>`
           and never before.
        """
        if self.__dynamic_public is None:
            self.__load_public_data()

        data = self.__dynamic_public.get(domain)
        if data is not None:
            data = _yaml.node_copy(data)

        return data

    def set_public_data(self, domain, data):
        """Set public data on this element

        Args:
           domain (str): A public domain name to set data for
           data (dict): The public data dictionary for the given domain

        This allows an element to dynamically mutate public data of
        elements or add new domains as the result of successful completion
        of the :func:`Element.assemble() <buildstream.element.Element.assemble>`
        method.
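
        **Example:**

        .. code:: python

          # From within assemble(), add an (illustrative) integration
          # command to this element's 'bst' public data domain
          bstdata = self.get_public_data('bst')
          bstdata['integration-commands'] = ['ldconfig']
          self.set_public_data('bst', bstdata)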
        """
        if self.__dynamic_public is None:
            self.__load_public_data()

        if data is not None:
            data = _yaml.node_copy(data)

        self.__dynamic_public[domain] = data

    def get_environment(self):
        """Fetch the environment suitable for running in the sandbox

        Returns:
           (dict): A dictionary of string key/values suitable for passing
           to :func:`Sandbox.run() <buildstream.sandbox.Sandbox.run>`
        """
        return _yaml.node_sanitize(self.__environment)

    def get_variable(self, varname):
        """Fetch the value of a variable resolved for this element.

        Args:
           varname (str): The name of the variable to fetch

        Returns:
           (str): The resolved value for *varname*, or None if no
           variable was declared with the given name.
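
        **Example:**

        .. code:: python

          # Fetch the install root configured for this element
          install_root = self.get_variable('install_root')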
        """
        if varname in self.__variables.variables:
            return self.__variables.variables[varname]

        return None

    #############################################################
    #            Private Methods used in BuildStream            #
    #############################################################

    # _new_from_meta():
    #
    # Recursively instantiate a new Element instance, its sources
    # and its dependencies from a meta element.
    #
    # Args:
    #    artifacts (ArtifactCache): The artifact cache
    #    meta (MetaElement): The meta element
    #
    # Returns:
    #    (Element): A newly created Element instance
    #
    @classmethod
    def _new_from_meta(cls, meta, artifacts):

        if meta in cls.__instantiated_elements:
            return cls.__instantiated_elements[meta]

        project = meta.project
        element = project.create_element(artifacts, meta)
        cls.__instantiated_elements[meta] = element

        # Instantiate sources
        for meta_source in meta.sources:
            source = project.create_source(meta_source)
            redundant_ref = source._load_ref()
            element.__sources.append(source)

            # Collect redundant refs which occurred at load time
            if redundant_ref is not None:
                cls.__redundant_source_refs.append((source, redundant_ref))

        # Instantiate dependencies
        for meta_dep in meta.dependencies:
            dependency = Element._new_from_meta(meta_dep, artifacts)
            element.__runtime_dependencies.append(dependency)
        for meta_dep in meta.build_dependencies:
            dependency = Element._new_from_meta(meta_dep, artifacts)
            element.__build_dependencies.append(dependency)

        return element

    # _get_redundant_source_refs()
    #
    # Fetches a list of (Source, ref) tuples of all the Sources
    # which were loaded with a ref specified in the element declaration
    # for projects which use project.refs ref-storage.
    #
    # This is used to produce a warning
    @classmethod
    def _get_redundant_source_refs(cls):
        return cls.__redundant_source_refs

    # _reset_load_state()
    #
    # This is called by Pipeline.cleanup() and is used to
    # reset the loader state between multiple sessions.
    #
    @classmethod
    def _reset_load_state(cls):
        cls.__instantiated_elements = {}
        cls.__redundant_source_refs = []

    # _get_consistency()
    #
    # Returns cached consistency state
    #
    def _get_consistency(self):
        return self.__consistency

    # _cached():
    #
    # Returns:
    #    (bool): Whether this element is already present in
    #            the artifact cache
    #
    def _cached(self):
        return self.__cached

    # _buildable():
    #
    # Returns:
    #    (bool): Whether this element can currently be built
    #
    def _buildable(self):
        if self._get_consistency() != Consistency.CACHED:
            return False

        for dependency in self.dependencies(Scope.BUILD):
            # In non-strict mode an element's strong cache key may not be available yet
            # even though an artifact is available in the local cache. This can happen
            # if the pull job is still pending as the remote cache may have an artifact
            # that matches the strict cache key, which is preferred over a locally
            # cached artifact with a weak cache key match.
            if not dependency._cached() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
                return False

        if not self.__assemble_scheduled:
            return False

        return True

    # _get_cache_key():
    #
    # Returns the cache key
    #
    # Args:
    #    strength (_KeyStrength): Either STRONG or WEAK key strength
    #
    # Returns:
    #    (str): A hex digest cache key for this Element, or None
    #
    # None is returned if information for the cache key is missing.
    #
    def _get_cache_key(self, strength=_KeyStrength.STRONG):
        if strength == _KeyStrength.STRONG:
            return self.__cache_key
        else:
            return self.__weak_cache_key

    # _can_query_cache():
    #
    # Returns whether the cache key required for cache queries is available.
    #
    # Returns:
    #    (bool): True if cache can be queried
    #
    def _can_query_cache(self):
        # If build has already been scheduled, we know that the element is
        # not cached and thus can allow cache query even if the strict cache key
        # is not available yet.
        # This special case is required for workspaced elements to prevent
        # them from getting blocked in the pull queue.
        if self.__assemble_scheduled:
            return True
        # cache cannot be queried until strict cache key is available
        return self.__strict_cache_key is not None

    # _update_state()
    #
    # Keep track of element state. Calculate cache keys if possible and
    # check whether artifacts are cached.
    #
    # This must be called whenever the state of an element may have changed.
    #
    def _update_state(self):
        context = self._get_context()

        # Compute and determine consistency of sources
        self.__update_source_state()

        if self._get_consistency() == Consistency.INCONSISTENT:
            # Tracking may still be pending
            return

        if self._get_workspace() and self.__assemble_scheduled:
            # If we have an active workspace and are going to build, then
            # discard current cache key values as their correct values can only
            # be calculated once the build is complete
            self.__cache_key_dict = None
            self.__cache_key = None
            self.__weak_cache_key = None
            self.__strict_cache_key = None
            self.__strong_cached = None
            return

        if self.__weak_cache_key is None:
            # Calculate weak cache key
            # Weak cache key includes names of direct build dependencies
            # but does not include keys of dependencies.
            if self.BST_STRICT_REBUILD:
                dependencies = [
                    e._get_cache_key(strength=_KeyStrength.WEAK)
                    for e in self.dependencies(Scope.BUILD)
                ]
            else:
                dependencies = [
                    e.name for e in self.dependencies(Scope.BUILD, recurse=False)
                ]

            self.__weak_cache_key = self.__calculate_cache_key(dependencies)

            if self.__weak_cache_key is None:
                # Weak cache key could not be calculated yet
                return

        if not context.get_strict():
            # Full cache query in non-strict mode requires both the weak and
            # strict cache keys. However, we need to determine as early as
            # possible whether a build is pending to discard unstable cache keys
            # for workspaced elements. For this cache check the weak cache keys
            # are sufficient. However, don't update the `cached` attributes
            # until the full cache query below.
            cached = self.__artifacts.contains(self, self.__weak_cache_key)
            if (not self.__assemble_scheduled and not self.__assemble_done and
                    not cached and not self._pull_pending() and self._is_required()):
                self._schedule_assemble()
                return

        if self.__strict_cache_key is None:
            dependencies = [
                e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
            ]
            self.__strict_cache_key = self.__calculate_cache_key(dependencies)

            if self.__strict_cache_key is None:
                # Strict cache key could not be calculated yet
                return

        # Query caches now that the weak and strict cache keys are available
        key_for_cache_lookup = self.__strict_cache_key if context.get_strict() else self.__weak_cache_key
        if not self.__cached:
            self.__cached = self.__artifacts.contains(self, key_for_cache_lookup)
        if not self.__strong_cached:
            self.__strong_cached = self.__artifacts.contains(self, self.__strict_cache_key)

        if (not self.__assemble_scheduled and not self.__assemble_done and
                not self.__cached and not self._pull_pending() and self._is_required()):
            # Workspaced sources are considered unstable if a build is pending
            # as the build will modify the contents of the workspace.
            # Determine as early as possible if a build is pending to discard
            # unstable cache keys.
            self._schedule_assemble()
            return

        if self.__cache_key is None:
            # Calculate strong cache key
            if context.get_strict():
                self.__cache_key = self.__strict_cache_key
            elif self._pull_pending():
                # Effective strong cache key is unknown until after the pull
                pass
            elif self._cached():
                # Load the strong cache key from the artifact
                strong_key, _ = self.__get_artifact_metadata_keys()
                self.__cache_key = strong_key
            elif self.__assemble_scheduled or self.__assemble_done:
                # Artifact will or has been built, not downloaded
                dependencies = [
                    e._get_cache_key() for e in self.dependencies(Scope.BUILD)
                ]
                self.__cache_key = self.__calculate_cache_key(dependencies)

            if self.__cache_key is None:
                # Strong cache key could not be calculated yet
                return

    # _get_display_key():
    #
    # Returns cache keys for display purposes
    #
    # Returns:
    #    (str): A full hex digest cache key for this Element
    #    (str): An abbreviated hex digest cache key for this Element
    #    (bool): True if key should be shown as dim, False otherwise
    #
    # Question marks are returned if information for the cache key is missing.
    #
    def _get_display_key(self):
        context = self._get_context()
        dim_key = True

        cache_key = self._get_cache_key()

        if not cache_key:
            cache_key = "{:?<64}".format('')
        elif self._get_cache_key() == self.__strict_cache_key:
            # Strong cache key used in this session matches cache key
            # that would be used in strict build mode
            dim_key = False

        length = min(len(cache_key), context.log_key_length)
        return (cache_key, cache_key[0:length], dim_key)

    # _preflight():
    #
    # A wrapper for calling the abstract preflight() method on
    # the element and its sources.
    #
    def _preflight(self):

        if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
            if any(self.dependencies(Scope.RUN, recurse=False)) or any(self.dependencies(Scope.BUILD, recurse=False)):
                raise ElementError("{}: Dependencies are forbidden for '{}' elements"
                                   .format(self, self.get_kind()), reason="element-forbidden-depends")

        if self.BST_FORBID_RDEPENDS:
            if any(self.dependencies(Scope.RUN, recurse=False)):
                raise ElementError("{}: Runtime dependencies are forbidden for '{}' elements"
                                   .format(self, self.get_kind()), reason="element-forbidden-rdepends")

        if self.BST_FORBID_BDEPENDS:
            if any(self.dependencies(Scope.BUILD, recurse=False)):
                raise ElementError("{}: Build dependencies are forbidden for '{}' elements"
                                   .format(self, self.get_kind()), reason="element-forbidden-bdepends")

        if self.BST_FORBID_SOURCES:
            if any(self.sources()):
                raise ElementError("{}: Sources are forbidden for '{}' elements"
                                   .format(self, self.get_kind()), reason="element-forbidden-sources")

        try:
            self.preflight()
        except BstError as e:
            # Prepend provenance to the error
            raise ElementError("{}: {}".format(self, e), reason=e.reason) from e

        # Preflight the sources
        for source in self.sources():
            source._preflight()

    # _schedule_tracking():
    #
    # Force an element's state to be inconsistent, such that its
    # sources appear to be inconsistent.
    #
    # This is used across the pipeline in sessions where the
    # elements in question are going to be tracked, causing the
    # pipeline to rebuild safely by ensuring cache key recalculation
    # and reinterrogation of element state after tracking of elements
    # succeeds.
    #
    def _schedule_tracking(self):
        self.__tracking_scheduled = True
        self._update_state()

    # _tracking_done():
    #
    # This is called in the main process after the element has been tracked
    #
    def _tracking_done(self):
        assert self.__tracking_scheduled

        self.__tracking_scheduled = False
        self.__tracking_done = True

        self._update_state()

    # _track():
    #
    # Calls track() on the Element sources
    #
    # Raises:
    #    SourceError: If one of the element sources has an error
    #
    # Returns:
    #    (list): A list of Source object ids and their new references
    #
    def _track(self):
        refs = []
        for source in self.__sources:
            old_ref = source.get_ref()
            new_ref = source._track()
            refs.append((source._get_unique_id(), new_ref))

            # Complementary warning that the new ref will be unused.
            if old_ref != new_ref and self._get_workspace():
                detail = "This source has an open workspace.\n" \
                    + "To start using the new reference, please close the existing workspace."
                source.warn("Updated reference will be ignored as source has open workspace", detail=detail)

        return refs

    # _prepare_sandbox():
    #
    # This stages things for _shell() (below), and is also used to
    # stage things by the `bst checkout` codepath.
    #
    @contextmanager
    def _prepare_sandbox(self, scope, directory, integrate=True):

        with self.__sandbox(directory, config=self.__sandbox_config) as sandbox:

            # Configure always comes first, and we need it.
            self.configure_sandbox(sandbox)

            # Stage something if we need it
            if not directory:
                if scope == Scope.BUILD:
                    self.stage(sandbox)
                elif scope == Scope.RUN:
                    # Stage deps in the sandbox root
                    with self.timed_activity("Staging dependencies", silent_nested=True):
                        self.stage_dependency_artifacts(sandbox, scope)

                    # Run any integration commands provided by the dependencies
                    # once they are all staged and ready
                    if integrate:
                        with self.timed_activity("Integrating sandbox"):
                            for dep in self.dependencies(scope):
                                dep.integrate(sandbox)

            yield sandbox

    # _stage_sources_in_sandbox():
    #
    # Stage this element's sources to a directory inside sandbox
    #
    # Args:
    #     sandbox (:class:`.Sandbox`): The build sandbox
    #     directory (str): An absolute path to stage the sources at
    #     mount_workspaces (bool): mount workspaces if True, copy otherwise
    #
    def _stage_sources_in_sandbox(self, sandbox, directory, mount_workspaces=True):

        # Only artifact caches that implement diff() are allowed to
        # perform incremental builds.
        if mount_workspaces and self.__can_build_incrementally():
            workspace = self._get_workspace()
            sandbox.mark_directory(directory)
            sandbox._set_mount_source(directory, workspace.get_absolute_path())

        # Stage all sources that need to be copied
        sandbox_root = sandbox.get_directory()
        host_directory = os.path.join(sandbox_root, directory.lstrip(os.sep))
        self._stage_sources_at(host_directory, mount_workspaces=mount_workspaces)

    # _stage_sources_at():
    #
    # Stage this element's sources to a directory
    #
    # Args:
    #     directory (str): An absolute path to stage the sources at
    #     mount_workspaces (bool): mount workspaces if True, copy otherwise
    #
    def _stage_sources_at(self, directory, mount_workspaces=True):
        with self.timed_activity("Staging sources", silent_nested=True):

            if os.path.isdir(directory) and os.listdir(directory):
                raise ElementError("Staging directory '{}' is not empty".format(directory))

            workspace = self._get_workspace()
            if workspace:
                # If mount_workspaces is set and we're doing incremental builds,
                # the workspace is already mounted into the sandbox.
                if not (mount_workspaces and self.__can_build_incrementally()):
                    with self.timed_activity("Staging local files at {}".format(workspace.path)):
                        workspace.stage(directory)
            else:
                # No workspace, stage directly
                for source in self.sources():
                    source._stage(directory)

        # Ensure deterministic mtime of sources at build time
        utils._set_deterministic_mtime(directory)
        # Ensure deterministic owners of sources at build time
        utils._set_deterministic_user(directory)

    # _set_required():
    #
    # Mark this element and its runtime dependencies as required.
    # This unblocks pull/fetch/build.
    #
    def _set_required(self):
        if self.__required:
            # Already done
            return

        self.__required = True

        # Request artifacts of runtime dependencies
        for dep in self.dependencies(Scope.RUN, recurse=False):
            dep._set_required()

        self._update_state()

    # _is_required():
    #
    # Returns whether this element has been marked as required.
    #
    def _is_required(self):
        return self.__required

    # _schedule_assemble():
    #
    # This is called in the main process before the element is assembled
    # in a subprocess.
    #
    def _schedule_assemble(self):
        assert self._is_required()
        assert not self.__assemble_scheduled
        self.__assemble_scheduled = True

        # Requests artifacts of build dependencies
        for dep in self.dependencies(Scope.BUILD, recurse=False):
            dep._set_required()

        # Invalidate workspace key as the build modifies the workspace directory
        workspace = self._get_workspace()
        if workspace:
            workspace.invalidate_key()

        self._update_state()

    # _assemble_done():
    #
    # This is called in the main process after the element has been assembled
    # and in a subprocess after assembly completes.
    #
    # This will result in updating the element state.
    #
    def _assemble_done(self):
        assert self.__assemble_scheduled

        self.__assemble_scheduled = False
        self.__assemble_done = True

        self._update_state()

        if self._get_workspace() and self._cached():
            #
            # Note that this block can only happen in the
            # main process, since `self._cached()` cannot
            # be true when assembly is completed in the task.
            #
            # For this reason, it is safe to update and
            # save the workspaces configuration
            #
            key = self._get_cache_key()
            workspace = self._get_workspace()
            workspace.last_successful = key
            workspace.clear_running_files()
            self._get_context().get_workspaces().save_config()

    # _assemble():
    #
    # Internal method for running the entire build phase.
    #
    # This will:
    #   - Prepare a sandbox for the build
    #   - Call the public abstract methods for the build phase
    #   - Cache the resulting artifact
    #
    def _assemble(self):

        # Assert call ordering
        assert not self._cached()

        context = self._get_context()
        with self._output_file() as output_file:

            # Explicitly clean it up, keep the build dir around if exceptions are raised
            os.makedirs(context.builddir, exist_ok=True)
            rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)

            # Cleanup the build directory on explicit SIGTERM
            def cleanup_rootdir():
                utils._force_rmtree(rootdir)

            with _signals.terminator(cleanup_rootdir), \
                self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox:  # nopep8

                sandbox_root = sandbox.get_directory()

                # By default, the dynamic public data is the same as the static public data.
                # The plugin's assemble() method may modify this, though.
                self.__dynamic_public = _yaml.node_copy(self.__public)

                # Call the abstract plugin methods
                try:
                    # Step 1 - Configure
                    self.configure_sandbox(sandbox)
                    # Step 2 - Stage
                    self.stage(sandbox)
                    # Step 3 - Prepare
                    self.__prepare(sandbox)
                    # Step 4 - Assemble
                    collect = self.assemble(sandbox)
                except BstError as e: