Commit acc5760b authored by Valentin David
Browse files

Add support for importing dependencies in sysroots.

This feature is useful when bootstrapping a system.
parent 2d012a21
Loading
Loading
Loading
Loading
Loading
+16 −2
Original line number Diff line number Diff line
@@ -72,10 +72,24 @@ class LoadElement():
            'variables', 'environment', 'environment-nocache',
            'config', 'public', 'description',
            'build-depends', 'runtime-depends',
            'sysroots',
        ])

        self.deps = []
        sysroots = _yaml.node_get(node, list, 'sysroots', default_value=[])
        for sysroot in sysroots:
            _yaml.node_validate(sysroot, ['path', 'depends', 'build-depends'])
            path = _yaml.node_get(sysroot, str, 'path')
            for dep in _extract_depends_from_node(sysroot):
                if dep.dep_type == Symbol.RUNTIME:
                    raise LoadError(LoadErrorReason.INVALID_DATA,
                                    "{}: Sysroot'ed dependencies can not be of type 'runtime'"
                                    .format(dep.provenance, dep.dep_type))
                self.deps.append((path, dep))

        # Extract the Dependencies
        self.deps = _extract_depends_from_node(self.node)
        for dep in _extract_depends_from_node(self.node):
            self.deps.append(('/', dep))

    # depends():
    #
@@ -101,7 +115,7 @@ class LoadElement():
            return

        self._dep_cache = {}
        for dep in self.deps:
        for _, dep in self.deps:
            elt = self._loader.get_element_for_dep(dep)

            # Ensure the cache of the element we depend on
+14 −7
Original line number Diff line number Diff line
@@ -121,7 +121,7 @@ class Loader():
                junction, name, loader = self._parse_name(target, rewritable, ticker,
                                                          fetch_subprojects=fetch_subprojects)
                loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
                deps.append(Dependency(name, junction=junction))
                deps.append(('/', Dependency(name, junction=junction)))
                profile_end(Topics.LOAD_PROJECT, target)

        #
@@ -269,7 +269,7 @@ class Loader():
        self._elements[filename] = element

        # Load all dependency files for the new LoadElement
        for dep in element.deps:
        for _, dep in element.deps:
            if dep.junction:
                self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -330,7 +330,7 @@ class Loader():
        # Push / Check each dependency / Pop
        check_elements[element_name] = True
        sequence.append(element_name)
        for dep in element.deps:
        for _, dep in element.deps:
            loader = self._get_loader_for_dep(dep)
            loader._check_circular_deps(dep.name, check_elements, validated, sequence)
        del check_elements[element_name]
@@ -365,14 +365,21 @@ class Loader():
        if visited.get(element_name) is not None:
            return

        for dep in element.deps:
        for _, dep in element.deps:
            loader = self._get_loader_for_dep(dep)
            loader._sort_dependencies(dep.name, visited=visited)

        def dependency_cmp(dep_a, dep_b):
        def dependency_cmp(sdep_a, sdep_b):
            sysroot_a, dep_a = sdep_a
            sysroot_b, dep_b = sdep_b
            element_a = self.get_element_for_dep(dep_a)
            element_b = self.get_element_for_dep(dep_b)

            if sysroot_a < sysroot_b:
                return -1
            if sysroot_b < sysroot_a:
                return 1

            # Sort on inter element dependency first
            if element_a.depends(element_b):
                return 1
@@ -471,11 +478,11 @@ class Loader():
        self._meta_elements[element_name] = meta_element

        # Descend
        for dep in element.deps:
        for sysroot, dep in element.deps:
            loader = self._get_loader_for_dep(dep)
            meta_dep = loader._collect_element(dep.name)
            if dep.dep_type != 'runtime':
                meta_element.build_dependencies.append(meta_dep)
                meta_element.build_dependencies.append((sysroot, meta_dep))
            if dep.dep_type != 'build':
                meta_element.dependencies.append(meta_dep)

+4 −3
Original line number Diff line number Diff line
@@ -213,8 +213,9 @@ class BuildElement(Element):

        # Run any integration commands provided by the dependencies
        # once they are all staged and ready
        with self.timed_activity("Integrating sandbox"):
            for dep in self.dependencies(Scope.BUILD):
        with self.timed_activity("Integrating sandbox (buildelement)"):
            for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
                if sysroot == '/':
                    dep.integrate(sandbox)

        # Stage sources in the build root
+64 −37
Original line number Diff line number Diff line
@@ -375,7 +375,8 @@ class Element(Plugin):
        for source in self.__sources:
            yield source

    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False):
    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False,
                     with_sysroot=False, sysroot='/'):
        """dependencies(scope, *, recurse=True)

        A generator function which yields the dependencies of the given element.
@@ -400,39 +401,55 @@ class Element(Plugin):

        scope_set = set((Scope.BUILD, Scope.RUN)) if scope == Scope.ALL else set((scope,))

        if full_name in visited and scope_set.issubset(visited[full_name]):
        if with_sysroot:
            key = (sysroot, full_name)
        else:
            key = full_name

        if key in visited and scope_set.issubset(visited[key]):
            return

        should_yield = False
        if full_name not in visited:
            visited[full_name] = scope_set
        if key not in visited:
            visited[key] = scope_set
            should_yield = True
        else:
            visited[full_name] |= scope_set
            visited[key] |= scope_set

        if recurse or not recursed:
            if scope == Scope.ALL:
                for dep in self.__build_dependencies:
                build_deps = []
                for dep_sysroot, dep in self.__build_dependencies:
                    new_sysroot = self._subst_string(dep_sysroot) if not recursed else sysroot
                    yield from dep.dependencies(Scope.ALL, recurse=recurse,
                                                visited=visited, recursed=True)
                                                visited=visited, recursed=True,
                                                sysroot=new_sysroot, with_sysroot=with_sysroot)
                    build_deps.append(dep)

                for dep in self.__runtime_dependencies:
                    if dep not in self.__build_dependencies:
                    if dep not in build_deps:
                        yield from dep.dependencies(Scope.ALL, recurse=recurse,
                                                    visited=visited, recursed=True)
                                                    visited=visited, recursed=True,
                                                    sysroot=sysroot, with_sysroot=with_sysroot)

            elif scope == Scope.BUILD:
                for dep in self.__build_dependencies:
                for dep_sysroot, dep in self.__build_dependencies:
                    new_sysroot = self._subst_string(dep_sysroot) if not recursed else sysroot
                    yield from dep.dependencies(Scope.RUN, recurse=recurse,
                                                visited=visited, recursed=True)
                                                visited=visited, recursed=True,
                                                sysroot=new_sysroot, with_sysroot=with_sysroot)

            elif scope == Scope.RUN:
                for dep in self.__runtime_dependencies:
                    yield from dep.dependencies(Scope.RUN, recurse=recurse,
                                                visited=visited, recursed=True)
                                                visited=visited, recursed=True,
                                                sysroot=sysroot, with_sysroot=with_sysroot)

        # Yeild self only at the end, after anything needed has been traversed
        if should_yield and (recurse or recursed) and (scope == Scope.ALL or scope == Scope.RUN):
        if should_yield and (recurse or recursed) and (scope in [Scope.ALL, Scope.RUN]):
            if with_sysroot:
                yield sysroot, self
            else:
                yield self

    def search(self, scope, name):
@@ -632,7 +649,7 @@ class Element(Plugin):
            vbasedir = sandbox.get_virtual_directory()
            vstagedir = vbasedir \
                if path is None \
                else vbasedir.descend(path.lstrip(os.sep).split(os.sep))
                else vbasedir.descend(path.lstrip(os.sep).split(os.sep), create=True)

            files = list(self.__compute_splits(include, exclude, orphans))

@@ -650,7 +667,8 @@ class Element(Plugin):
        return link_result.combine(copy_result)

    def stage_dependency_artifacts(self, sandbox, scope, *, path=None,
                                   include=None, exclude=None, orphans=True):
                                   include=None, exclude=None, orphans=True,
                                   build=True):
        """Stage element dependencies in scope

        This is primarily a convenience wrapper around
@@ -680,7 +698,14 @@ class Element(Plugin):
        if self.__can_build_incrementally() and workspace.last_successful:
            old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)

        for dep in self.dependencies(scope):
        def deps():
            if build:
                yield from self.dependencies(scope, with_sysroot=True)
            else:
                for dep in self.dependencies(scope, with_sysroot=False):
                    yield '/', dep

        for sysroot, dep in deps():
            # If we are workspaced, and we therefore perform an
            # incremental build, we must ensure that we update the mtimes
            # of any files created by our dependencies since the last
@@ -705,8 +730,13 @@ class Element(Plugin):
                    if utils._is_main_process():
                        self._get_context().get_workspaces().save_config()

            if build:
                sub_path = os.path.join(path, os.path.relpath(sysroot, '/')) if path else sysroot
            else:
                sub_path = path

            result = dep.stage_artifact(sandbox,
                                        path=path,
                                        path=sub_path,
                                        include=include,
                                        exclude=exclude,
                                        orphans=orphans,
@@ -906,9 +936,9 @@ class Element(Plugin):
        for meta_dep in meta.dependencies:
            dependency = Element._new_from_meta(meta_dep, artifacts)
            element.__runtime_dependencies.append(dependency)
        for meta_dep in meta.build_dependencies:
        for sysroot, meta_dep in meta.build_dependencies:
            dependency = Element._new_from_meta(meta_dep, artifacts)
            element.__build_dependencies.append(dependency)
            element.__build_dependencies.append((sysroot, dependency))

        return element

@@ -1088,14 +1118,11 @@ class Element(Plugin):
            # Weak cache key includes names of direct build dependencies
            # but does not include keys of dependencies.
            if self.BST_STRICT_REBUILD:
                dependencies = [
                    e._get_cache_key(strength=_KeyStrength.WEAK)
                    for e in self.dependencies(Scope.BUILD)
                ]
                dependencies = [(sysroot, e._get_cache_key(strength=_KeyStrength.WEAK))
                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
            else:
                dependencies = [
                    e.name for e in self.dependencies(Scope.BUILD, recurse=False)
                ]
                dependencies = [(sysroot, e.name)
                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]

            self.__weak_cache_key = self.__calculate_cache_key(dependencies)

@@ -1123,9 +1150,8 @@ class Element(Plugin):
                    return

        if self.__strict_cache_key is None:
            dependencies = [
                e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
            ]
            dependencies = [(sysroot, e.__strict_cache_key)
                            for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
            self.__strict_cache_key = self.__calculate_cache_key(dependencies)

            if self.__strict_cache_key is None:
@@ -1165,10 +1191,8 @@ class Element(Plugin):
                strong_key, _ = self.__get_artifact_metadata_keys()
                self.__cache_key = strong_key
            elif self.__assemble_scheduled or self.__assemble_done:
                # Artifact will or has been built, not downloaded
                dependencies = [
                    e._get_cache_key() for e in self.dependencies(Scope.BUILD)
                ]
                dependencies = [(sysroot, e._get_cache_key())
                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
                self.__cache_key = self.__calculate_cache_key(dependencies)

            if self.__cache_key is None:
@@ -1331,7 +1355,7 @@ class Element(Plugin):
                    # Stage deps in the sandbox root
                    if deps == 'run':
                        with self.timed_activity("Staging dependencies", silent_nested=True):
                            self.stage_dependency_artifacts(sandbox, scope)
                            self.stage_dependency_artifacts(sandbox, scope, build=False)

                        # Run any integration commands provided by the dependencies
                        # once they are all staged and ready
@@ -2024,8 +2048,11 @@ class Element(Plugin):
    #
    def __calculate_cache_key(self, dependencies):
        # No cache keys for dependencies which have no cache keys
        if None in dependencies:
        for dep in dependencies:
            if dep[1] is None:
                return None
        # Do not break cache keys
        dependencies = [(sysroot, key) if sysroot != '/' else key for sysroot, key in dependencies]

        # Generate dict that is used as base for all cache keys
        if self.__cache_key_dict is None:
+4 −3
Original line number Diff line number Diff line
@@ -115,14 +115,15 @@ class ComposeElement(Element):
        # Run any integration commands provided by the dependencies
        # once they are all staged and ready
        if self.integration:
            with self.timed_activity("Integrating sandbox"):
            with self.timed_activity("Integrating sandbox (compose)"):
                if require_split:

                    # Make a snapshot of all the files before integration-commands are run.
                    snapshot = set(vbasedir.list_relative_paths())
                    vbasedir.mark_unmodified()

                for dep in self.dependencies(Scope.BUILD):
                for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
                    if sysroot == '/':
                        dep.integrate(sandbox)

                if require_split:
Loading