Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (18)
Showing
with 1203 additions and 129 deletions
......@@ -17,6 +17,13 @@ buildstream 1.3.1
to avoid having to specify the dependency type for every entry in
'depends'.
o Source plugins may now request access to previous sources during track and
fetch by setting `BST_REQUIRES_PREVIOUS_SOURCES_TRACK` and/or
`BST_REQUIRES_PREVIOUS_SOURCES_FETCH` attributes.
o Add new `pip` source plugin for downloading python packages using pip,
based on requirements files from previous sources.
=================
buildstream 1.1.5
......
......@@ -30,6 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
from .sandbox import Sandbox, SandboxFlags
from .plugin import Plugin
from .source import Source, SourceError, Consistency, SourceFetcher
from .element import Element, ElementError, Scope
from .element import Element, ElementError
from .element_enums import Scope
from .buildelement import BuildElement
from .scriptelement import ScriptElement
......@@ -21,7 +21,7 @@ import os
import string
from collections import Mapping, namedtuple
from ..element import _KeyStrength
from ..element_enums import _KeyStrength
from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
from .._message import Message, MessageType
from .. import utils
......
......@@ -522,14 +522,15 @@ class Loader():
element = Element._new_from_meta(meta_element, platform.artifactcache)
element._preflight()
for source in element.sources():
sources = list(element.sources())
for idx, source in enumerate(sources):
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
if fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source._fetch()
source._fetch(sources[0:idx])
else:
detail = "Try fetching the project with `bst fetch {}`".format(filename)
raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
......
......@@ -40,8 +40,10 @@ class FetchQueue(Queue):
self._skip_cached = skip_cached
def process(self, element):
    # Fetch sources in declaration order; each source is handed the
    # list of sources already fetched before it, so that plugins which
    # set BST_REQUIRES_PREVIOUS_SOURCES_FETCH can stage and consult them.
    fetched_so_far = []
    for src in element.sources():
        src._fetch(fetched_so_far)
        fetched_so_far.append(src)
def status(self, element):
# state of dependencies may have changed, recalculate element state
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 15
BST_FORMAT_VERSION = 16
# The base BuildStream artifact version
......
......@@ -78,7 +78,6 @@ import stat
import copy
from collections import Mapping, OrderedDict
from contextlib import contextmanager
from enum import Enum
import tempfile
import shutil
......@@ -98,41 +97,9 @@ from .plugin import CoreWarnings
from .sandbox._config import SandboxConfig
from .storage.directory import Directory
from .storage._filebaseddirectory import FileBasedDirectory, VirtualDirectoryError
# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):
# Includes strong cache keys of all build dependencies and their
# runtime dependencies.
STRONG = 1
# Includes names of direct build dependencies but does not include
# cache keys of dependencies.
WEAK = 2
class Scope(Enum):
"""Types of scope for a given element"""
ALL = 1
"""All elements which the given element depends on, following
all elements required for building. Including the element itself.
"""
BUILD = 2
"""All elements required for building the element, including their
respective run dependencies. Not including the given element itself.
"""
RUN = 3
"""All elements required for running the element. Including the element
itself.
"""
from .storage._filebaseddirectory import FileBasedDirectory
from .storage.directory import VirtualDirectoryError
from .element_enums import _KeyStrength, Scope
class ElementError(BstError):
......@@ -227,6 +194,7 @@ class Element(Plugin):
self.__runtime_dependencies = [] # Direct runtime dependency Elements
self.__build_dependencies = [] # Direct build dependency Elements
self.__reverse_dependencies = [] # Direct reverse dependency Elements
self.__sources = [] # List of Sources
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
......@@ -925,9 +893,11 @@ class Element(Plugin):
for meta_dep in meta.dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
element.__runtime_dependencies.append(dependency)
dependency.__reverse_dependencies.append(element)
for meta_dep in meta.build_dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
element.__build_dependencies.append(dependency)
dependency.__reverse_dependencies.append(element)
return element
......@@ -1262,6 +1232,12 @@ class Element(Plugin):
# Prepend provenance to the error
raise ElementError("{}: {}".format(self, e), reason=e.reason) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError("{}: {} cannot be the first source of an element "
"as it requires access to previous sources"
.format(self, self.__sources[0]))
# Preflight the sources
for source in self.sources():
source._preflight()
......@@ -1305,9 +1281,9 @@ class Element(Plugin):
#
def _track(self):
refs = []
for source in self.__sources:
for index, source in enumerate(self.__sources):
old_ref = source.get_ref()
new_ref = source._track()
new_ref = source._track(self.__sources[0:index])
refs.append((source._get_unique_id(), new_ref))
# Complimentary warning that the new ref will be unused.
......@@ -1455,6 +1431,16 @@ class Element(Plugin):
self._update_state()
if workspace:
# We need to invalidate reverse dependencies
for reverse_dep in self.__get_reverse_dependencies():
reverse_dep.__cache_key_dict = None
reverse_dep.__cache_key = None
reverse_dep.__weak_cache_key = None
reverse_dep.__strict_cache_key = None
reverse_dep.__strong_cached = None
reverse_dep._update_state()
# _assemble_done():
#
# This is called in the main process after the element has been assembled
......@@ -1492,8 +1478,14 @@ class Element(Plugin):
# This does *not* cause a race condition, because
# _assemble_done is called before a cleanup job may be
# launched.
#
self.__artifacts.append_required_artifacts([self])
required_artifacts = [self]
# Reverse dependencies can now compute their keys
for reverse_dep in self.__get_reverse_dependencies():
reverse_dep._update_state()
required_artifacts.append(reverse_dep)
self.__artifacts.append_required_artifacts(required_artifacts)
# _assemble():
#
......@@ -2625,6 +2617,31 @@ class Element(Plugin):
return utils._deduplicate(keys)
# __get_reverse_dependencies():
#
# Iterates through the closure of reverse dependencies.
#
# Args:
#    visited (set): The full names of elements already emitted (only for recursion)
#    recursed (bool): Whether to emit the current element (only for recursion)
#
# Yields:
#    (:class:`.Element`): The reverse dependent elements
#
def __get_reverse_dependencies(self, *, visited=None, recursed=False):
    if visited is None:
        visited = set()

    full_name = self._get_full_name()
    if full_name in visited:
        return

    # Record this element before recursing; without this the visited
    # set never grows, so diamonds in the reverse dependency graph are
    # emitted multiple times and cycles recurse forever.
    visited.add(full_name)

    # The element on which the traversal was started is skipped
    if recursed:
        yield self

    for reverse_dep in self.__reverse_dependencies:
        yield from reverse_dep.__get_reverse_dependencies(visited=visited, recursed=True)
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
......
#
# Copyright (C) 2018 Bloomberg LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
# Jim MacArthur <jim.macarthur@codethink.co.uk>
"""
Element - Globally visible enumerations
=======================================
"""
from enum import Enum
# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):
    """Internal enumeration of the two cache key strengths."""

    # Includes strong cache keys of all build dependencies and their
    # runtime dependencies.
    STRONG = 1

    # Includes names of direct build dependencies but does not include
    # cache keys of dependencies.
    WEAK = 2
class Scope(Enum):
    """Types of scope for a given element

    See the individual members for the exact set of elements each
    scope selects.
    """

    ALL = 1
    """All elements which the given element depends on, following
    all elements required for building. Including the element itself.
    """

    BUILD = 2
    """All elements required for building the element, including their
    respective run dependencies. Not including the given element itself.
    """

    RUN = 3
    """All elements required for running the element. Including the element
    itself.
    """
......@@ -71,7 +71,7 @@ class OSTreeSource(Source):
self.ref = self.node_get_member(node, str, 'ref', None)
self.tracking = self.node_get_member(node, str, 'track', None)
self.mirror = os.path.join(self.get_mirror_directory(),
utils.url_directory_name(self.url))
utils.url_directory_name(self.original_url))
# (optional) Not all repos are signed. But if they are, get the gpg key
self.gpg_key_path = None
......@@ -104,10 +104,11 @@ class OSTreeSource(Source):
return None
self.ensure()
remote_name = self.ensure_remote(self.url)
with self.timed_activity("Fetching tracking ref '{}' from origin: {}"
.format(self.tracking, self.url)):
try:
_ostree.fetch(self.repo, ref=self.tracking, progress=self.progress)
_ostree.fetch(self.repo, remote=remote_name, ref=self.tracking, progress=self.progress)
except OSTreeError as e:
raise SourceError("{}: Failed to fetch tracking ref '{}' from origin {}\n\n{}"
.format(self, self.tracking, self.url, e)) from e
......@@ -116,11 +117,12 @@ class OSTreeSource(Source):
def fetch(self):
self.ensure()
remote_name = self.ensure_remote(self.url)
if not _ostree.exists(self.repo, self.ref):
with self.timed_activity("Fetching remote ref: {} from origin: {}"
.format(self.ref, self.url)):
try:
_ostree.fetch(self.repo, ref=self.ref, progress=self.progress)
_ostree.fetch(self.repo, remote=remote_name, ref=self.ref, progress=self.progress)
except OSTreeError as e:
raise SourceError("{}: Failed to fetch ref '{}' from origin: {}\n\n{}"
.format(self, self.ref, self.url, e)) from e
......@@ -171,14 +173,22 @@ class OSTreeSource(Source):
self.status("Creating local mirror for {}".format(self.url))
self.repo = _ostree.ensure(self.mirror, True)
gpg_key = None
if self.gpg_key_path:
gpg_key = 'file://' + self.gpg_key_path
try:
_ostree.configure_remote(self.repo, "origin", self.url, key_url=gpg_key)
except OSTreeError as e:
raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e
def ensure_remote(self, url):
    # The unaliased default URL keeps the historical remote name
    # 'origin'; any other (mirror) URL gets a remote of its own,
    # named after the URL.
    if self.original_url == self.url:
        remote_name = 'origin'
    else:
        remote_name = utils.url_directory_name(url)

    # (optional) Pass the GPG key along when the repo is signed
    gpg_key = 'file://' + self.gpg_key_path if self.gpg_key_path else None

    try:
        _ostree.configure_remote(self.repo, remote_name, url, key_url=gpg_key)
    except OSTreeError as e:
        raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e

    return remote_name
def progress(self, percent, message):
    # Progress callback handed to _ostree.fetch(); forwards the human
    # readable message to the status display.  The numeric percentage
    # is currently unused.
    self.status(message)
......
#
# Copyright 2018 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Chandan Singh <csingh43@bloomberg.net>
"""
pip - stage python packages using pip
=====================================
**Host dependencies:**
* ``pip`` python module
This plugin will download source distributions for specified packages using
``pip`` but will not install them. It is expected that the elements using this
source will install the downloaded packages.
Downloaded tarballs will be stored in a directory called ".bst_pip_downloads".
**Usage:**
.. code:: yaml
# Specify the pip source kind
kind: pip
# Optionally specify index url, defaults to PyPi
# This url is used to discover new versions of packages and download them
# Projects intending to mirror their sources to a permanent location should
# use an aliased url, and declare the alias in the project configuration
url: https://mypypi.example.com/simple
# Optionally specify the path to requirements files
# Note that either 'requirements-files' or 'packages' must be defined
requirements-files:
- requirements.txt
# Optionally specify a list of additional packages
# Note that either 'requirements-files' or 'packages' must be defined
packages:
- flake8
# Optionally specify a relative staging directory
directory: path/to/stage
# Specify the ref. It is a list of strings of format
# "<package-name>==<version>", separated by "\\n".
# Usually this will be contents of a requirements.txt file where all
# package versions have been frozen.
ref: "flake8==3.5.0\\nmccabe==0.6.1\\npkg-resources==0.0.0\\npycodestyle==2.3.1\\npyflakes==1.6.0"
.. note::
The ``pip`` plugin is available since :ref:`format version 16 <project_format_version>`
"""
import errno
import hashlib
import os
import re
from buildstream import Consistency, Source, SourceError, utils
# Directory (relative to the staging root) into which stage() places
# the downloaded source distributions.
_OUTPUT_DIRNAME = '.bst_pip_downloads'

# Default index used to discover and download packages when no 'url'
# is configured for the source.
_PYPI_INDEX_URL = 'https://pypi.org/simple/'

# Used only for finding pip command
_PYTHON_VERSIONS = [
    'python2.7',
    'python3.0',
    'python3.1',
    'python3.2',
    'python3.3',
    'python3.4',
    'python3.5',
    'python3.6',
    'python3.7',
]

# List of allowed extensions taken from
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
#
# The dots are escaped: previously '.' before the extension and inside
# 'tar.bz2'/'tar.gz'/'tar.xz'/'tar.Z' matched *any* character, so names
# such as 'pkg-1.0_fakezip' were wrongly accepted as sdist archives.
_SDIST_RE = re.compile(
    r'^([a-zA-Z0-9]+?)-(.+)\.(?:tar|tar\.bz2|tar\.gz|tar\.xz|tar\.Z|zip)$',
    re.IGNORECASE)
class PipSource(Source):
    """Source plugin which downloads (but does not install) python
    source distributions using the host ``pip``.
    """
    # pylint: disable=attribute-defined-outside-init

    # We need access to previous sources at track time to use requirements.txt
    # but not at fetch time as self.ref should contain sufficient information
    # for this plugin
    BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True

    def configure(self, node):
        # Read and validate the source configuration from the element YAML
        self.node_validate(node, ['url', 'packages', 'ref', 'requirements-files'] +
                           Source.COMMON_CONFIG_KEYS)
        self.ref = self.node_get_member(node, str, 'ref', None)
        # 'url' falls back to the public PyPI index when not configured
        self.original_url = self.node_get_member(node, str, 'url', _PYPI_INDEX_URL)
        self.index_url = self.translate_url(self.original_url)
        self.packages = self.node_get_member(node, list, 'packages', [])
        self.requirements_files = self.node_get_member(node, list, 'requirements-files', [])

        # At least one of the two package sources must be given,
        # otherwise there is nothing to download
        if not (self.packages or self.requirements_files):
            raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))

    def preflight(self):
        # Try to find a pip version that supports download command
        # (iterate newest interpreter first, _PYTHON_VERSIONS is ordered
        # oldest-to-newest)
        self.host_pip = None
        for python in reversed(_PYTHON_VERSIONS):
            try:
                host_python = utils.get_host_tool(python)
                # Probe 'pip download --help'; exit status 0 means this
                # interpreter's pip supports the download subcommand
                rc = self.call([host_python, '-m', 'pip', 'download', '--help'])
                if rc == 0:
                    self.host_pip = [host_python, '-m', 'pip']
                    break
            except utils.ProgramNotFoundError:
                # Interpreter not installed on this host, try the next one
                pass

        if self.host_pip is None:
            raise SourceError("{}: Unable to find a suitable pip command".format(self))

    def get_unique_key(self):
        # The index URL and the frozen package list fully determine the output
        return [self.original_url, self.ref]

    def get_consistency(self):
        if not self.ref:
            return Consistency.INCONSISTENT
        # CACHED only when the local mirror exists and is non-empty
        if os.path.exists(self._mirror) and os.listdir(self._mirror):
            return Consistency.CACHED
        return Consistency.RESOLVED

    def get_ref(self):
        return self.ref

    def load_ref(self, node):
        self.ref = self.node_get_member(node, str, 'ref', None)

    def set_ref(self, ref, node):
        # Persist the new ref in the YAML node as well as on this instance
        node['ref'] = self.ref = ref

    def track(self, previous_sources_dir):
        # XXX pip does not offer any public API other than the CLI tool so it
        # is not feasible to correctly parse the requirements file or to check
        # which package versions pip is going to install.
        # See https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program
        # for details.
        # As a result, we have to wastefully install the packages during track.
        with self.tempdir() as tmpdir:
            install_args = self.host_pip + ['download',
                                            '--no-binary', ':all:',
                                            '--index-url', self.index_url,
                                            '--dest', tmpdir]
            # Requirements files are resolved relative to the staged
            # previous sources (BST_REQUIRES_PREVIOUS_SOURCES_TRACK)
            for requirement_file in self.requirements_files:
                fpath = os.path.join(previous_sources_dir, requirement_file)
                install_args += ['-r', fpath]
            install_args += self.packages

            self.call(install_args, fail="Failed to install python packages")
            reqs = self._parse_sdist_names(tmpdir)

        # The ref is the newline-joined frozen package list
        return '\n'.join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])

    def fetch(self):
        with self.tempdir() as tmpdir:
            # The ref holds one "<package>==<version>" spec per line
            packages = self.ref.strip().split('\n')
            package_dir = os.path.join(tmpdir, 'packages')
            os.makedirs(package_dir)
            self.call(self.host_pip + ['download',
                                       '--no-binary', ':all:',
                                       '--index-url', self.index_url,
                                       '--dest', package_dir] + packages,
                      fail="Failed to install python packages: {}".format(packages))

            # If the mirror directory already exists, assume that some other
            # process has fetched the sources before us and ensure that we do
            # not raise an error in that case.
            try:
                # NOTE(review): renaming onto the just-created empty directory
                # relies on POSIX rename semantics — confirm on other platforms.
                os.makedirs(self._mirror)
                os.rename(package_dir, self._mirror)
            except FileExistsError:
                return
            except OSError as e:
                if e.errno != errno.ENOTEMPTY:
                    raise

    def stage(self, directory):
        with self.timed_activity("Staging Python packages", silent_nested=True):
            utils.copy_files(self._mirror, os.path.join(directory, _OUTPUT_DIRNAME))

    # Directory where this source should stage its files
    #
    @property
    def _mirror(self):
        # No ref means no deterministic mirror location yet
        if not self.ref:
            return None
        return os.path.join(self.get_mirror_directory(),
                            utils.url_directory_name(self.original_url),
                            hashlib.sha256(self.ref.encode()).hexdigest())

    # Parse names of downloaded source distributions
    #
    # Args:
    #    basedir (str): Directory containing source distribution archives
    #
    # Returns:
    #    (list): List of (package_name, version) tuples in sorted order
    #
    def _parse_sdist_names(self, basedir):
        reqs = []
        for f in os.listdir(basedir):
            pkg_match = _SDIST_RE.match(f)
            if pkg_match:
                reqs.append(pkg_match.groups())

        return sorted(reqs)
# setup():
#
# Plugin entry point used by BuildStream to obtain the Source subclass
# implemented by this module.
#
def setup():
    return PipSource
......@@ -32,8 +32,10 @@ from .._fuse import SafeHardlinks
class Mount():
def __init__(self, sandbox, mount_point, safe_hardlinks):
scratch_directory = sandbox._get_scratch_directory()
# Getting external_directory here is acceptable as we're part of the sandbox code.
root_directory = sandbox.get_virtual_directory().external_directory
# Getting _get_underlying_directory() here is acceptable as
# we're part of the sandbox code. This will fail if our
# directory is CAS-based.
root_directory = sandbox.get_virtual_directory()._get_underlying_directory()
self.mount_point = mount_point
self.safe_hardlinks = safe_hardlinks
......
......@@ -58,7 +58,7 @@ class SandboxBwrap(Sandbox):
stdout, stderr = self._get_output()
# Allowable access to underlying storage as we're part of the sandbox
root_directory = self.get_virtual_directory().external_directory
root_directory = self.get_virtual_directory()._get_underlying_directory()
# Fallback to the sandbox default settings for
# the cwd and env.
......@@ -248,6 +248,7 @@ class SandboxBwrap(Sandbox):
# a bug, bwrap mounted a tempfs here and when it exits, that better be empty.
pass
self._vdir._mark_changed()
return exit_code
def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
......
......@@ -106,6 +106,7 @@ class SandboxChroot(Sandbox):
status = self.chroot(rootfs, command, stdin, stdout,
stderr, cwd, env, flags)
self._vdir._mark_changed()
return status
# chroot()
......
......@@ -31,6 +31,7 @@ See also: :ref:`sandboxing`.
import os
from .._exceptions import ImplError, BstError
from ..storage._filebaseddirectory import FileBasedDirectory
from ..storage._casbaseddirectory import CasBasedDirectory
class SandboxFlags():
......@@ -105,6 +106,7 @@ class Sandbox():
self.__scratch = os.path.join(self.__directory, 'scratch')
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
self._vdir = None
def get_directory(self):
"""Fetches the sandbox root directory
......@@ -133,8 +135,14 @@ class Sandbox():
(str): The sandbox root directory
"""
# For now, just create a new Directory every time we're asked
return FileBasedDirectory(self._root)
if not self._vdir:
# BST_CAS_DIRECTORIES is a deliberately hidden environment variable which
# can be used to switch on CAS-based directories for testing.
if 'BST_CAS_DIRECTORIES' in os.environ:
self._vdir = CasBasedDirectory(self.__context, ref=None)
else:
self._vdir = FileBasedDirectory(self._root)
return self._vdir
def set_environment(self, environment):
"""Sets the environment variables for the sandbox
......
......@@ -76,6 +76,39 @@ these methods are mandatory to implement.
:ref:`SourceFetcher <core_source_fetcher>`.
Accessing previous sources
--------------------------
*Since: 1.4*
In the general case, all sources are fetched and tracked independently of one
another. In situations where a source needs to access previous source(s) in
order to perform its own track and/or fetch, the following attributes can be set to
request access to previous sources:
* :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK`
Indicate that access to previous sources is required during track
* :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH`
Indicate that access to previous sources is required during fetch
The intended use of such plugins is to fetch external dependencies of other
sources, typically using some kind of package manager, such that all the
dependencies of the original source(s) are available at build time.
When implementing such a plugin, implementors should adhere to the following
guidelines:
* Implementations must be able to store the obtained artifacts in a
subdirectory.
* Implementations must be able to deterministically generate a unique ref, such
that two refs are different if and only if they produce different outputs.
* Implementations must not introduce host contamination.
.. _core_source_fetcher:
SourceFetcher - Object for fetching individual URLs
......@@ -92,6 +125,8 @@ mentioned, these methods are mandatory to implement.
Fetches the URL associated with this SourceFetcher, optionally taking an
alias override.
Class Reference
---------------
"""
import os
......@@ -156,7 +191,7 @@ class SourceFetcher():
#############################################################
# Abstract Methods #
#############################################################
def fetch(self, alias_override=None):
def fetch(self, alias_override=None, **kwargs):
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
......@@ -209,6 +244,32 @@ class Source(Plugin):
__defaults = {} # The defaults from the project
__defaults_set = False # Flag, in case there are not defaults at all
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
When set to True:
* all sources listed before this source in the given element will be
fetched before this source is tracked
* Source.track() will be called with an additional keyword argument
`previous_sources_dir` where previous sources will be staged
* this source can not be the first source for an element
*Since: 1.4*
"""
BST_REQUIRES_PREVIOUS_SOURCES_FETCH = False
"""Whether access to previous sources is required during fetch
When set to True:
* all sources listed before this source in the given element will be
fetched before this source is fetched
* Source.fetch() will be called with an additional keyword argument
`previous_sources_dir` where previous sources will be staged
* this source can not be the first source for an element
*Since: 1.4*
"""
def __init__(self, context, project, meta, *, alias_override=None):
provenance = _yaml.node_get_provenance(meta.config)
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
......@@ -305,9 +366,15 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
def track(self):
def track(self, **kwargs):
"""Resolve a new ref from the plugin's track option
Args:
previous_sources_dir (str): directory where previous sources are staged.
Note that this keyword argument is available only when
:attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK`
is set to True.
Returns:
(simple object): A new internal source reference, or None
......@@ -326,10 +393,16 @@ class Source(Plugin):
# Allow a non implementation
return None
def fetch(self):
def fetch(self, **kwargs):
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
Args:
previous_sources_dir (str): directory where previous sources are staged.
Note that this keyword argument is available only when
:attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH`
is set to True.
Raises:
:class:`.SourceError`
......@@ -519,50 +592,19 @@ class Source(Plugin):
# Wrapper function around plugin provided fetch method
#
def _fetch(self):
project = self._get_project()
source_fetchers = self.get_source_fetchers()
if source_fetchers:
for fetcher in source_fetchers:
alias = fetcher._get_alias()
success = False
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
try:
fetcher.fetch(uri)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
success = True
break
if not success:
raise last_error
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
def _fetch(self, previous_sources):
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
with self.tempdir() as staging_directory:
for src in previous_sources:
src._stage(staging_directory)
self.__do_fetch(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
alias = self._get_alias()
if self.__first_pass:
mirrors = project.first_pass_config.mirrors
else:
mirrors = project.config.mirrors
if not mirrors or not alias:
self.fetch()
return
context = self._get_context()
source_kind = type(self)
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
new_source = source_kind(context, project, self.__meta,
alias_override=(alias, uri))
new_source._preflight()
try:
new_source.fetch()
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
return
raise last_error
self.__do_fetch()
# Wrapper for stage() api which gives the source
# plugin a fully constructed path considering the
......@@ -773,8 +815,19 @@ class Source(Plugin):
# Wrapper for track()
#
def _track(self):
new_ref = self.__do_track()
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
def _track(self, previous_sources):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
with self.tempdir() as staging_directory:
for src in previous_sources:
src._stage(staging_directory)
new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
new_ref = self.__do_track()
current_ref = self.get_ref()
if new_ref is None:
......@@ -786,6 +839,17 @@ class Source(Plugin):
return new_ref
# _requires_previous_sources()
#
# If a plugin requires access to previous sources at track or fetch time,
# then it cannot be the first source of an element.
#
# Returns:
#    (bool): Whether this source requires access to previous sources
#
def _requires_previous_sources(self):
    return any((self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK,
                self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH))
# Returns the alias if it's defined in the project
def _get_alias(self):
alias = self.__expected_alias
......@@ -801,8 +865,54 @@ class Source(Plugin):
# Local Private Methods #
#############################################################
# __do_fetch():
#
# Tries to call fetch for every mirror, stopping once it succeeds
#
# Args:
#    kwargs: Plugin-specific keyword arguments forwarded to fetch()
#            (e.g. 'previous_sources_dir')
#
def __do_fetch(self, **kwargs):
    project = self._get_project()
    source_fetchers = self.get_source_fetchers()
    if source_fetchers:
        # The plugin provides SourceFetchers: try every configured URI
        # for each fetcher until one of them succeeds.
        for fetcher in source_fetchers:
            alias = fetcher._get_alias()
            success = False
            for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
                try:
                    fetcher.fetch(uri)
                # FIXME: Need to consider temporary vs. permanent failures,
                # and how this works with retries.
                except BstError as e:
                    last_error = e
                    continue
                success = True
                break
            if not success:
                # NOTE(review): assumes get_alias_uris() yielded at least
                # one URI, otherwise 'last_error' is unbound — confirm.
                raise last_error
    else:
        alias = self._get_alias()
        if self.__first_pass:
            mirrors = project.first_pass_config.mirrors
        else:
            mirrors = project.config.mirrors
        # With no mirrors or no alias there is nothing to substitute,
        # simply fetch with the plugin as configured.
        if not mirrors or not alias:
            self.fetch(**kwargs)
            return

        # Otherwise, instantiate a copy of this source per alias URI
        # and try them in turn until one fetch succeeds.
        context = self._get_context()
        source_kind = type(self)
        for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
            new_source = source_kind(context, project, self.__meta,
                                     alias_override=(alias, uri))
            new_source._preflight()
            try:
                new_source.fetch(**kwargs)
            # FIXME: Need to consider temporary vs. permanent failures,
            # and how this works with retries.
            except BstError as e:
                last_error = e
                continue
            return
        raise last_error
# Tries to call track for every mirror, stopping once it succeeds
def __do_track(self):
def __do_track(self, **kwargs):
project = self._get_project()
# If there are no mirrors, or no aliases to replace, there's nothing to do here.
alias = self._get_alias()
......@@ -811,7 +921,7 @@ class Source(Plugin):
else:
mirrors = project.config.mirrors
if not mirrors or not alias:
return self.track()
return self.track(**kwargs)
context = self._get_context()
source_kind = type(self)
......@@ -823,7 +933,7 @@ class Source(Plugin):
alias_override=(alias, uri))
new_source._preflight()
try:
ref = new_source.track()
ref = new_source.track(**kwargs)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
......@@ -867,3 +977,14 @@ class Source(Plugin):
_yaml.node_final_assertions(config)
return config
# __ensure_previous_sources():
#
# Ensures that previous sources have been tracked and fetched.
#
# Args:
#    previous_sources (list): Sources listed before this one in the element
#
def __ensure_previous_sources(self, previous_sources):
    for index, src in enumerate(previous_sources):
        # BuildStream should track sources in the order they appear so
        # previous sources should never be in an inconsistent state
        assert src.get_consistency() != Consistency.INCONSISTENT

        if src.get_consistency() == Consistency.RESOLVED:
            # Fetch lazily, handing each source the sources preceding *it*
            src._fetch(previous_sources[0:index])
This diff is collapsed.
......@@ -29,25 +29,12 @@ See also: :ref:`sandboxing`.
import os
import time
from .._exceptions import BstError, ErrorDomain
from .directory import Directory
from .directory import Directory, VirtualDirectoryError
from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
from ..utils import _set_deterministic_user, _set_deterministic_mtime
class VirtualDirectoryError(BstError):
"""Raised by Directory functions when system calls fail.
This will be handled internally by the BuildStream core,
if you need to handle this error, then it should be reraised,
or either of the :class:`.ElementError` or :class:`.SourceError`
exceptions should be raised from this error.
"""
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
# FileBasedDirectory intentionally doesn't call its superclass constuctor,
# which is mean to be unimplemented.
# which is meant to be unimplemented.
# pylint: disable=super-init-not-called
......@@ -108,7 +95,8 @@ class FileBasedDirectory(Directory):
if create:
new_path = os.path.join(self.external_directory, subdirectory_spec[0])
os.makedirs(new_path, exist_ok=True)
return FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
self.index[subdirectory_spec[0]] = FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
return self.index[subdirectory_spec[0]]
else:
error = "No entry called '{}' found in the directory rooted at {}"
raise VirtualDirectoryError(error.format(subdirectory_spec[0], self.external_directory))
......@@ -134,8 +122,12 @@ class FileBasedDirectory(Directory):
for f in import_result.files_written:
os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
self._mark_changed()
return import_result
def _mark_changed(self):
    """Mark this directory as having been changed outside this API.

    Clearing ``_directory_read`` presumably causes the cached file
    index to be re-read from the backing directory on the next
    access — TODO confirm against the index-population code.
    """
    self._directory_read = False
def set_deterministic_mtime(self):
    """Set a fixed, deterministic mtime on the backing directory tree.

    Delegates to utils._set_deterministic_mtime on the external
    (real filesystem) directory this object wraps.
    """
    _set_deterministic_mtime(self.external_directory)
......@@ -214,3 +206,8 @@ class FileBasedDirectory(Directory):
# which exposes the sandbox directory; we will have to assume for the time being
# that people will not abuse __str__.
return self.external_directory
def _get_underlying_directory(self) -> str:
    """Return the underlying (real) file system directory this
    object refers to.

    Returns:
       (str): The path of the backing directory on the host filesystem.
    """
    return self.external_directory
......@@ -31,6 +31,19 @@ See also: :ref:`sandboxing`.
"""
from .._exceptions import BstError, ErrorDomain
class VirtualDirectoryError(BstError):
    """Raised by Directory functions when system calls fail.

    This will be handled internally by the BuildStream core,
    if you need to handle this error, then it should be reraised,
    or either of the :class:`.ElementError` or :class:`.SourceError`
    exceptions should be raised from this error.
    """
    def __init__(self, message, reason=None):
        # Tag the error with the VIRTUAL_FS domain so the core can
        # attribute it to the virtual filesystem layer; ``reason`` is
        # an optional machine-readable identifier for the failure.
        super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
class Directory():
def __init__(self, external_directory=None):
......@@ -153,3 +166,13 @@ class Directory():
"""
raise NotImplementedError()
def _mark_changed(self):
    """Internal function to mark this directory as having been changed
    outside this API. This normally can only happen by calling the
    Sandbox's `run` method. This does *not* mark everything as modified
    (i.e. list_modified_paths will not necessarily return the same results
    as list_relative_paths after calling this.)

    Raises:
       NotImplementedError: Always, on this abstract base; concrete
       Directory subclasses must override this method.
    """
    raise NotImplementedError()
......@@ -58,6 +58,7 @@ Sources
sources/ostree
sources/patch
sources/deb
sources/pip
External plugins
......
#
# Copyright (C) 2018 Codethink Limited
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors: Tristan Maat <tristan.maat@codethink.co.uk>
#
import os
import pytest
......@@ -5,7 +24,7 @@ import pytest
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from tests.testutils import cli, create_element_size
from tests.testutils import cli, create_element_size, wait_for_cache_granularity
DATA_DIR = os.path.join(
......@@ -108,6 +127,8 @@ def test_expiry_order(cli, datafiles, tmpdir):
res = cli.run(project=project, args=['build', 'target2.bst'])
res.assert_success()
wait_for_cache_granularity()
# Now extract dep.bst
res = cli.run(project=project, args=['checkout', 'dep.bst', checkout])
res.assert_success()
......