Compare revisions
Commits on Source (2)
Showing 549 additions and 210 deletions
@@ -17,17 +17,16 @@
# Authors:
# Tristan Maat <tristan.maat@codethink.co.uk>
import multiprocessing
import os
from collections.abc import Mapping
from ._basecache import BaseCache
from .types import _KeyStrength
from ._exceptions import ArtifactError, CASError
from ._message import Message, MessageType
from ._message import MessageType
from . import utils
from . import _yaml
from ._cas import CASRemote, CASRemoteSpec
from ._cas import CASRemoteSpec
from .storage._casbaseddirectory import CasBasedDirectory
@@ -51,93 +50,18 @@ class ArtifactCacheSpec(CASRemoteSpec):
# Args:
# context (Context): The BuildStream context
#
class ArtifactCache():
def __init__(self, context):
self.context = context
self.extractdir = context.extractdir
class ArtifactCache(BaseCache):
self.cas = context.get_cascache()
self.casquota = context.get_casquota()
self.casquota._calculate_cache_quota()
spec_class = ArtifactCacheSpec
spec_name = "artifact_cache_specs"
spec_error = ArtifactError
config_node_name = "artifacts"
self.global_remote_specs = []
self.project_remote_specs = {}
def __init__(self, context):
super().__init__(context)
self._required_elements = set() # The elements required for this session
self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of _CASRemote instances.
self._remotes = {}
self._has_fetch_remotes = False
self._has_push_remotes = False
os.makedirs(self.extractdir, exist_ok=True)
# setup_remotes():
#
# Sets up which remotes to use
#
# Args:
# use_config (bool): Whether to use project configuration
# remote_url (str): Remote artifact cache URL
#
# This requires that all of the projects which are to be processed in the session
# have already been loaded and are observable in the Context.
#
def setup_remotes(self, *, use_config=False, remote_url=None):
# Ensure we do not double-initialise since this can be expensive
assert not self._remotes_setup
self._remotes_setup = True
# Initialize remote artifact caches. We allow the commandline to override
# the user config in some cases (for example `bst artifact push --remote=...`).
has_remote_caches = False
if remote_url:
self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
has_remote_caches = True
if use_config:
for project in self.context.get_projects():
artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances
self._set_remotes(artifact_caches, project=project)
has_remote_caches = True
if has_remote_caches:
self._initialize_remotes()
# specs_from_config_node()
#
# Parses the configuration of remote artifact caches from a config block.
#
# Args:
# config_node (dict): The config block, which may contain the 'artifacts' key
# basedir (str): The base directory for relative paths
#
# Returns:
# A list of ArtifactCacheSpec instances.
#
# Raises:
# LoadError, if the config block contains invalid keys.
#
@staticmethod
def specs_from_config_node(config_node, basedir=None):
cache_specs = []
artifacts = config_node.get('artifacts', [])
if isinstance(artifacts, Mapping):
cache_specs.append(ArtifactCacheSpec._new_from_config_node(artifacts, basedir))
elif isinstance(artifacts, list):
for spec_node in artifacts:
cache_specs.append(ArtifactCacheSpec._new_from_config_node(spec_node, basedir))
else:
provenance = _yaml.node_get_provenance(config_node, key='artifacts')
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" %
(str(provenance)))
return cache_specs
# mark_required_elements():
#
# Mark elements whose artifacts are required for the current run.
@@ -314,56 +238,6 @@ class ArtifactCache():
def preflight(self):
self.cas.preflight()
# initialize_remotes():
#
# This will contact each remote cache.
#
# Args:
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
remote_specs = list(self.global_remote_specs)
for project in self.project_remote_specs:
remote_specs += self.project_remote_specs[project]
remote_specs = list(utils._deduplicate(remote_specs))
remotes = {}
q = multiprocessing.Queue()
for remote_spec in remote_specs:
error = CASRemote.check_remote(remote_spec, q)
if error and on_failure:
on_failure(remote_spec.url, error)
elif error:
raise ArtifactError(error)
else:
self._has_fetch_remotes = True
if remote_spec.push:
self._has_push_remotes = True
remotes[remote_spec.url] = CASRemote(remote_spec)
for project in self.context.get_projects():
remote_specs = self.global_remote_specs
if project in self.project_remote_specs:
remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
project_remotes = []
for remote_spec in remote_specs:
# Errors are already handled in the loop above,
# skip unreachable remotes here.
if remote_spec.url not in remotes:
continue
remote = remotes[remote_spec.url]
project_remotes.append(remote)
self._remotes[project] = project_remotes
# contains():
#
# Check whether the artifact for the specified Element is already available
@@ -726,61 +600,3 @@ class ArtifactCache():
cache_id = self.cas.resolve_ref(ref, update_mtime=True)
vdir = CasBasedDirectory(self.cas, cache_id).descend(descend)
return vdir
################################################
# Local Private Methods #
################################################
# _message()
#
# Local message propagator
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self.context.message(
Message(None, message_type, message, **args))
# _set_remotes():
#
# Set the list of remote caches. If project is None, the global list of
# remote caches will be set, which is used by all projects. If a project is
# specified, the per-project list of remote caches will be set.
#
# Args:
# remote_specs (list): List of ArtifactCacheSpec instances, in priority order.
# project (Project): The Project instance for project-specific remotes
def _set_remotes(self, remote_specs, *, project=None):
if project is None:
# global remotes
self.global_remote_specs = remote_specs
else:
self.project_remote_specs[project] = remote_specs
# _initialize_remotes()
#
# An internal wrapper which calls the abstract method and
# takes care of messaging
#
def _initialize_remotes(self):
def remote_failed(url, error):
self._message(MessageType.WARN, "Failed to initialize remote {}: {}".format(url, error))
with self.context.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
# _configured_remote_artifact_cache_specs():
#
# Return the list of configured artifact remotes for a given project, in priority
# order. This takes into account the user and project configuration.
#
# Args:
# context (Context): The BuildStream context
# project (Project): The BuildStream project
#
# Returns:
# A list of ArtifactCacheSpec instances describing the remote artifact caches.
#
def _configured_remote_artifact_cache_specs(context, project):
return list(utils._deduplicate(
project.artifact_cache_specs + context.artifact_cache_specs))
# Copyright (C) 2019 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
#
from collections.abc import Mapping
import multiprocessing
import os
from . import utils
from . import _yaml
from ._cas import CASRemote
from ._message import Message, MessageType
# Base class for caches to derive from
#
class BaseCache():
# None of these should ever be called in the base class, but this appeases
# pylint
spec_class = None
spec_name = None
spec_error = None
config_node_name = None
def __init__(self, context):
self.context = context
self.extractdir = context.extractdir
self.cas = context.get_cascache()
self.casquota = context.get_casquota()
self.casquota._calculate_cache_quota()
os.makedirs(self.extractdir, exist_ok=True)
self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of _CASRemote instances.
self._remotes = {}
self.global_remote_specs = []
self.project_remote_specs = {}
self._has_fetch_remotes = False
self._has_push_remotes = False
# specs_from_config_node()
#
# Parses the configuration of remote caches from a config block.
#
# Args:
# config_node (dict): The config block, which may contain the cache's config key (cls.config_node_name)
# basedir (str): The base directory for relative paths
#
# Returns:
# A list of cache spec instances (cls.spec_class).
#
# Raises:
# LoadError, if the config block contains invalid keys.
#
@classmethod
def specs_from_config_node(cls, config_node, basedir=None):
cache_specs = []
artifacts = config_node.get(cls.config_node_name, [])
if isinstance(artifacts, Mapping):
cache_specs.append(cls.spec_class._new_from_config_node(artifacts, basedir))
elif isinstance(artifacts, list):
for spec_node in artifacts:
cache_specs.append(cls.spec_class._new_from_config_node(spec_node, basedir))
else:
provenance = _yaml.node_get_provenance(config_node, key=cls.config_node_name)
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"%s: '%s' must be a single 'url:' mapping, or a list of mappings" %
(str(provenance), cls.config_node_name))
return cache_specs
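For reference, both accepted config shapes look like this; a minimal sketch using plain Python dicts in place of parsed YAML nodes, with hypothetical URLs (for SourceCache the key is 'source-caches' rather than 'artifacts'):

    # A single 'url:' mapping...
    single = {'artifacts': {'url': 'https://cache.example.com', 'push': True}}

    # ...or a list of such mappings, in priority order.
    several = {'artifacts': [{'url': 'https://a.example.com'},
                             {'url': 'https://b.example.com', 'push': True}]}

    # Any other shape (e.g. a bare string) raises LoadError(INVALID_DATA).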
# _configured_remote_cache_specs():
#
# Return the list of configured remotes for a given project, in priority
# order. This takes into account the user and project configuration.
#
# Args:
# context (Context): The BuildStream context
# project (Project): The BuildStream project
#
# Returns:
# A list of cache spec instances describing the remote caches.
#
@classmethod
def _configured_remote_cache_specs(cls, context, project):
project_overrides = context.get_overrides(project.name)
project_extra_specs = cls.specs_from_config_node(project_overrides)
project_specs = getattr(project, cls.spec_name)
context_specs = getattr(context, cls.spec_name)
return list(utils._deduplicate(
project_extra_specs + project_specs + context_specs))
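The precedence this establishes is: project overrides first, then project config, then user config, with later duplicates dropped. A standalone sketch, assuming utils._deduplicate() keeps the first occurrence of each spec (which the precedence test further down relies on):

    def deduplicate(sequence):
        # Mirror of utils._deduplicate: yield items, keeping first occurrences.
        seen = set()
        for item in sequence:
            if item not in seen:
                seen.add(item)
                yield item

    override_specs = ['https://override.example.com']    # hypothetical URLs
    project_specs = ['https://project.example.com', 'https://override.example.com']
    user_specs = ['https://user.example.com']

    print(list(deduplicate(override_specs + project_specs + user_specs)))
    # ['https://override.example.com', 'https://project.example.com', 'https://user.example.com']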
# setup_remotes():
#
# Sets up which remotes to use
#
# Args:
# use_config (bool): Whether to use project configuration
# remote_url (str): Remote cache URL
#
# This requires that all of the projects which are to be processed in the session
# have already been loaded and are observable in the Context.
#
def setup_remotes(self, *, use_config=False, remote_url=None):
# Ensure we do not double-initialise since this can be expensive
assert not self._remotes_setup
self._remotes_setup = True
# Initialize remote caches. We allow the commandline to override
# the user config in some cases (for example `bst artifact push --remote=...`).
has_remote_caches = False
if remote_url:
self._set_remotes([self.__class__.spec_class(remote_url, push=True)])
has_remote_caches = True
if use_config:
for project in self.context.get_projects():
caches = self._configured_remote_cache_specs(self.context, project)
if caches: # caches is a list of spec_class instances
self._set_remotes(caches, project=project)
has_remote_caches = True
if has_remote_caches:
self._initialize_remotes()
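Hypothetical usage sketch, assuming a fully loaded Context which exposes its artifact cache as a property, the same way the sourcecache property added below exposes the source cache (URL hypothetical):

    artifacts = context.artifactcache          # an ArtifactCache, i.e. a BaseCache subclass
    artifacts.setup_remotes(use_config=True)   # honour user and project configuration

    # or force a single push remote, as `bst artifact push --remote=...` does:
    # artifacts.setup_remotes(remote_url='https://cache.example.com:11001')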
# initialize_remotes():
#
# This will contact each remote cache.
#
# Args:
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
remote_specs = self.global_remote_specs
for project in self.project_remote_specs:
remote_specs += self.project_remote_specs[project]
remote_specs = list(utils._deduplicate(remote_specs))
remotes = {}
q = multiprocessing.Queue()
for remote_spec in remote_specs:
error = CASRemote.check_remote(remote_spec, q)
if error and on_failure:
on_failure(remote_spec.url, error)
elif error:
raise self.__class__.spec_error(error)
else:
self._has_fetch_remotes = True
if remote_spec.push:
self._has_push_remotes = True
remotes[remote_spec.url] = CASRemote(remote_spec)
for project in self.context.get_projects():
remote_specs = self.global_remote_specs
if project in self.project_remote_specs:
remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
project_remotes = []
for remote_spec in remote_specs:
# Errors are already handled in the loop above,
# skip unreachable remotes here.
if remote_spec.url not in remotes:
continue
remote = remotes[remote_spec.url]
project_remotes.append(remote)
self._remotes[project] = project_remotes
################################################
# Local Private Methods #
################################################
# _message()
#
# Local message propagator
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self.context.message(
Message(None, message_type, message, **args))
# _set_remotes():
#
# Set the list of remote caches. If project is None, the global list of
# remote caches will be set, which is used by all projects. If a project is
# specified, the per-project list of remote caches will be set.
#
# Args:
# remote_specs (list): List of cache spec instances, in priority order.
# project (Project): The Project instance for project-specific remotes
def _set_remotes(self, remote_specs, *, project=None):
if project is None:
# global remotes
self.global_remote_specs = remote_specs
else:
self.project_remote_specs[project] = remote_specs
# _initialize_remotes()
#
# An internal wrapper which calls the abstract method and
# takes care of messaging
#
def _initialize_remotes(self):
def remote_failed(url, error):
self._message(MessageType.WARN, "Failed to initialize remote {}: {}".format(url, error))
with self.context.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
@@ -150,15 +150,15 @@ class CASCache():
#
# Returns: path to extracted directory
#
def extract(self, ref, path, subdir=None):
def extract(self, ref, path, subdir=None, hash_dir=True):
tree = self.resolve_ref(ref, update_mtime=True)
originaldest = dest = os.path.join(path, tree.hash)
originaldest = dest = os.path.join(path, tree.hash) if hash_dir else str(path)
# If artifact is already extracted, check if the optional subdir
# has also been extracted. If the artifact has not been extracted
# a full extraction would include the optional subdir
if os.path.isdir(dest):
if os.path.isdir(dest) and hash_dir is True:
if subdir:
if not os.path.isdir(os.path.join(dest, subdir)):
dest = os.path.join(dest, subdir)
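A hedged usage sketch of the new hash_dir flag, assuming a CASCache instance cas and a valid ref; paths are hypothetical:

    hashed = cas.extract(ref, '/tmp/extract')                # -> /tmp/extract/<tree-hash>
    direct = cas.extract(ref, '/tmp/stage', hash_dir=False)  # -> /tmp/stage itself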
@@ -31,6 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
from ._cas import CASCache, CASQuota, CASCacheUsage
from ._workspaces import Workspaces, WorkspaceProjectCache
from .plugin import _plugin_lookup
@@ -64,6 +65,9 @@ class Context():
# The directory where various sources are stored
self.sourcedir = None
# specs for source cache remotes
self.source_cache_specs = None
# The directory where build sandboxes will be created
self.builddir = None
@@ -148,6 +152,7 @@ class Context():
self._message_handler = None
self._message_depth = deque()
self._artifactcache = None
self._sourcecache = None
self._projects = []
self._project_overrides = {}
self._workspaces = None
@@ -165,6 +170,7 @@
# Args:
# config (filename): The user specified configuration file, if any
#
# Raises:
# LoadError
#
@@ -249,6 +255,9 @@
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
# Load source cache config
self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
# Load pull build trees configuration
@@ -330,6 +339,13 @@
def get_cache_usage(self):
return CASCacheUsage(self.get_casquota())
@property
def sourcecache(self):
if not self._sourcecache:
self._sourcecache = SourceCache(self)
return self._sourcecache
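Minimal usage sketch of the new property, assuming a loaded Context; the first access constructs the SourceCache and later accesses reuse it:

    sc = context.sourcecache          # constructed on first access
    assert sc is context.sourcecache  # subsequent accesses return the same instance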
# add_project():
#
# Add a project to the context.
@@ -266,6 +266,15 @@ class SandboxError(BstError):
super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# SourceCacheError
#
# Raised when errors are encountered in the source caches
#
class SourceCacheError(BstError):
def __init__(self, message, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# ArtifactError
#
# Raised when errors are encountered in the artifact caches
@@ -31,6 +31,7 @@ from ._profile import Topics, profile_start, profile_end
from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
from .sandbox import SandboxRemote
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
@@ -137,6 +138,7 @@ class Project():
self._shell_host_files = [] # A list of HostMount objects
self.artifact_cache_specs = None
self.source_cache_specs = None
self.remote_execution_specs = None
self._sandbox = None
self._splits = None
@@ -236,7 +238,7 @@ class Project():
'artifacts', 'options',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
'sources', '(@)'
'sources', 'source-caches', '(@)'
])
# create_element()
@@ -570,6 +572,9 @@ class Project():
parent = self.junction._get_project()
self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
# Load source caches with pull/push config
self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
override_specs = SandboxRemote.specs_from_config_node(
@@ -62,7 +62,7 @@ class FetchQueue(Queue):
# This will automatically skip elements which
# have no sources.
if element._get_consistency() == Consistency.CACHED:
if element._get_consistency() >= Consistency.CACHED:
return QueueStatus.SKIP
return QueueStatus.READY
@@ -75,4 +75,4 @@ class FetchQueue(Queue):
element._update_state()
# Successful fetch, we must be CACHED now
assert element._get_consistency() == Consistency.CACHED
assert element._get_consistency() >= Consistency.CACHED
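The switch from == to >= relies on the new STAGED state sorting above CACHED, so a source that exists only in the local CAS also needs no fetch. A standalone sketch of the ordering; STAGED comes from this diff, the other values are assumed to match _types.py:

    INCONSISTENT = 0   # no ref, consistency unknown
    RESOLVED = 1       # ref known, not yet cached
    CACHED = 2         # present in the (unstaged) source cache
    STAGED = 3         # present in the local CAS

    consistency = STAGED
    assert consistency >= CACHED   # the fetch can be skipped either way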
#
# Copyright (C) 2019 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
#
from ._cas import CASRemoteSpec
from ._basecache import BaseCache
from ._exceptions import SourceCacheError
class SourceCacheSpec(CASRemoteSpec):
"""Holds configuration for a remote used for the source cache.
Args:
url (str): Location of the remote source cache
push (bool): Whether we should attempt to push sources to this cache,
in addition to pulling from it.
instance-name (str): Name, if any, of the server instance
"""
class SourceCache(BaseCache):
"""SourceCache()
Class that keeps config of remotes and deals with caching of sources.
"""
spec_class = SourceCacheSpec
spec_name = "source_cache_specs"
spec_error = SourceCacheError
config_node_name = "source-caches"
def get_source_fullname(self, source):
"""Get a string for the sources CAS ref"""
return "{}/{}/{}".format(
'source',
source.get_kind(),
source.get_ref())
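For illustration, a hypothetical git source with a sha ref would map to the following ref string:

    ref = '{}/{}/{}'.format('source', 'git', '1234abcd')  # kind and ref are hypothetical
    assert ref == 'source/git/1234abcd'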
def contains(self, source):
"""Checks the local CAS for source"""
ref = self.get_source_fullname(source)
return self.cas.contains(ref)
def commit(self, source, content):
"""Stages and captures a source into the local CAS"""
ref = self.get_source_fullname(source)
self.cas.commit([ref], content)
def export(self, source, vdir):
"""Exports a source in the CAS to a virtual directory"""
ref = self.get_source_fullname(source)
self.cas.extract(ref, vdir, hash_dir=False)
def fetch(self, source, previous_sources, *, progress=None):
"""Tries to fetch the source from remote CAS servers"""
def push(self, source):
"""Pushes a source to the configured remote CAS servers"""
@@ -1461,9 +1461,16 @@ class Element(Plugin):
detail = "Element type either does not expect a buildtree or it was explictily cached without one."
self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
else:
# No workspace or cached buildtree, stage source directly
for source in self.sources():
source._stage(temp_staging_directory)
# stage the last source from the source cache if it is already staged there
last_source = list(self.sources())[-1]
if last_source._get_consistency() == Consistency.STAGED:
self.info("new fangled staging")
self._get_context().sourcecache.export(last_source, temp_staging_directory)
# No workspace, cached buildtree or staged sources; stage sources directly
else:
self.info("old fangled staging")
for source in self.sources():
source._stage(temp_staging_directory)
vdirectory.import_files(import_dir)
@@ -2077,7 +2084,7 @@ class Element(Plugin):
def _fetch(self):
previous_sources = []
for source in self.sources():
if source._get_consistency() < Consistency.CACHED:
if source._get_consistency() < Consistency.STAGED:
source._fetch(previous_sources)
previous_sources.append(source)
@@ -170,6 +170,8 @@ from . import _yaml, utils
from ._exceptions import BstError, ImplError, ErrorDomain
from ._projectrefs import ProjectRefStorage
from ._message import Message, MessageType
class SourceError(BstError):
"""This exception should be raised by :class:`.Source` implementations
@@ -288,6 +290,8 @@ class Source(Plugin):
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
context, project, provenance, "source")
self.__source_cache = context.sourcecache
self.__element_name = meta.element_name # The name of the element owning this source
self.__element_index = meta.element_index # The index of the source in the owning element's source list
self.__element_kind = meta.element_kind # The kind of the element owning this source
@@ -463,7 +467,7 @@
Implementors should raise :class:`.SourceError` when encountering
some system error.
"""
self.stage(directory)
self._stage(directory)
def get_source_fetchers(self):
"""Get the objects that are used for fetching
@@ -669,7 +673,7 @@
#
def _update_state(self):
if self.__consistency < Consistency.CACHED:
if self.__consistency < Consistency.STAGED:
# Source consistency interrogations are silent.
context = self._get_context()
@@ -681,6 +685,10 @@
if self.__consistency == Consistency.CACHED:
self.validate_cache()
if (self.__consistency <= Consistency.STAGED and
self.__source_cache.contains(self)):
self.__consistency = Consistency.STAGED
# Return cached consistency
#
def _get_consistency(self):
@@ -690,8 +698,14 @@
#
# Args:
# previous_sources (list): List of Sources listed prior to this source
# fetch_original (bool): whether to fetch full source, or use local CAS
#
def _fetch(self, previous_sources):
def _fetch(self, previous_sources, fetch_original=False):
# return if we've got the source
if self.__source_cache.contains(self) and fetch_original is False:
self._get_context().message(Message(None, MessageType.INFO, "source cached"))
return
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
@@ -709,7 +723,15 @@
def _stage(self, directory):
staging_directory = self.__ensure_directory(directory)
self.stage(staging_directory)
if self._get_consistency() == Consistency.STAGED:
self.__source_cache.export(self, staging_directory)
elif self._get_consistency() == Consistency.CACHED:
self.stage(staging_directory)
# Stage into the local CAS; this depends on the previous sources
# having already been committed, which is important for the patch plugin
self.__source_cache.commit(self, staging_directory)
# Wrapper for init_workspace()
def _init_workspace(self, directory):
@@ -80,6 +80,13 @@ class Consistency():
source cache. Only cached sources can be staged.
"""
STAGED = 3
"""STAGED
Sources are staged in the local CAS, but their unstaged form is not
present in the source cache.
"""
class CoreWarnings():
"""CoreWarnings()
@@ -3,7 +3,7 @@ import pytest
import itertools
import os
from buildstream._artifactcache import ArtifactCacheSpec, _configured_remote_artifact_cache_specs
from buildstream._artifactcache import ArtifactCacheSpec, ArtifactCache
from buildstream._context import Context
from buildstream._project import Project
from buildstream.utils import _deduplicate
@@ -104,7 +104,7 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
project.ensure_fully_loaded()
# Use the helper from the artifactcache module to parse our configuration.
parsed_cache_specs = _configured_remote_artifact_cache_specs(context, project)
parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
# Verify that it was correctly read.
expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
#
# Copyright (C) 2019 Bloomberg Finance L.P.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
import os
import pytest
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.plugintestutils.runcli import cli
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
# Assert that if either the client key or client cert is specified
# without specifying its counterpart, we get a comprehensive LoadError
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize('config_key, config_value', [
('client-cert', 'client.crt'),
('client-key', 'client.key')
])
def test_missing_certs(cli, datafiles, config_key, config_value):
project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
project_conf = {
'name': 'test',
'source-caches': {
'url': 'https://cache.example.com:12345',
'push': 'true',
config_key: config_value
}
}
project_conf_file = os.path.join(project, 'project.conf')
_yaml.dump(project_conf, project_conf_file)
# Use `source fetch` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
result = cli.run(project=project, args=['source', 'fetch', 'element.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
#
# Copyright (C) 2019 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
#
import filecmp
import os
import pytest
from buildstream._context import Context
from buildstream._project import Project
from buildstream.source import Source
from buildstream import _yaml
from buildstream.plugintestutils.runcli import cli
from tests.testutils.artifactshare import create_artifact_share
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
def dummy_message_handler(message, context):
pass
@pytest.mark.datafiles(DATA_DIR)
def test_source_staged(tmpdir, cli, datafiles):
project_dir = os.path.join(datafiles.dirname, datafiles.basename, 'project')
casdir = os.path.join(datafiles.dirname, datafiles.basename, 'cache', 'cas')
cachedir = os.path.join(str(tmpdir), 'cache')
cli.configure({
'cachedir': cachedir
})
# set up minimal context
context = Context()
context.load()
# load project and sourcecache
project = Project(project_dir, context)
project.ensure_fully_loaded()
context.cachedir = cachedir
context.set_message_handler(dummy_message_handler)
sourcecache = context.sourcecache
cas = context.get_cascache()
cli.run(project=project_dir, args=["build", "import-bin.bst"])
# now check that the source is in the refs file; this is pretty messy but
# seems to be the only way to get at the sources
source = list(project.load_elements(["import-bin.bst"])[0].sources())[0]
assert sourcecache.contains(source)
# Extract the file and check it's the same as the one we imported
refname = sourcecache.get_source_fullname(source)
extractdir = os.path.join(str(tmpdir), "extract")
extract = cas.extract(refname, extractdir)
hellopath = os.path.join("usr", "bin", "hello")
file1 = os.path.join(extract, hellopath)
file2 = os.path.join(project_dir, "files", "bin-files", hellopath)
assert filecmp.cmp(file1, file2) is True
kind: autotools
kind: compose
depends:
- filename: import-bin.bst
type: build
- filename: import-dev.bst
type: build
config:
# Don't try running the sandbox, we don't have a
# runtime to run anything in this context.
integrate: False
kind: import
sources:
- kind: local
path: files/bin-files