Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (25), showing 575 additions and 108 deletions
......@@ -17,6 +17,13 @@ buildstream 1.3.1
to avoid having to specify the dependency type for every entry in
'depends'.
o Source plugins may now request access to previous sources during track and
fetch by setting `BST_REQUIRES_PREVIOUS_SOURCES_TRACK` and/or
`BST_REQUIRES_PREVIOUS_SOURCES_FETCH` attributes.
o Add new `pip` source plugin for downloading python packages using pip,
based on requirements files from previous sources.
=================
buildstream 1.1.5
......
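As a quick illustration of the first NEWS entry above, here is a minimal sketch of a source plugin opting into previous-sources access. The plugin kind and the `manifest.txt` file are hypothetical, and the other mandatory Source methods are omitted:

    import os
    from buildstream import Source, SourceError

    class ManifestSource(Source):
        # Ask the core to stage all preceding sources of the element
        # into a directory before track() is called
        BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True

        def track(self, previous_sources_dir):
            # The element's earlier sources are staged under previous_sources_dir
            manifest = os.path.join(previous_sources_dir, 'manifest.txt')
            if not os.path.exists(manifest):
                raise SourceError("{}: no manifest.txt in previous sources".format(self))
            with open(manifest) as f:
                return f.read().strip()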
......@@ -30,6 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
from .sandbox import Sandbox, SandboxFlags
from .plugin import Plugin
from .source import Source, SourceError, Consistency, SourceFetcher
from .element import Element, ElementError, Scope
from .element import Element, ElementError
from .element_enums import Scope
from .buildelement import BuildElement
from .scriptelement import ScriptElement
......@@ -21,7 +21,7 @@ import os
import string
from collections import Mapping, namedtuple
from ..element import _KeyStrength
from ..element_enums import _KeyStrength
from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
from .._message import Message, MessageType
from .. import utils
......
......@@ -24,6 +24,7 @@ import os
import signal
import stat
import tempfile
import uuid
from urllib.parse import urlparse
import grpc
......@@ -309,8 +310,11 @@ class CASCache(ArtifactCache):
# Upload any blobs missing on the server
skipped_remote = False
for digest in missing_blobs.values():
uuid_ = uuid.uuid4()
resource_name = '/'.join(['uploads', str(uuid_), 'blobs',
digest.hash, str(digest.size_bytes)])
def request_stream():
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
with open(self.objpath(digest), 'rb') as f:
assert os.fstat(f.fileno()).st_size == digest.size_bytes
offset = 0
......@@ -747,7 +751,7 @@ class CASCache(ArtifactCache):
yield from self._required_blobs(dirnode.digest)
def _fetch_blob(self, remote, digest, out):
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
request = bytestream_pb2.ReadRequest()
request.resource_name = resource_name
request.read_offset = 0
......
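The conforming ByteStream resource-name formats this hunk adopts can be summarized in a small sketch, using only names that appear in the hunk above:

    import uuid

    def upload_resource_name(digest):
        # Writes go to: uploads/{uuid4}/blobs/{hash}/{size_bytes}
        return '/'.join(['uploads', str(uuid.uuid4()), 'blobs',
                         digest.hash, str(digest.size_bytes)])

    def download_resource_name(digest):
        # Reads come from: blobs/{hash}/{size_bytes}
        return '/'.join(['blobs', digest.hash, str(digest.size_bytes)])

Using '/'.join() rather than os.path.join() matters here: resource names are protocol strings, not filesystem paths.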
......@@ -23,6 +23,7 @@ import os
import signal
import sys
import tempfile
import uuid
import click
import grpc
......@@ -130,12 +131,21 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
def Read(self, request, context):
resource_name = request.resource_name
client_digest = _digest_from_resource_name(resource_name)
assert request.read_offset <= client_digest.size_bytes
client_digest = _digest_from_download_resource_name(resource_name)
if client_digest is None:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
if request.read_offset > client_digest.size_bytes:
context.set_code(grpc.StatusCode.OUT_OF_RANGE)
return
try:
with open(self.cas.objpath(client_digest), 'rb') as f:
assert os.fstat(f.fileno()).st_size == client_digest.size_bytes
if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
if request.read_offset > 0:
f.seek(request.read_offset)
......@@ -163,12 +173,18 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
resource_name = None
with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
for request in request_iterator:
assert not finished
assert request.write_offset == offset
if finished or request.write_offset != offset:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
if resource_name is None:
# First request
resource_name = request.resource_name
client_digest = _digest_from_resource_name(resource_name)
client_digest = _digest_from_upload_resource_name(resource_name)
if client_digest is None:
context.set_code(grpc.StatusCode.NOT_FOUND)
return response
try:
_clean_up_cache(self.cas, client_digest.size_bytes)
except ArtifactTooLargeException as e:
......@@ -177,14 +193,20 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return response
elif request.resource_name:
# If it is set on subsequent calls, it **must** match the value of the first request.
assert request.resource_name == resource_name
if request.resource_name != resource_name:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
out.write(request.data)
offset += len(request.data)
if request.finish_write:
assert client_digest.size_bytes == offset
if client_digest.size_bytes != offset:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
out.flush()
digest = self.cas.add_object(path=out.name)
assert digest.hash == client_digest.hash
if digest.hash != client_digest.hash:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
finished = True
assert finished
......@@ -247,13 +269,48 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
return response
def _digest_from_resource_name(resource_name):
def _digest_from_download_resource_name(resource_name):
parts = resource_name.split('/')
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
parts.insert(0, 'blobs')
if len(parts) != 3 or parts[0] != 'blobs':
return None
try:
digest = remote_execution_pb2.Digest()
digest.hash = parts[1]
digest.size_bytes = int(parts[2])
return digest
except ValueError:
return None
def _digest_from_upload_resource_name(resource_name):
parts = resource_name.split('/')
assert len(parts) == 2
digest = remote_execution_pb2.Digest()
digest.hash = parts[0]
digest.size_bytes = int(parts[1])
return digest
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
parts.insert(0, 'uploads')
parts.insert(1, str(uuid.uuid4()))
parts.insert(2, 'blobs')
if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
return None
try:
uuid_ = uuid.UUID(hex=parts[1])
if uuid_.version != 4:
return None
digest = remote_execution_pb2.Digest()
digest.hash = parts[3]
digest.size_bytes = int(parts[4])
return digest
except ValueError:
return None
def _has_object(cas, digest):
......
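Together these parsers accept both conforming names and the bare `{hash}/{size}` names sent by BuildStream 1.1.x clients, and they return None rather than tripping an assertion so the servicer can reply with a proper gRPC status code. An illustration with hypothetical values:

    SHA = 'deadbeef' * 8   # hypothetical 64-character blob hash

    # Conforming names parse into Digest messages:
    _digest_from_download_resource_name('blobs/{}/1024'.format(SHA))
    _digest_from_upload_resource_name(
        'uploads/3fa85f64-5717-4562-b3fc-2c963f66afa6/blobs/{}/1024'.format(SHA))

    # Legacy 1.1.x names are normalized before parsing:
    _digest_from_download_resource_name('{}/1024'.format(SHA))

    # Malformed names yield None instead of an assertion failure:
    assert _digest_from_download_resource_name('not-a-resource') is None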
......@@ -522,14 +522,15 @@ class Loader():
element = Element._new_from_meta(meta_element, platform.artifactcache)
element._preflight()
for source in element.sources():
sources = list(element.sources())
for idx, source in enumerate(sources):
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
if fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source._fetch()
source._fetch(sources[0:idx])
else:
detail = "Try fetching the project with `bst fetch {}`".format(filename)
raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
......
......@@ -31,6 +31,7 @@ from ._options import OptionPool
from ._artifactcache import ArtifactCache
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
from .plugin import CoreWarnings
from ._projectrefs import ProjectRefs, ProjectRefStorage
from ._versions import BST_FORMAT_VERSION
from ._loader import Loader
......@@ -105,7 +106,7 @@ class Project():
self.first_pass_config = ProjectConfig()
self.junction = junction # The junction Element object, if this is a subproject
self.fail_on_overlap = False # Whether overlaps are treated as errors
self.ref_storage = None # ProjectRefStorage setting
self.base_environment = {} # The base set of environment variables
self.base_env_nocache = None # The base nocache mask (list) for the environment
......@@ -120,6 +121,8 @@ class Project():
self._cli_options = cli_options
self._cache_key = None
self._fatal_warnings = [] # A list of warnings which should trigger an error
self._shell_command = [] # The default interactive shell command
self._shell_environment = {} # Statically set environment vars
self._shell_host_files = [] # A list of HostMount objects
......@@ -456,7 +459,7 @@ class Project():
'split-rules', 'elements', 'plugins',
'aliases', 'name',
'artifacts', 'options',
'fail-on-overlap', 'shell',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors'
])
......@@ -478,8 +481,25 @@ class Project():
# Load project split rules
self._splits = _yaml.node_get(config, Mapping, 'split-rules')
# Fail on overlap
self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
# Fatal warnings
self._fatal_warnings = _yaml.node_get(config, list, 'fatal-warnings', default_value=[])
# Support backwards compatibility for fail-on-overlap
fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap', default_value=None)
if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap:
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Deprecation check
if fail_on_overlap is not None:
self._context.message(
Message(
None,
MessageType.WARN,
"Use of fail-on-overlap within project.conf " +
"is deprecated. Consider using fatal-warnings instead."
)
)
# Load project.refs if it exists, this may be ignored.
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
......@@ -712,3 +732,17 @@ class Project():
# paths are passed in relative to the project, but must be absolute
origin_dict['path'] = os.path.join(self.directory, path)
destination.append(origin_dict)
# _warning_is_fatal():
#
# Returns true if the warning in question should be considered fatal based on
# the project configuration.
#
# Args:
# warning_str (str): The warning configuration string to check against
#
# Returns:
# (bool): True if the warning should be considered fatal and cause an error.
#
def _warning_is_fatal(self, warning_str):
return warning_str in self._fatal_warnings
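In project.conf terms, the backwards-compatible migration this code supports looks roughly like this (a sketch):

    # Deprecated, still honoured:
    fail-on-overlap: True

    # Preferred replacement:
    fatal-warnings:
    - overlaps

When fail-on-overlap is set to True, 'overlaps' is appended to the fatal warnings list; whenever the key is present at all, the deprecation warning above is emitted.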
......@@ -40,8 +40,10 @@ class FetchQueue(Queue):
self._skip_cached = skip_cached
def process(self, element):
previous_sources = []
for source in element.sources():
source._fetch()
source._fetch(previous_sources)
previous_sources.append(source)
def status(self, element):
# state of dependencies may have changed, recalculate element state
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 14
BST_FORMAT_VERSION = 16
# The base BuildStream artifact version
......@@ -33,4 +33,4 @@ BST_FORMAT_VERSION = 14
# or if buildstream was changed in a way which can cause
# the same cache key to produce something that is no longer
# the same.
BST_CORE_ARTIFACT_VERSION = 4
BST_CORE_ARTIFACT_VERSION = 5
......@@ -13,10 +13,6 @@ element-path: .
# Store source references in element files
ref-storage: inline
# Overlaps are just warnings
fail-on-overlap: False
# Variable Configuration
#
variables:
......
......@@ -78,7 +78,6 @@ import stat
import copy
from collections import Mapping, OrderedDict
from contextlib import contextmanager
from enum import Enum
import tempfile
import shutil
......@@ -94,44 +93,13 @@ from . import _cachekey
from . import _signals
from . import _site
from ._platform import Platform
from .plugin import CoreWarnings
from .sandbox._config import SandboxConfig
from .storage.directory import Directory
from .storage._filebaseddirectory import FileBasedDirectory, VirtualDirectoryError
# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):
# Includes strong cache keys of all build dependencies and their
# runtime dependencies.
STRONG = 1
# Includes names of direct build dependencies but does not include
# cache keys of dependencies.
WEAK = 2
class Scope(Enum):
"""Types of scope for a given element"""
ALL = 1
"""All elements which the given element depends on, following
all elements required for building. Including the element itself.
"""
BUILD = 2
"""All elements required for building the element, including their
respective run dependencies. Not including the given element itself.
"""
RUN = 3
"""All elements required for running the element. Including the element
itself.
"""
from .storage._filebaseddirectory import FileBasedDirectory
from .storage.directory import VirtualDirectoryError
from .element_enums import _KeyStrength, Scope
class ElementError(BstError):
......@@ -746,32 +714,23 @@ class Element(Plugin):
ignored[dep.name] = result.ignored
if overlaps:
overlap_error = overlap_warning = False
error_detail = warning_detail = "Staged files overwrite existing files in staging area:\n"
overlap_warning = False
warning_detail = "Staged files overwrite existing files in staging area:\n"
for f, elements in overlaps.items():
overlap_error_elements = []
overlap_warning_elements = []
# The bottom item overlaps nothing
overlapping_elements = elements[1:]
for elm in overlapping_elements:
element = self.search(scope, elm)
element_project = element._get_project()
if not element.__file_is_whitelisted(f):
if element_project.fail_on_overlap:
overlap_error_elements.append(elm)
overlap_error = True
else:
overlap_warning_elements.append(elm)
overlap_warning = True
overlap_warning_elements.append(elm)
overlap_warning = True
warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
error_detail += _overlap_error_detail(f, overlap_error_elements, elements)
if overlap_warning:
self.warn("Non-whitelisted overlaps detected", detail=warning_detail)
if overlap_error:
raise ElementError("Non-whitelisted overlaps detected and fail-on-overlaps is set",
detail=error_detail, reason="overlap-error")
self.warn("Non-whitelisted overlaps detected", detail=warning_detail,
warning_token=CoreWarnings.OVERLAPS)
if ignored:
detail = "Not staging files which would replace non-empty directories:\n"
......@@ -1270,6 +1229,12 @@ class Element(Plugin):
# Prepend provenance to the error
raise ElementError("{}: {}".format(self, e), reason=e.reason) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError("{}: {} cannot be the first source of an element "
"as it requires access to previous sources"
.format(self, self.__sources[0]))
# Preflight the sources
for source in self.sources():
source._preflight()
......@@ -1313,9 +1278,9 @@ class Element(Plugin):
#
def _track(self):
refs = []
for source in self.__sources:
for index, source in enumerate(self.__sources):
old_ref = source.get_ref()
new_ref = source._track()
new_ref = source._track(self.__sources[0:index])
refs.append((source._get_unique_id(), new_ref))
# Complementary warning that the new ref will be unused.
......@@ -2054,9 +2019,7 @@ class Element(Plugin):
'cache': type(self.__artifacts).__name__
}
# fail-on-overlap setting cannot affect elements without dependencies
if project.fail_on_overlap and dependencies:
self.__cache_key_dict['fail-on-overlap'] = True
self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
cache_key_dict = self.__cache_key_dict.copy()
cache_key_dict['dependencies'] = dependencies
......
#
# Copyright (C) 2018 Bloomberg LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
# Jim MacArthur <jim.macarthur@codethink.co.uk>
"""
Element - Globally visible enumerations
=======================================
"""
from enum import Enum
# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):
# Includes strong cache keys of all build dependencies and their
# runtime dependencies.
STRONG = 1
# Includes names of direct build dependencies but does not include
# cache keys of dependencies.
WEAK = 2
class Scope(Enum):
"""Types of scope for a given element"""
ALL = 1
"""All elements which the given element depends on, following
all elements required for building. Including the element itself.
"""
BUILD = 2
"""All elements required for building the element, including their
respective run dependencies. Not including the given element itself.
"""
RUN = 3
"""All elements required for running the element. Including the element
itself.
"""
......@@ -47,6 +47,23 @@ it is mandatory to implement the following abstract methods:
Once all configuration has been loaded and preflight checks have passed,
this method is used to inform the core of a plugin's unique configuration.
Configurable Warnings
---------------------
Warnings raised through calling :func:`Plugin.warn() <buildstream.plugin.Plugin.warn>` can provide an optional
parameter ``warning_token``; if the warning is configured as fatal within the project
configuration, a :class:`PluginError` will be raised.

Configurable warnings will be prefixed with :func:`Plugin.get_kind() <buildstream.plugin.Plugin.get_kind>`
within BuildStream and must be prefixed as such in project configurations. For more detail on project
configuration see :ref:`Configurable Warnings <configurable_warnings>`.
It is important to document these warnings in your plugin documentation to allow users to make full use of them
while configuring their projects.
Example
~~~~~~~
If the :class:`git <buildstream.plugins.sources.git.GitSource>` plugin uses the warning ``"inconsistent-submodule"``
then it could be referenced in project configuration as ``"git:inconsistent-submodule"``.
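A sketch of how a plugin might declare and raise a configurable warning; the plugin kind ``foo``, the token ``bad-checksum`` and the helper ``_checksum_matches()`` are all hypothetical:

    from buildstream import Source

    WARN_BAD_CHECKSUM = "bad-checksum"   # hypothetical configurable warning token

    class FooSource(Source):

        def fetch(self):
            if not self._checksum_matches():   # hypothetical helper
                # Raises PluginError if the project configures
                # 'foo:bad-checksum' as fatal
                self.warn("Downloaded file does not match the expected checksum",
                          warning_token=WARN_BAD_CHECKSUM)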
Plugin Structure
----------------
......@@ -166,7 +183,6 @@ class Plugin():
# Infer the kind identifier
modulename = type(self).__module__
self.__kind = modulename.split('.')[-1]
self.debug("Created: {}".format(self))
def __del__(self):
......@@ -473,14 +489,28 @@ class Plugin():
"""
self.__message(MessageType.INFO, brief, detail=detail)
def warn(self, brief, *, detail=None):
"""Print a warning message
def warn(self, brief, *, detail=None, warning_token=None):
"""Print a warning message, checks warning_token against project configuration
Args:
brief (str): The brief message
detail (str): An optional detailed message, can be multiline output
warning_token (str): An optional configurable warning associated with this warning;
this will cause a PluginError to be raised if the warning is configured as fatal.
(*Since 1.4*)
Raises:
(:class:`.PluginError`): When warning_token is considered fatal by the project configuration
"""
self.__message(MessageType.WARN, brief, detail=detail)
if warning_token:
warning_token = _prefix_warning(self, warning_token)
brief = "[{}]: {}".format(warning_token, brief)
project = self._get_project()
if project._warning_is_fatal(warning_token):
raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
self.__message(MessageType.WARN, brief=brief, detail=detail)
def log(self, brief, *, detail=None):
"""Log a message into the plugin's log file
......@@ -709,6 +739,32 @@ class Plugin():
return self.name
class CoreWarnings():
"""CoreWarnings()
Common warnings raised by BuildStream's core functionality are defined in this class.
"""
OVERLAPS = "overlaps"
"""
This warning will be produced when buildstream detects an overlap on an element
which is not whitelisted. See :ref:`Overlap Whitelist <public_overlap_whitelist>`
"""
REF_NOT_IN_TRACK = "ref-not-in-track"
"""
This warning will be produced when a source is configured with a reference
which is found to be invalid based on the configured track
"""
__CORE_WARNINGS = [
value
for name, value in CoreWarnings.__dict__.items()
if not name.startswith("__")
]
# Hold on to a lookup table by counter of all instantiated plugins.
# We use this to send the id back from child processes so we can lookup
# corresponding element/source in the master process.
......@@ -739,6 +795,23 @@ def _plugin_lookup(unique_id):
return __PLUGINS_TABLE[unique_id]
# _prefix_warning():
#
# Prefix a warning with the plugin kind. CoreWarnings are not prefixed.
#
# Args:
# plugin (Plugin): The plugin which raised the warning
# warning (str): The warning to prefix
#
# Returns:
# (str): A prefixed warning
#
def _prefix_warning(plugin, warning):
if any((warning is core_warning for core_warning in __CORE_WARNINGS)):
return warning
return "{}:{}".format(plugin.get_kind(), warning)
# No need for unregister, WeakValueDictionary() will remove entries
# in itself when the referenced plugins are garbage collected.
def _plugin_register(plugin):
......
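The prefixing is deliberately asymmetric: plugin warnings gain a '{kind}:' prefix while core warnings pass through untouched. Illustratively, with a git source plugin instance (variable name hypothetical):

    _prefix_warning(git_source, 'inconsistent-submodules')   # -> 'git:inconsistent-submodules'
    _prefix_warning(git_source, CoreWarnings.OVERLAPS)       # -> 'overlaps'

Note that the comparison uses identity (`is`) against the CoreWarnings constants, so callers should pass the constants themselves rather than re-typed strings.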
......@@ -68,6 +68,12 @@ git - stage files from a git repository
url: upstream:baz.git
checkout: False
**Configurable Warnings:**
This plugin provides the following configurable warnings:
- 'git:inconsistent-submodule' - A submodule was found to be missing from the underlying git repository.
"""
import os
......@@ -84,6 +90,9 @@ from buildstream import utils
GIT_MODULES = '.gitmodules'
# Warnings
INCONSISTENT_SUBMODULE = "inconsistent-submodules"
# Because of handling of submodules, we maintain a GitMirror
# for the primary git source and also for each submodule it
......@@ -283,7 +292,7 @@ class GitMirror(SourceFetcher):
"underlying git repository with `git submodule add`."
self.source.warn("{}: Ignoring inconsistent submodule '{}'"
.format(self.source, submodule), detail=detail)
.format(self.source, submodule), detail=detail, warning_token=INCONSISTENT_SUBMODULE)
return None
......
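To make this warning fatal, a project would list the prefixed token documented above:

    fatal-warnings:
    - git:inconsistent-submodule

Note that the configured string must match what the plugin actually passes to warn(); the constant in this hunk reads "inconsistent-submodules" while the documentation says 'git:inconsistent-submodule', so one of the two spellings is off by a trailing 's'.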
......@@ -71,7 +71,7 @@ class OSTreeSource(Source):
self.ref = self.node_get_member(node, str, 'ref', None)
self.tracking = self.node_get_member(node, str, 'track', None)
self.mirror = os.path.join(self.get_mirror_directory(),
utils.url_directory_name(self.url))
utils.url_directory_name(self.original_url))
# (optional) Not all repos are signed. But if they are, get the gpg key
self.gpg_key_path = None
......@@ -104,10 +104,11 @@ class OSTreeSource(Source):
return None
self.ensure()
remote_name = self.ensure_remote(self.url)
with self.timed_activity("Fetching tracking ref '{}' from origin: {}"
.format(self.tracking, self.url)):
try:
_ostree.fetch(self.repo, ref=self.tracking, progress=self.progress)
_ostree.fetch(self.repo, remote=remote_name, ref=self.tracking, progress=self.progress)
except OSTreeError as e:
raise SourceError("{}: Failed to fetch tracking ref '{}' from origin {}\n\n{}"
.format(self, self.tracking, self.url, e)) from e
......@@ -116,11 +117,12 @@ class OSTreeSource(Source):
def fetch(self):
self.ensure()
remote_name = self.ensure_remote(self.url)
if not _ostree.exists(self.repo, self.ref):
with self.timed_activity("Fetching remote ref: {} from origin: {}"
.format(self.ref, self.url)):
try:
_ostree.fetch(self.repo, ref=self.ref, progress=self.progress)
_ostree.fetch(self.repo, remote=remote_name, ref=self.ref, progress=self.progress)
except OSTreeError as e:
raise SourceError("{}: Failed to fetch ref '{}' from origin: {}\n\n{}"
.format(self, self.ref, self.url, e)) from e
......@@ -171,14 +173,22 @@ class OSTreeSource(Source):
self.status("Creating local mirror for {}".format(self.url))
self.repo = _ostree.ensure(self.mirror, True)
gpg_key = None
if self.gpg_key_path:
gpg_key = 'file://' + self.gpg_key_path
try:
_ostree.configure_remote(self.repo, "origin", self.url, key_url=gpg_key)
except OSTreeError as e:
raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e
def ensure_remote(self, url):
if self.original_url == self.url:
remote_name = 'origin'
else:
remote_name = utils.url_directory_name(url)
gpg_key = None
if self.gpg_key_path:
gpg_key = 'file://' + self.gpg_key_path
try:
_ostree.configure_remote(self.repo, remote_name, url, key_url=gpg_key)
except OSTreeError as e:
raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e
return remote_name
def progress(self, percent, message):
self.status(message)
......
#
# Copyright 2018 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Chandan Singh <csingh43@bloomberg.net>
"""
pip - stage python packages using pip
=====================================
**Host dependencies:**
* ``pip`` python module
This plugin will download source distributions for specified packages using
``pip`` but will not install them. It is expected that the elements using this
source will install the downloaded packages.
Downloaded tarballs will be stored in a directory called ".bst_pip_downloads".
**Usage:**
.. code:: yaml
# Specify the pip source kind
kind: pip
# Optionally specify the index url, defaults to PyPI
# This url is used to discover new versions of packages and download them
# Projects intending to mirror their sources to a permanent location should
# use an aliased url, and declare the alias in the project configuration
url: https://mypypi.example.com/simple
# Optionally specify the path to requirements files
# Note that either 'requirements-files' or 'packages' must be defined
requirements-files:
- requirements.txt
# Optionally specify a list of additional packages
# Note that either 'requirements-files' or 'packages' must be defined
packages:
- flake8
# Optionally specify a relative staging directory
directory: path/to/stage
# Specify the ref. It is a list of strings of format
# "<package-name>==<version>", separated by "\\n".
# Usually this will be contents of a requirements.txt file where all
# package versions have been frozen.
ref: "flake8==3.5.0\\nmccabe==0.6.1\\npkg-resources==0.0.0\\npycodestyle==2.3.1\\npyflakes==1.6.0"
.. note::
The ``pip`` plugin is available since :ref:`format version 16 <project_format_version>`
"""
import errno
import hashlib
import os
import re
from buildstream import Consistency, Source, SourceError, utils
_OUTPUT_DIRNAME = '.bst_pip_downloads'
_PYPI_INDEX_URL = 'https://pypi.org/simple/'
# Used only for finding pip command
_PYTHON_VERSIONS = [
'python2.7',
'python3.0',
'python3.1',
'python3.2',
'python3.3',
'python3.4',
'python3.5',
'python3.6',
'python3.7',
]
# List of allowed extensions taken from
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
_SDIST_RE = re.compile(
r'^([a-zA-Z0-9]+?)-(.+).(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
re.IGNORECASE)
class PipSource(Source):
# pylint: disable=attribute-defined-outside-init
# We need access to previous sources at track time to use requirements.txt
# but not at fetch time as self.ref should contain sufficient information
# for this plugin
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
self.node_validate(node, ['url', 'packages', 'ref', 'requirements-files'] +
Source.COMMON_CONFIG_KEYS)
self.ref = self.node_get_member(node, str, 'ref', None)
self.original_url = self.node_get_member(node, str, 'url', _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
self.packages = self.node_get_member(node, list, 'packages', [])
self.requirements_files = self.node_get_member(node, list, 'requirements-files', [])
if not (self.packages or self.requirements_files):
raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))
def preflight(self):
# Try to find a pip version that supports download command
self.host_pip = None
for python in reversed(_PYTHON_VERSIONS):
try:
host_python = utils.get_host_tool(python)
rc = self.call([host_python, '-m', 'pip', 'download', '--help'])
if rc == 0:
self.host_pip = [host_python, '-m', 'pip']
break
except utils.ProgramNotFoundError:
pass
if self.host_pip is None:
raise SourceError("{}: Unable to find a suitable pip command".format(self))
def get_unique_key(self):
return [self.original_url, self.ref]
def get_consistency(self):
if not self.ref:
return Consistency.INCONSISTENT
if os.path.exists(self._mirror) and os.listdir(self._mirror):
return Consistency.CACHED
return Consistency.RESOLVED
def get_ref(self):
return self.ref
def load_ref(self, node):
self.ref = self.node_get_member(node, str, 'ref', None)
def set_ref(self, ref, node):
node['ref'] = self.ref = ref
def track(self, previous_sources_dir):
# XXX pip does not offer any public API other than the CLI tool so it
# is not feasible to correctly parse the requirements file or to check
# which package versions pip is going to install.
# See https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program
# for details.
# As a result, we have to wastefully install the packages during track.
with self.tempdir() as tmpdir:
install_args = self.host_pip + ['download',
'--no-binary', ':all:',
'--index-url', self.index_url,
'--dest', tmpdir]
for requirement_file in self.requirements_files:
fpath = os.path.join(previous_sources_dir, requirement_file)
install_args += ['-r', fpath]
install_args += self.packages
self.call(install_args, fail="Failed to install python packages")
reqs = self._parse_sdist_names(tmpdir)
return '\n'.join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
def fetch(self):
with self.tempdir() as tmpdir:
packages = self.ref.strip().split('\n')
package_dir = os.path.join(tmpdir, 'packages')
os.makedirs(package_dir)
self.call(self.host_pip + ['download',
'--no-binary', ':all:',
'--index-url', self.index_url,
'--dest', package_dir] + packages,
fail="Failed to install python packages: {}".format(packages))
# If the mirror directory already exists, assume that some other
# process has fetched the sources before us and ensure that we do
# not raise an error in that case.
try:
os.makedirs(self._mirror)
os.rename(package_dir, self._mirror)
except FileExistsError:
return
except OSError as e:
if e.errno != errno.ENOTEMPTY:
raise
def stage(self, directory):
with self.timed_activity("Staging Python packages", silent_nested=True):
utils.copy_files(self._mirror, os.path.join(directory, _OUTPUT_DIRNAME))
# Directory where this source should stage its files
#
@property
def _mirror(self):
if not self.ref:
return None
return os.path.join(self.get_mirror_directory(),
utils.url_directory_name(self.original_url),
hashlib.sha256(self.ref.encode()).hexdigest())
# Parse names of downloaded source distributions
#
# Args:
# basedir (str): Directory containing source distribution archives
#
# Returns:
# (list): List of (package_name, version) tuples in sorted order
#
def _parse_sdist_names(self, basedir):
reqs = []
for f in os.listdir(basedir):
pkg_match = _SDIST_RE.match(f)
if pkg_match:
reqs.append(pkg_match.groups())
return sorted(reqs)
def setup():
return PipSource
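Because the plugin requires previous sources at track time, a pip source can never be the first source of an element; the first-source check added earlier in this diff enforces that. A sketch of an element pairing it with a git source; the urls and install commands are illustrative:

    kind: manual

    sources:
    - kind: git
      url: upstream:myapp.git          # hypothetical aliased url
      track: master
    - kind: pip
      url: https://pypi.org/simple/
      requirements-files:
      - requirements.txt

    config:
      install-commands:
      - pip3 install --no-index --find-links .bst_pip_downloads -r requirements.txt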
......@@ -32,8 +32,10 @@ from .._fuse import SafeHardlinks
class Mount():
def __init__(self, sandbox, mount_point, safe_hardlinks):
scratch_directory = sandbox._get_scratch_directory()
# Getting external_directory here is acceptable as we're part of the sandbox code.
root_directory = sandbox.get_virtual_directory().external_directory
# Getting _get_underlying_directory() here is acceptable as
# we're part of the sandbox code. This will fail if our
# directory is CAS-based.
root_directory = sandbox.get_virtual_directory()._get_underlying_directory()
self.mount_point = mount_point
self.safe_hardlinks = safe_hardlinks
......
......@@ -58,7 +58,7 @@ class SandboxBwrap(Sandbox):
stdout, stderr = self._get_output()
# Allowable access to underlying storage as we're part of the sandbox
root_directory = self.get_virtual_directory().external_directory
root_directory = self.get_virtual_directory()._get_underlying_directory()
# Fallback to the sandbox default settings for
# the cwd and env.
......@@ -248,6 +248,7 @@ class SandboxBwrap(Sandbox):
# a bug, bwrap mounted a tempfs here and when it exits, that better be empty.
pass
self._vdir._mark_changed()
return exit_code
def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
......
......@@ -106,6 +106,7 @@ class SandboxChroot(Sandbox):
status = self.chroot(rootfs, command, stdin, stdout,
stderr, cwd, env, flags)
self._vdir._mark_changed()
return status
# chroot()
......
......@@ -31,6 +31,7 @@ See also: :ref:`sandboxing`.
import os
from .._exceptions import ImplError, BstError
from ..storage._filebaseddirectory import FileBasedDirectory
from ..storage._casbaseddirectory import CasBasedDirectory
class SandboxFlags():
......@@ -105,6 +106,7 @@ class Sandbox():
self.__scratch = os.path.join(self.__directory, 'scratch')
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
self._vdir = None
def get_directory(self):
"""Fetches the sandbox root directory
......@@ -133,8 +135,14 @@ class Sandbox():
(str): The sandbox root directory
"""
# For now, just create a new Directory every time we're asked
return FileBasedDirectory(self._root)
if not self._vdir:
# BST_CAS_DIRECTORIES is a deliberately hidden environment variable which
# can be used to switch on CAS-based directories for testing.
if 'BST_CAS_DIRECTORIES' in os.environ:
self._vdir = CasBasedDirectory(self.__context, ref=None)
else:
self._vdir = FileBasedDirectory(self._root)
return self._vdir
def set_environment(self, environment):
"""Sets the environment variables for the sandbox
......