Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (16)
Showing
with 481 additions and 85 deletions
......@@ -26,15 +26,6 @@ source_dist:
- tar -ztf dist/*
- tarball=$(cd dist && echo $(ls *))
# Create an installer script
- |
cat > dist/install.sh << EOF
#!/bin/sh
tar -zxf ${tarball}
cd ${tarball%.tar.gz}
pip3 install --no-index .
EOF
# unpack tarball as `dist/buildstream` directory
- |
cat > dist/unpack.sh << EOF
......@@ -44,7 +35,6 @@ source_dist:
EOF
# Make our helpers executable
- chmod +x dist/install.sh
- chmod +x dist/unpack.sh
artifacts:
paths:
......
......@@ -80,6 +80,8 @@ class ArtifactCache():
self.context = context
self.required_artifacts = set()
self.extractdir = os.path.join(context.artifactdir, 'extract')
self.tmpdir = os.path.join(context.artifactdir, 'tmp')
self.max_size = context.cache_quota
self.estimated_size = None
......@@ -89,7 +91,8 @@ class ArtifactCache():
self._local = False
self.cache_size = None
os.makedirs(context.artifactdir, exist_ok=True)
os.makedirs(self.extractdir, exist_ok=True)
os.makedirs(self.tmpdir, exist_ok=True)
################################################
# Methods implemented on the abstract class #
......
......@@ -56,7 +56,8 @@ class CASCache(ArtifactCache):
super().__init__(context)
self.casdir = os.path.join(context.artifactdir, 'cas')
os.makedirs(os.path.join(self.casdir, 'tmp'), exist_ok=True)
os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
self._enable_push = enable_push
......@@ -85,8 +86,6 @@ class CASCache(ArtifactCache):
# artifact has already been extracted
return dest
os.makedirs(self.extractdir, exist_ok=True)
with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
checkoutdir = os.path.join(tmpdir, ref)
self._checkout(checkoutdir, tree)
......@@ -394,7 +393,7 @@ class CASCache(ArtifactCache):
try:
h = hashlib.sha256()
# Always write out new file to avoid corruption if input file is modified
with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
# Set mode bits to 0644
os.chmod(out.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
......@@ -764,7 +763,7 @@ class CASCache(ArtifactCache):
# already in local cache
return
with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
self._fetch_blob(remote, tree, out)
directory = remote_execution_pb2.Directory()
......@@ -778,7 +777,7 @@ class CASCache(ArtifactCache):
# already in local cache
continue
with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as f:
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
self._fetch_blob(remote, filenode.digest, f)
digest = self.add_object(path=f.name)
......@@ -846,6 +845,9 @@ class _CASRemote():
def _grouper(iterable, n):
# pylint: disable=stop-iteration-return
while True:
yield itertools.chain([next(iterable)], itertools.islice(iterable, n - 1))
try:
current = next(iterable)
except StopIteration:
return
yield itertools.chain([current], itertools.islice(iterable, n - 1))
......@@ -161,7 +161,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
offset = 0
finished = False
resource_name = None
with tempfile.NamedTemporaryFile(dir=os.path.join(self.cas.casdir, 'tmp')) as out:
with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
for request in request_iterator:
assert not finished
assert request.write_offset == offset
......
......@@ -757,7 +757,11 @@ class FUSE(object):
if self.raw_fi:
return self.operations('create', path, mode, fi)
else:
fi.fh = self.operations('create', path, mode)
# This line is different from upstream to fix issues
# reading file opened with O_CREAT|O_RDWR.
# See issue #143.
fi.fh = self.operations('create', path, mode, fi.flags)
# END OF MODIFICATION
return 0
def ftruncate(self, path, length, fip):
......
......@@ -185,12 +185,12 @@ class SafeHardlinkOps(Operations):
return os.open(full_path, flags)
def create(self, path, mode, fi=None):
def create(self, path, mode, flags):
full_path = self._full_path(path)
# If it already exists, ensure it's a copy first
self._ensure_copy(full_path)
return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
return os.open(full_path, flags, mode)
def read(self, path, length, offset, fh):
os.lseek(fh, offset, os.SEEK_SET)
......
......@@ -522,14 +522,15 @@ class Loader():
element = Element._new_from_meta(meta_element, platform.artifactcache)
element._preflight()
for source in element.sources():
sources = list(element.sources())
for idx, source in enumerate(sources):
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
if fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source._fetch()
source._fetch(sources[0:idx])
else:
detail = "Try fetching the project with `bst fetch {}`".format(filename)
raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
......
......@@ -19,7 +19,6 @@
# Tiago Gomes <tiago.gomes@codethink.co.uk>
import os
import multiprocessing # for cpu_count()
from collections import Mapping, OrderedDict
from pluginbase import PluginBase
from . import utils
......@@ -572,7 +571,10 @@ class Project():
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
# Based on some testing (mainly on AWS), maximum effective
# max-jobs value seems to be around 8-10 if we have enough cores
# users should set values based on workload and build infrastructure
output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
......
......@@ -40,8 +40,10 @@ class FetchQueue(Queue):
self._skip_cached = skip_cached
def process(self, element):
previous_sources = []
for source in element.sources():
source._fetch()
source._fetch(previous_sources)
previous_sources.append(source)
def status(self, element):
# state of dependencies may have changed, recalculate element state
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 13
BST_FORMAT_VERSION = 14
# The base BuildStream artifact version
......
......@@ -1270,6 +1270,12 @@ class Element(Plugin):
# Prepend provenance to the error
raise ElementError("{}: {}".format(self, e), reason=e.reason) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError("{}: {} cannot be the first source of an element "
"as it requires access to previous sources"
.format(self, self.__sources[0]))
# Preflight the sources
for source in self.sources():
source._preflight()
......@@ -1313,9 +1319,9 @@ class Element(Plugin):
#
def _track(self):
refs = []
for source in self.__sources:
for index, source in enumerate(self.__sources):
old_ref = source.get_ref()
new_ref = source._track()
new_ref = source._track(self.__sources[0:index])
refs.append((source._get_unique_id(), new_ref))
# Complimentary warning that the new ref will be unused.
......
#
# Copyright 2018 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Chandan Singh <csingh43@bloomberg.net>
"""
pip - stage python packages using pip
=====================================
**Host dependencies:**
* ``virtualenv`` CLI tool or ``venv`` python module
**Usage:**
.. code:: yaml
# Specify the pip source kind
kind: pip
# Optionally specify the python executable, defaults to system "python"
# Note that either the venv module or the virtualenv CLI tool must be
# available
python-exe: python3.6
# Optionally specify index url, defaults to PyPi
# This url is used to discover new versions of packages and download them
# Projects intending to mirror their sources to a permanent location should
# use an aliased url, and declare the alias in the project configuration
url: https://mypypi.example.com/simple
# Optionally specify the path to requirements files
# Note that either 'requirements-files' or 'packages' must be defined
requirements-files:
- requirements.txt
# Optionally specify a list of additional packages
# Note that either 'requirements-files' or 'packages' must be defined
packages:
- flake8
# Optionally specify a relative staging directory
directory: path/to/stage
# Specify the ref. It is a list of strings of format
# "<package-name>==<version>", separated by "\\n".
# Usually this will be contents of a requirements.txt file where all
# package versions have been frozen.
ref: "flake8==3.5.0\\nmccabe==0.6.1\\npkg-resources==0.0.0\\npycodestyle==2.3.1\\npyflakes==1.6.0"
.. note::
The ``pip`` plugin is available since :ref:`format version 14 <project_format_version>`
"""
import errno
import hashlib
import os
from buildstream import Consistency, Source, SourceError, utils
_PYPI_INDEX_URL = 'https://pypi.org/simple/'
class PipSource(Source):
    """Stage python packages using pip.

    The ``ref`` is a frozen, newline-separated list of pinned
    ``<package>==<version>`` strings (as produced by ``pip freeze``), so
    fetch does not need access to previous sources; track does, in order
    to read requirements files staged by earlier sources.
    """
    # pylint: disable=attribute-defined-outside-init

    # We need access to previous sources at track time to use requirements.txt
    # but not at fetch time as self.ref should contain sufficient information
    # for this plugin
    BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True

    def configure(self, node):
        # Validate and load the plugin configuration from the element YAML.
        self.node_validate(node, ['url', 'packages', 'python-exe', 'ref', 'requirements-files'] +
                           Source.COMMON_CONFIG_KEYS)
        self.ref = self.node_get_member(node, str, 'ref', None)
        self.python_exe = self.node_get_member(node, str, 'python-exe', 'python')
        self.original_url = self.node_get_member(node, str, 'url', _PYPI_INDEX_URL)
        self.index_url = self.translate_url(self.original_url)
        self.packages = self.node_get_member(node, list, 'packages', [])
        self.requirements_files = self.node_get_member(node, list, 'requirements-files', [])

        # At least one way of naming packages must be configured
        if not (self.packages or self.requirements_files):
            raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".format(self))

    def preflight(self):
        # Try to find a way to open virtual environments on the host
        try:
            # Look for the virtualenv CLI first
            venv = utils.get_host_tool('virtualenv')
            self.venv_cmd = [venv, '--python', self.python_exe]
        except utils.ProgramNotFoundError:
            # Fall back to the venv module if it is installed
            python_exe = utils.get_host_tool(self.python_exe)
            rc = self.call([python_exe, '-m', 'venv', '--help'])
            if rc == 0:
                self.venv_cmd = [python_exe, '-m', 'venv']
            else:
                raise SourceError("{}: venv module not found using python: {}"
                                  .format(self, python_exe))

    def get_unique_key(self):
        # The ref pins every package version, so together with the
        # interpreter and index url it fully determines the output.
        return [self.python_exe, self.original_url, self.ref]

    def get_consistency(self):
        if not self.ref:
            return Consistency.INCONSISTENT
        # Cached only if the mirror directory exists and is non-empty
        if os.path.exists(self._mirror) and os.listdir(self._mirror):
            return Consistency.CACHED
        return Consistency.RESOLVED

    def get_ref(self):
        return self.ref

    def load_ref(self, node):
        self.ref = self.node_get_member(node, str, 'ref', None)

    def set_ref(self, ref, node):
        node['ref'] = self.ref = ref

    def track(self, previous_sources_dir):
        # XXX pip does not offer any public API other than the CLI tool so it
        # is not feasible to correctly parse the requirements file or to check
        # which package versions pip is going to install.
        # See https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program
        # for details.
        # As a result, we have to wastefully install the packages during track.
        with self.tempdir() as tmpdir:
            pip = self._venv_pip(tmpdir)
            install_args = [pip, 'install', '--index-url', self.index_url]
            # Requirements files are resolved relative to the staged
            # previous sources of this element
            for requirement_file in self.requirements_files:
                fpath = os.path.join(previous_sources_dir, requirement_file)
                install_args += ['-r', fpath]
            install_args += self.packages
            self.call(install_args, fail="Failed to install python packages")
            # `pip freeze` emits the pinned "<package>==<version>" lines
            # that become the new ref
            _, reqs = self.check_output([pip, 'freeze'])
            return reqs.strip()

    def fetch(self):
        with self.tempdir() as venvdir, self.tempdir() as tmpdir:
            pip = self._venv_pip(venvdir)
            packages = self.ref.strip().split('\n')
            package_dir = os.path.join(tmpdir, 'packages')
            self.call([pip, 'install',
                       '--index-url', self.index_url,
                       '--prefix', package_dir] +
                      packages,
                      fail="Failed to install python packages: {}".format(packages))

            # If the mirror directory already exists, assume that some other
            # process has fetched the sources before us and ensure that we do
            # not raise an error in that case.
            try:
                os.rename(package_dir, self._mirror)
            except OSError as e:
                # POSIX rename() may report either ENOTEMPTY or EEXIST when
                # the destination is an existing non-empty directory, so
                # tolerate both to keep the race-handling portable.
                if e.errno not in (errno.ENOTEMPTY, errno.EEXIST):
                    raise

    def stage(self, directory):
        with self.timed_activity("Staging Python packages", silent_nested=True):
            utils.copy_files(self._mirror, directory)

    # Directory where this source should stage its files
    #
    @property
    def _mirror(self):
        if not self.ref:
            return None
        # Keyed on the url and a digest of the ref so different pinned
        # package sets never collide
        return os.path.join(self.get_mirror_directory(),
                            self.original_url,
                            hashlib.sha256(self.ref.encode()).hexdigest())

    # Open a virtual environment in given directory and return pip path
    #
    def _venv_pip(self, directory):
        self.call(self.venv_cmd + [directory], fail="Failed to initialize virtual environment")
        pip_exe = os.path.join(directory, 'bin', 'pip')
        if not os.path.isfile(pip_exe):
            raise SourceError("Failed to initialize virtual environment")
        return pip_exe
def setup():
    """Plugin entry point: expose the source class to BuildStream."""
    return PipSource
......@@ -76,6 +76,39 @@ these methods are mandatory to implement.
:ref:`SourceFetcher <core_source_fetcher>`.
Accessing previous sources
--------------------------
*Since: 1.4*
In the general case, all sources are fetched and tracked independently of one
another. In situations where a source needs to access previous source(s) in
order to perform its own track and/or fetch, following attributes can be set to
request access to previous sources:
* :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK`
Indicate that access to previous sources is required during track
* :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH`
Indicate that access to previous sources is required during fetch
The intended use of such plugins is to fetch external dependencies of other
sources, typically using some kind of package manager, such that all the
dependencies of the original source(s) are available at build time.
When implementing such a plugin, implementors should adhere to the following
guidelines:
* Implementations must be able to store the obtained artifacts in a
subdirectory.
* Implementations must be able to deterministically generate a unique ref, such
that two refs are different if and only if they produce different outputs.
* Implementations must not introduce host contamination.
.. _core_source_fetcher:
SourceFetcher - Object for fetching individual URLs
......@@ -92,6 +125,8 @@ mentioned, these methods are mandatory to implement.
Fetches the URL associated with this SourceFetcher, optionally taking an
alias override.
Class Reference
---------------
"""
import os
......@@ -156,7 +191,7 @@ class SourceFetcher():
#############################################################
# Abstract Methods #
#############################################################
def fetch(self, alias_override=None):
def fetch(self, alias_override=None, **kwargs):
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
......@@ -209,6 +244,32 @@ class Source(Plugin):
__defaults = {} # The defaults from the project
__defaults_set = False # Flag, in case there are not defaults at all
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
When set to True:
* all sources listed before this source in the given element will be
fetched before this source is tracked
* Source.track() will be called with an additional keyword argument
`previous_sources_dir` where previous sources will be staged
* this source can not be the first source for an element
*Since: 1.4*
"""
BST_REQUIRES_PREVIOUS_SOURCES_FETCH = False
"""Whether access to previous sources is required during fetch
When set to True:
* all sources listed before this source in the given element will be
fetched before this source is fetched
* Source.fetch() will be called with an additional keyword argument
`previous_sources_dir` where previous sources will be staged
* this source can not be the first source for an element
*Since: 1.4*
"""
def __init__(self, context, project, meta, *, alias_override=None):
provenance = _yaml.node_get_provenance(meta.config)
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
......@@ -305,9 +366,15 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
def track(self):
def track(self, **kwargs):
"""Resolve a new ref from the plugin's track option
Args:
previous_sources_dir (str): directory where previous sources are staged.
Note that this keyword argument is available only when
:attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK`
is set to True.
Returns:
(simple object): A new internal source reference, or None
......@@ -326,10 +393,16 @@ class Source(Plugin):
# Allow a non implementation
return None
def fetch(self):
def fetch(self, **kwargs):
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
Args:
previous_sources_dir (str): directory where previous sources are staged.
Note that this keyword argument is available only when
:attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH`
is set to True.
Raises:
:class:`.SourceError`
......@@ -519,50 +592,19 @@ class Source(Plugin):
# Wrapper function around plugin provided fetch method
#
def _fetch(self):
project = self._get_project()
source_fetchers = self.get_source_fetchers()
if source_fetchers:
for fetcher in source_fetchers:
alias = fetcher._get_alias()
success = False
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
try:
fetcher.fetch(uri)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
success = True
break
if not success:
raise last_error
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
def _fetch(self, previous_sources):
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
with self.tempdir() as staging_directory:
for src in previous_sources:
src._stage(staging_directory)
self.__do_fetch(previous_sources_dir=staging_directory)
else:
alias = self._get_alias()
if self.__first_pass:
mirrors = project.first_pass_config.mirrors
else:
mirrors = project.config.mirrors
if not mirrors or not alias:
self.fetch()
return
context = self._get_context()
source_kind = type(self)
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
new_source = source_kind(context, project, self.__meta,
alias_override=(alias, uri))
new_source._preflight()
try:
new_source.fetch()
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
return
raise last_error
self.__do_fetch()
# Wrapper for stage() api which gives the source
# plugin a fully constructed path considering the
......@@ -773,8 +815,19 @@ class Source(Plugin):
# Wrapper for track()
#
def _track(self):
new_ref = self.__do_track()
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
def _track(self, previous_sources):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
with self.tempdir() as staging_directory:
for src in previous_sources:
src._stage(staging_directory)
new_ref = self.__do_track(previous_sources_dir=staging_directory)
else:
new_ref = self.__do_track()
current_ref = self.get_ref()
if new_ref is None:
......@@ -786,6 +839,17 @@ class Source(Plugin):
return new_ref
# _requires_previous_sources()
#
# If a plugin requires access to previous sources at track or fetch time,
# then it cannot be the first source of an element.
#
# Returns:
# (bool): Whether this source requires access to previous sources
#
def _requires_previous_sources(self):
return self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
# Returns the alias if it's defined in the project
def _get_alias(self):
alias = self.__expected_alias
......@@ -801,8 +865,54 @@ class Source(Plugin):
# Local Private Methods #
#############################################################
# Tries to call fetch for every mirror, stopping once it succeeds
def __do_fetch(self, **kwargs):
project = self._get_project()
source_fetchers = self.get_source_fetchers()
if source_fetchers:
for fetcher in source_fetchers:
alias = fetcher._get_alias()
success = False
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
try:
fetcher.fetch(uri)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
success = True
break
if not success:
raise last_error
else:
alias = self._get_alias()
if self.__first_pass:
mirrors = project.first_pass_config.mirrors
else:
mirrors = project.config.mirrors
if not mirrors or not alias:
self.fetch(**kwargs)
return
context = self._get_context()
source_kind = type(self)
for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
new_source = source_kind(context, project, self.__meta,
alias_override=(alias, uri))
new_source._preflight()
try:
new_source.fetch(**kwargs)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
last_error = e
continue
return
raise last_error
# Tries to call track for every mirror, stopping once it succeeds
def __do_track(self):
def __do_track(self, **kwargs):
project = self._get_project()
# If there are no mirrors, or no aliases to replace, there's nothing to do here.
alias = self._get_alias()
......@@ -811,7 +921,7 @@ class Source(Plugin):
else:
mirrors = project.config.mirrors
if not mirrors or not alias:
return self.track()
return self.track(**kwargs)
context = self._get_context()
source_kind = type(self)
......@@ -823,7 +933,7 @@ class Source(Plugin):
alias_override=(alias, uri))
new_source._preflight()
try:
ref = new_source.track()
ref = new_source.track(**kwargs)
# FIXME: Need to consider temporary vs. permanent failures,
# and how this works with retries.
except BstError as e:
......@@ -867,3 +977,14 @@ class Source(Plugin):
_yaml.node_final_assertions(config)
return config
# Ensures that previous sources have been tracked and fetched.
#
def __ensure_previous_sources(self, previous_sources):
for index, src in enumerate(previous_sources):
if src.get_consistency() == Consistency.RESOLVED:
src._fetch(previous_sources[0:index])
elif src.get_consistency() == Consistency.INCONSISTENT:
new_ref = src._track(previous_sources[0:index])
src._save_ref(new_ref)
src._fetch(previous_sources[0:index])
......@@ -58,6 +58,7 @@ Sources
sources/ostree
sources/patch
sources/deb
sources/pip
External plugins
......
......@@ -251,7 +251,7 @@ setup(name='BuildStream',
install_requires=[
'setuptools',
'psutil',
'ruamel.yaml',
'ruamel.yaml <= 0.15',
'pluginbase',
'Click',
'blessings',
......@@ -272,6 +272,5 @@ setup(name='BuildStream',
'pytest-cov >= 2.5.0',
# Provide option to run tests in parallel, less reliable
'pytest-xdist',
'pytest >= 3.1.0',
'pylint >= 1.8 , < 2'],
'pytest >= 3.1.0'],
zip_safe=False)
import os
import pytest
from buildstream import _yaml
from tests.testutils import cli_integration as cli
from tests.testutils.integration import assert_contains
# Mark every test in this module as an integration test
pytestmark = pytest.mark.integration

# Directory holding the test project fixture, relative to this file
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"project"
)
@pytest.mark.datafiles(DATA_DIR)
def test_pip_source(cli, tmpdir, datafiles):
    """Track, build and checkout an element whose pip source installs
    packages from both a requirements file and an explicit package list,
    then verify the staged binaries are present in the checkout."""
    project_dir = os.path.join(datafiles.dirname, datafiles.basename)
    checkout_dir = os.path.join(cli.directory, 'checkout')
    elements_dir = os.path.join(project_dir, 'elements')
    element_name = 'pip/hello.bst'

    # A local source stages the requirements file that the pip source
    # then consumes; the pip index is a file:// url into the fixture.
    element = {
        'kind': 'import',
        'sources': [
            {
                'kind': 'local',
                'path': 'files/pip-source'
            },
            {
                'kind': 'pip',
                'python-exe': 'python3',
                'url': 'file://{}'.format(os.path.realpath(os.path.join(project_dir, 'files', 'pypi-repo'))),
                'requirements-files': ['myreqs.txt'],
                'packages': ['app2']
            }
        ]
    }
    element_file = os.path.join(elements_dir, element_name)
    os.makedirs(os.path.dirname(element_file), exist_ok=True)
    _yaml.dump(element, element_file)

    # Each stage of the pipeline must succeed in order
    for args in (['track', element_name],
                 ['build', element_name],
                 ['checkout', element_name, checkout_dir]):
        result = cli.run(project=project_dir, args=args)
        assert result.exit_code == 0

    assert_contains(checkout_dir, ['/bin', '/bin/app1', '/bin/app2'])
app1
File added
<html>
<head>
<title>Links for app1</title>
</head>
<body>
<a href='App1-0.1.tar.gz'>App1-0.1.tar.gz</a><br />
</body>
</html>