Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (16)
Showing with 328 additions and 48 deletions
......@@ -294,7 +294,7 @@ committed with that.
To do this, first ensure you have ``click_man`` installed, possibly
with::
pip install --user click_man
pip3 install --user click_man
Then, in the toplevel directory of buildstream, run the following::
......@@ -450,7 +450,7 @@ To run the tests, just type::
At the toplevel.
When debugging a test, it can be desirable to see the stdout
and stderr generated by a test, to do this use the --addopts
and stderr generated by a test, to do this use the ``--addopts``
function to feed arguments to pytest as such::
./setup.py test --addopts -s
......@@ -530,7 +530,7 @@ tool.
Python provides `cProfile <https://docs.python.org/3/library/profile.html>`_
which gives you a list of all functions called during execution and how much
time was spent in each function. Here is an example of running `bst --help`
time was spent in each function. Here is an example of running ``bst --help``
under cProfile:
python3 -m cProfile -o bst.cprofile -- $(which bst) --help
......
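The profile written by this command can then be inspected with the standard-library ``pstats`` module. The following is a minimal sketch, assuming the output file is named ``bst.cprofile`` as in the command above::

    import pstats

    # Load the profile dumped by cProfile and print the 20 entries with the
    # highest cumulative time, which usually points at the hot paths.
    stats = pstats.Stats("bst.cprofile")
    stats.sort_stats("cumulative").print_stats(20)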
......@@ -240,7 +240,8 @@ class CASCache(ArtifactCache):
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
raise
raise ArtifactError("Failed to pull artifact {}: {}".format(
element._get_brief_display_key(), e)) from e
return False
......@@ -285,6 +286,7 @@ class CASCache(ArtifactCache):
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
# Intentionally re-raise RpcError for outer except block.
raise
missing_blobs = {}
......@@ -340,7 +342,7 @@ class CASCache(ArtifactCache):
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
raise ArtifactError("Failed to push artifact {}: {}".format(refs, e)) from e
raise ArtifactError("Failed to push artifact {}: {}".format(refs, e), temporary=True) from e
return pushed
......
......@@ -99,7 +99,7 @@ class ErrorDomain(Enum):
#
class BstError(Exception):
def __init__(self, message, *, detail=None, domain=None, reason=None):
def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
global _last_exception
super().__init__(message)
......@@ -114,6 +114,11 @@ class BstError(Exception):
#
self.sandbox = None
# When this exception occurred during the handling of a job, indicate
# whether or not there is any point retrying the job.
#
self.temporary = temporary
# Error domain and reason
#
self.domain = domain
......@@ -131,8 +136,8 @@ class BstError(Exception):
# or by the base :class:`.Plugin` element itself.
#
class PluginError(BstError):
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason)
def __init__(self, message, reason=None, temporary=False):
super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=temporary)
# LoadErrorReason
......@@ -249,8 +254,8 @@ class SandboxError(BstError):
# Raised when errors are encountered in the artifact caches
#
class ArtifactError(BstError):
def __init__(self, message, *, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason)
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True)
# PipelineError
......
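Every ``BstError`` subclass now carries the ``temporary`` flag, so code that catches these errors can decide whether retrying is worthwhile. A minimal sketch of the intended pattern, using a hypothetical ``download()`` helper that always fails transiently (not BuildStream code)::

    from buildstream._exceptions import BstError

    def download(url):
        # Stand-in for a real fetch; the name and the failure are illustrative only.
        raise BstError("Failed to download {}".format(url), temporary=True)

    try:
        download("https://example.com/tarball.tar.gz")
    except BstError as e:
        if e.temporary:
            # A transient failure (e.g. a network hiccup); retrying may help.
            print("Transient failure: {}".format(e))
        else:
            # A permanent failure; retrying would only repeat the error.
            raise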
......@@ -35,6 +35,12 @@ from ..._exceptions import ImplError, BstError, set_last_task_error
from ..._message import Message, MessageType, unconditional_messages
from ... import _signals, utils
# Return codes used for the shutdown of job handling child processes
#
RC_OK = 0
RC_FAIL = 1
RC_PERM_FAIL = 2
# Used to distinguish between status messages and return values
class Envelope():
......@@ -111,6 +117,10 @@ class Job():
self._max_retries = max_retries # Maximum number of automatic retries
self._result = None # Return value of child action in the parent
self._tries = 0 # Try count, for retryable jobs
# If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
#
self._retry_flag = True
self._logfile = logfile
self._task_id = None
......@@ -388,8 +398,9 @@ class Job():
result = self.child_process()
except BstError as e:
elapsed = datetime.datetime.now() - starttime
self._retry_flag = e.temporary
if self._tries <= self._max_retries:
if self._retry_flag and (self._tries <= self._max_retries):
self.message(MessageType.FAIL,
"Try #{} failed, retrying".format(self._tries),
elapsed=elapsed)
......@@ -402,7 +413,10 @@ class Job():
# Report the exception to the parent (for internal testing purposes)
self._child_send_error(e)
self._child_shutdown(1)
# Set return code based on whether or not the error was temporary.
#
self._child_shutdown(RC_FAIL if self._retry_flag else RC_PERM_FAIL)
except Exception as e: # pylint: disable=broad-except
......@@ -416,7 +430,7 @@ class Job():
self.message(MessageType.BUG, self.action_name,
elapsed=elapsed, detail=detail,
logfile=filename)
self._child_shutdown(1)
self._child_shutdown(RC_FAIL)
else:
# No exception occurred in the action
......@@ -430,7 +444,7 @@ class Job():
# Shutdown needs to stay outside of the above context manager,
# make sure we don't try to handle SIGTERM while the process
# is already busy in sys.exit()
self._child_shutdown(0)
self._child_shutdown(RC_OK)
# _child_send_error()
#
......@@ -495,7 +509,8 @@ class Job():
message.action_name = self.action_name
message.task_id = self._task_id
if message.message_type == MessageType.FAIL and self._tries <= self._max_retries:
if (message.message_type == MessageType.FAIL and
self._tries <= self._max_retries and self._retry_flag):
# Job will be retried, display failures as warnings in the frontend
message.message_type = MessageType.WARN
......@@ -529,12 +544,17 @@ class Job():
def _parent_child_completed(self, pid, returncode):
self._parent_shutdown()
if returncode != 0 and self._tries <= self._max_retries:
# We don't want to retry if we got OK or a permanent fail.
# This is set in _child_action but must also be set for the parent.
#
self._retry_flag = returncode not in (RC_OK, RC_PERM_FAIL)
if self._retry_flag and (self._tries <= self._max_retries):
self.spawn()
return
self.parent_complete(returncode == 0, self._result)
self._scheduler.job_completed(self, returncode == 0)
self.parent_complete(returncode == RC_OK, self._result)
self._scheduler.job_completed(self, returncode == RC_OK)
# _parent_process_envelope()
#
......
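The three return codes let the parent process tell a clean exit, a retryable failure and a permanent failure apart. The retry decision made in ``_parent_child_completed()`` amounts to the following self-contained sketch (not BuildStream code; constants copied from the hunk above)::

    RC_OK = 0
    RC_FAIL = 1
    RC_PERM_FAIL = 2

    def should_retry(returncode, tries, max_retries):
        # Only a plain failure is worth retrying; RC_OK means success and
        # RC_PERM_FAIL means the child flagged the error as permanent.
        retry_flag = returncode not in (RC_OK, RC_PERM_FAIL)
        return retry_flag and tries <= max_retries

    assert should_retry(RC_FAIL, tries=1, max_retries=3)
    assert not should_retry(RC_PERM_FAIL, tries=1, max_retries=3)
    assert not should_retry(RC_OK, tries=1, max_retries=3)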
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 9
BST_FORMAT_VERSION = 10
# The base BuildStream artifact version
......
......@@ -140,9 +140,10 @@ class ElementError(BstError):
message (str): The error message to report to the user
detail (str): A possibly multiline, more detailed error message
reason (str): An optional machine readable reason string, used for test cases
temporary (bool): An indicator of whether the error is temporary, i.e. whether the operation might succeed if run again. (*Since: 1.2*)
"""
def __init__(self, message, *, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
class Element(Plugin):
......
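Since ``ElementError`` now accepts ``temporary``, an element plugin can mark a failure as retryable. A hypothetical, abbreviated element (the usual ``configure()``, ``preflight()`` and staging methods are omitted, and the ``make`` invocation is only an example) might use it like this::

    from buildstream import Element, ElementError

    class ExampleElement(Element):

        def assemble(self, sandbox):
            # Run the build command; a non-zero exit from a network-dependent
            # step is a typical candidate for a temporary failure.
            exit_code = sandbox.run(['sh', '-c', 'make'], 0)
            if exit_code != 0:
                raise ElementError("Build command failed ({})".format(exit_code),
                                   temporary=True)
            return '/'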
......@@ -478,13 +478,15 @@ class Plugin():
silent_nested=silent_nested):
yield
def call(self, *popenargs, fail=None, **kwargs):
def call(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
"""A wrapper for subprocess.call()
Args:
popenargs (list): Popen() arguments
fail (str): A message to display if the process returns
a non zero exit code
fail_temporarily (bool): Whether any exceptions should
be raised as temporary. (*Since: 1.2*)
rest_of_args (kwargs): Remaining arguments to subprocess.call()
Returns:
......@@ -507,16 +509,18 @@ class Plugin():
"Failed to download ponies from {}".format(
self.mirror_directory))
"""
exit_code, _ = self.__call(*popenargs, fail=fail, **kwargs)
exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
return exit_code
def check_output(self, *popenargs, fail=None, **kwargs):
def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
"""A wrapper for subprocess.check_output()
Args:
popenargs (list): Popen() arguments
fail (str): A message to display if the process returns
a non zero exit code
fail_temporarily (bool): Whether any exceptions should
be raised as temporary. (*Since: 1.2*)
rest_of_args (kwargs): Remaining arguments to subprocess.call()
Returns:
......@@ -555,7 +559,7 @@ class Plugin():
raise SourceError(
fmt.format(plugin=self, track=tracking)) from e
"""
return self.__call(*popenargs, collect_stdout=True, fail=fail, **kwargs)
return self.__call(*popenargs, collect_stdout=True, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
#############################################################
# Private Methods used in BuildStream #
......@@ -619,7 +623,7 @@ class Plugin():
# Internal subprocess implementation for the call() and check_output() APIs
#
def __call(self, *popenargs, collect_stdout=False, fail=None, **kwargs):
def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
with self._output_file() as output_file:
if 'stdout' not in kwargs:
......@@ -634,7 +638,8 @@ class Plugin():
exit_code, output = utils._call(*popenargs, **kwargs)
if fail and exit_code:
raise PluginError("{plugin}: {message}".format(plugin=self, message=fail))
raise PluginError("{plugin}: {message}".format(plugin=self, message=fail),
temporary=fail_temporarily)
return (exit_code, output)
......
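The new ``fail_temporarily`` argument gives users of ``call()`` and ``check_output()`` a one-line way to request that a failed command be raised as a temporary error. A hypothetical source plugin fragment, where ``self.url`` and the ``wget`` command are assumptions for the example rather than BuildStream API, might pass it through like this::

    from buildstream import Source

    class ExampleDownloadSource(Source):

        def fetch(self):
            # Any command invoked through self.call() works the same way;
            # flagging the failure as temporary is intended to let the
            # fetch job be retried by the scheduler.
            self.call(['wget', '-O', 'download.bin', self.url],
                      fail="Failed to download {}".format(self.url),
                      fail_temporarily=True)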
......@@ -150,11 +150,11 @@ class DownloadableFileSource(Source):
# we would have downloaded.
return self.ref
raise SourceError("{}: Error mirroring {}: {}"
.format(self, self.url, e)) from e
.format(self, self.url, e), temporary=True) from e
except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError) as e:
raise SourceError("{}: Error mirroring {}: {}"
.format(self, self.url, e)) from e
.format(self, self.url, e), temporary=True) from e
def _get_mirror_dir(self):
return os.path.join(self.get_mirror_directory(),
......
......@@ -113,7 +113,8 @@ class GitMirror():
#
with self.source.tempdir() as tmpdir:
self.source.call([self.source.host_git, 'clone', '--mirror', '-n', self.url, tmpdir],
fail="Failed to clone git repository {}".format(self.url))
fail="Failed to clone git repository {}".format(self.url),
fail_temporarily=True)
try:
shutil.move(tmpdir, self.mirror)
......@@ -124,6 +125,7 @@ class GitMirror():
def fetch(self):
self.source.call([self.source.host_git, 'fetch', 'origin', '--prune'],
fail="Failed to fetch from remote git repository: {}".format(self.url),
fail_temporarily=True,
cwd=self.mirror)
def has_ref(self):
......@@ -157,7 +159,8 @@ class GitMirror():
# case we're just checking out a specific commit and then removing the .git/
# directory.
self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath))
fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True)
self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
fail="Failed to checkout git ref {}".format(self.ref),
......@@ -170,7 +173,8 @@ class GitMirror():
fullpath = os.path.join(directory, self.path)
self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath))
fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True)
self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', self.url],
fail='Failed to add remote origin "{}"'.format(self.url),
......
#
# Copyright Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Ed Baunton <ebaunton1@bloomberg.net>
"""
remote - stage files from remote urls
=====================================
**Usage:**
.. code:: yaml
# Specify the remote source kind
kind: remote
# Optionally specify a relative staging directory
# directory: path/to/stage
# Optionally specify a relative staging filename.
# If not specified, the basename of the url will be used.
# filename: customfilename
# Specify the url. Using an alias defined in your project
# configuration is encouraged. 'bst track' will update the
# sha256sum in 'ref' to the downloaded file's sha256sum.
url: upstream:foo
# Specify the ref. It's a sha256sum of the file you download.
ref: 6c9f6f68a131ec6381da82f2bff978083ed7f4f7991d931bfa767b7965ebc94b
.. note::
The ``remote`` plugin is available since :ref:`format version 10 <project_format_version>`
"""
import os
from buildstream import SourceError, utils
from ._downloadablefilesource import DownloadableFileSource
class RemoteSource(DownloadableFileSource):
# pylint: disable=attribute-defined-outside-init
def configure(self, node):
super().configure(node)
self.filename = self.node_get_member(node, str, 'filename', os.path.basename(self.url))
if os.sep in self.filename:
raise SourceError('{}: filename parameter cannot contain directories'.format(self),
reason="filename-contains-directory")
self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename'])
def get_unique_key(self):
return super().get_unique_key() + [self.filename]
def stage(self, directory):
# Same as in local plugin, don't use hardlinks to stage sources, they
# are not write protected in the sandbox.
dest = os.path.join(directory, self.filename)
with self.timed_activity("Staging remote file to {}".format(dest)):
utils.safe_copy(self._get_mirror_file(), dest)
def setup():
return RemoteSource
......@@ -108,9 +108,10 @@ class SourceError(BstError):
message (str): The brief error description to report to the user
detail (str): A possibly multiline, more detailed error message
reason (str): An optional machine readable reason string, used for test cases
temporary (bool): An indicator of whether the error is temporary, i.e. whether the operation might succeed if run again. (*Since: 1.2*)
"""
def __init__(self, message, *, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason)
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
class Source(Plugin):
......
......@@ -50,6 +50,7 @@ Sources
:maxdepth: 1
sources/local
sources/remote
sources/tar
sources/zip
sources/git
......
......@@ -58,7 +58,7 @@ it's important to note two things:
* **source**: We only want to extract the ``files/`` directory from the SDK.
This is because Flatpak runtimes don't start at the root of the OSTree checkout,
instead the actual files start in the ``files//`` subdirectory
instead the actual files start in the ``files/`` subdirectory
* **target**: The content we've extracted should be staged at ``/usr``
......
......@@ -400,7 +400,7 @@ dependency and that all referenced variables are declared, the following is fine
It should be noted that variable resolution only happens after all
:ref:`Element Composition <format_composition>` has already taken place.
This is to say that overriding ``%{version}`` at a higher priority will effect
This is to say that overriding ``%{version}`` at a higher priority will affect
the final result of ``%{release-text}``.
......
......@@ -548,7 +548,7 @@ The defaults environment for the build sandbox is defined here.
PATH: /usr/bin:/bin:/usr/sbin:/sbin
Additionally, the special ``environment-nocache`` list which specifies
which environment variables do not effect build output, and are thus
which environment variables do not affect build output, and are thus
not considered in the calculation of artifact keys can be defined here.
.. code:: yaml
......@@ -558,7 +558,7 @@ not considered in the calculation of artifact keys can be defined here.
Note that the ``environment-nocache`` list only exists so that we can
control parameters such as ``make -j ${MAXJOBS}``, allowing us to control
the number of jobs for a given build without effecting the resulting
the number of jobs for a given build without affecting the resulting
cache key.
......
......@@ -38,8 +38,8 @@ be able to integrate with Bash tab-completion if you invoke it in this way.
Two Docker volumes are set up by the ``bst-here`` script:
* buildstream-cache -- mounted at ``~/.cache/buildstream``
* buildstream-config -- mounted at ``~/.config/``
  • ``buildstream-cache`` -- mounted at ``~/.cache/buildstream``
  • ``buildstream-config`` -- mounted at ``~/.config/``
These are necessary so that your BuildStream cache and configuration files
persist between invocations of ``bst-here``.
......@@ -77,17 +77,17 @@ setup as described `here <https://backports.debian.org/Instructions/>`_
By adding the following line to your sources.list::
deb http://ftp.debian.org/debian stretch-backports main
deb http://deb.debian.org/debian stretch-backports main
And then running::
sudo apt-get update
sudo apt update
At this point you should be able to get the system requirements for the default plugins with::
sudo apt-get install \
sudo apt install \
bzr git lzip patch python3-arpy python3-gi
sudo apt-get install -t stretch-backports \
sudo apt install -t stretch-backports \
gir1.2-ostree-1.0 ostree
Buster or Sid
......@@ -98,7 +98,32 @@ to get the system requirements for the default plugins installed::
sudo apt-get install \
lzip gir1.2-ostree-1.0 git bzr ostree patch python3-arpy python3-gi
Ubuntu
++++++
Ubuntu 17.10 or higher
^^^^^^^^^^^^^^^^^^^^^^
For Ubuntu 17.10 or 18.04, all the base system dependencies can be installed with::
sudo apt-get install \
fuse bubblewrap git python3 python3-pip python3-psutil
To install the dependencies for the default plugins::
sudo apt-get install \
lzip git bzr patch python3-arpy ostree gir1.2-ostree-1.0 python3-gi
Ubuntu 17.04 and 16.X
^^^^^^^^^^^^^^^^^^^^^
On Ubuntu 16.X, neither `bubblewrap <https://github.com/projectatomic/bubblewrap/>`_
nor `ostree <https://github.com/ostreedev/ostree>`_ is available in the official repositories.
You will need to install them in whichever way you see fit. Refer to the upstream documentation
for advice on this.
Bubblewrap is available on Ubuntu 17.04; however, as with Ubuntu 16.X, OSTree is unsupported by
the official repositories and you will need to install it yourself.
Fedora
++++++
For recent fedora systems, the following line should get you the system
......@@ -177,8 +202,8 @@ checkout::
cd /path/to/buildstream
git pull --rebase
If you did not specify the ``-e`` option at install time, you will
need to cleanly reinstall BuildStream::
If you did not specify the ``-e`` option at install time or the dependencies
have changed, you will need to cleanly reinstall BuildStream::
pip3 uninstall buildstream
cd /path/to/buildstream
......@@ -197,3 +222,4 @@ from `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_
Alternatively, use
`buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
for the latest version of the development branch.
......@@ -109,7 +109,7 @@ changes the output.
Observe where the variables are declared in the :ref:`builtin defaults
<project_builtin_defaults>` and :mod:`autotools <elements.autotools>` element
documentation, and how overriding these affects the resolved set of variables.
Using the project
......
......@@ -45,7 +45,7 @@ def test_no_ref(cli, tmpdir, datafiles):
assert cli.get_element_state(project, 'target.bst') == 'no reference'
# Test that when I fetch a nonexistent URL, errors are handled gracefully.
# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
def test_fetch_bad_url(cli, tmpdir, datafiles):
......@@ -56,6 +56,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles):
result = cli.run(project=project, args=[
'fetch', 'target.bst'
])
assert "Try #" in result.stderr
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
......
import os
import pytest

from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
from tests.testutils import cli

DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    'remote',
)


def generate_project(project_dir, tmpdir):
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.dump({
        'name': 'foo',
        'aliases': {
            'tmpdir': "file:///" + str(tmpdir)
        }
    }, project_file)


# Test that without ref, consistency is set appropriately.
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
def test_no_ref(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    assert cli.get_element_state(project, 'target.bst') == 'no reference'


# Here we are doing a fetch on a file that doesn't exist. target.bst
# refers to 'file' but that file is not present.
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing-file'))
def test_missing_file(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])
    result.assert_main_error(ErrorDomain.STREAM, None)
    result.assert_task_error(ErrorDomain.SOURCE, None)


@pytest.mark.datafiles(os.path.join(DATA_DIR, 'path-in-filename'))
def test_path_in_filename(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])

    # The bst file has a / in the filename param
    result.assert_main_error(ErrorDomain.SOURCE, "filename-contains-directory")


@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file'))
def test_simple_file_build(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])
    result.assert_success()

    result = cli.run(project=project, args=[
        'build', 'target.bst'
    ])
    result.assert_success()

    result = cli.run(project=project, args=[
        'checkout', 'target.bst', checkoutdir
    ])
    result.assert_success()

    # Note that the url of the file in target.bst is actually /dir/file
    # but this test confirms we take the basename
    assert(os.path.exists(os.path.join(checkoutdir, 'file')))


@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))
def test_simple_file_custom_name_build(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])
    result.assert_success()

    result = cli.run(project=project, args=[
        'build', 'target.bst'
    ])
    result.assert_success()

    result = cli.run(project=project, args=[
        'checkout', 'target.bst', checkoutdir
    ])
    result.assert_success()

    assert(not os.path.exists(os.path.join(checkoutdir, 'file')))
    assert(os.path.exists(os.path.join(checkoutdir, 'custom-file')))


@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
def test_unique_key(cli, tmpdir, datafiles):
    '''This test confirms that the 'filename' parameter is honoured when it comes
    to generating a cache key for the source.
    '''
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    assert cli.get_element_state(project, 'target.bst') == "fetch needed"
    assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])

    # We should download the file only once
    assert cli.get_element_state(project, 'target.bst') == 'buildable'
    assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'

    # But the cache key is different because the 'filename' is different.
    assert cli.get_element_key(project, 'target.bst') != \
        cli.get_element_key(project, 'target-custom.bst')