...
 
Commits (23)
......@@ -31,6 +31,7 @@ variables:
- df -h
script:
- mkdir -p "${INTEGRATION_CACHE}"
- useradd -Um buildstream
- chown -R buildstream:buildstream .
......@@ -70,6 +71,10 @@ tests-python-3.7-stretch:
# some of our base dependencies declare it as their runtime dependency.
TOXENV: py37
tests-centos-7.6:
<<: *tests
image: buildstream/testsuite-centos:7.6-5da27168-32c47d1c
overnight-fedora-28-aarch64:
image: buildstream/testsuite-fedora:aarch64-28-5da27168-32c47d1c
tags:
......
......@@ -21,7 +21,7 @@ import hashlib
import itertools
import os
import stat
import tempfile
import errno
import uuid
import contextlib
......@@ -129,7 +129,7 @@ class CASCache():
else:
return dest
with tempfile.TemporaryDirectory(prefix='tmp', dir=self.tmpdir) as tmpdir:
with utils._tempdir(prefix='tmp', dir=self.tmpdir) as tmpdir:
checkoutdir = os.path.join(tmpdir, ref)
self._checkout(checkoutdir, tree)
......@@ -374,7 +374,7 @@ class CASCache():
for chunk in iter(lambda: tmp.read(4096), b""):
h.update(chunk)
else:
tmp = stack.enter_context(tempfile.NamedTemporaryFile(dir=self.tmpdir))
tmp = stack.enter_context(utils._tempnamedfile(dir=self.tmpdir))
# Set mode bits to 0644
os.chmod(tmp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
......@@ -545,11 +545,7 @@ class CASCache():
def remove(self, ref, *, defer_prune=False):
# Remove cache ref
refpath = self._refpath(ref)
if not os.path.exists(refpath):
raise CASCacheError("Could not find ref '{}'".format(ref))
os.unlink(refpath)
self._remove_ref(ref)
if not defer_prune:
pruned = self.prune()
......@@ -626,6 +622,55 @@ class CASCache():
def _refpath(self, ref):
return os.path.join(self.casdir, 'refs', 'heads', ref)
# _remove_ref()
#
# Removes a ref.
#
# This also takes care of pruning away directories which can
# be removed after having removed the given ref.
#
# Args:
#    ref (str): The ref to remove
#
# Raises:
#    (CASCacheError): If the ref didn't exist, or a system error
#                     occurred while removing it
#
def _remove_ref(self, ref):

    # Remove the ref itself
    refpath = self._refpath(ref)
    try:
        os.unlink(refpath)
    except FileNotFoundError as e:
        raise CASCacheError("Could not find ref '{}'".format(ref)) from e

    # Now remove any leading directories
    #
    # Walk upwards from the ref's immediate parent directory towards
    # the base refs directory, removing every directory which has
    # become empty.
    #
    # NOTE: os.path.split() only separates the *last* path component,
    # so iterating over its result would leave the upper leading
    # directories of deeply nested refs behind; walking with
    # os.path.dirname() handles refs nested arbitrarily deep.
    basedir = os.path.join(self.casdir, 'refs', 'heads')
    refdir = os.path.dirname(refpath)
    while refdir != basedir:
        try:
            os.rmdir(refdir)
        except FileNotFoundError:
            # This directory did not exist, but its parent
            # directory might still be ready to prune
            pass
        except OSError as e:
            if e.errno == errno.ENOTEMPTY:
                # This directory was not empty, so we
                # cannot prune directories beyond this point
                break

            # Something went wrong here
            raise CASCacheError("System error while removing ref '{}': {}".format(ref, e)) from e

        # Move up one level for the next iteration
        refdir = os.path.dirname(refdir)
# _commit_directory():
#
# Adds local directory to content addressable store.
......@@ -797,7 +842,7 @@ class CASCache():
# already in local repository
return objpath
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
with utils._tempnamedfile(dir=self.tmpdir) as f:
remote._fetch_blob(digest, f)
added_digest = self.add_object(path=f.name, link_directly=True)
......@@ -807,7 +852,7 @@ class CASCache():
def _batch_download_complete(self, batch):
for digest, data in batch.send():
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
with utils._tempnamedfile(dir=self.tmpdir) as f:
f.write(data)
f.flush()
......@@ -904,7 +949,7 @@ class CASCache():
def _fetch_tree(self, remote, digest):
# download but do not store the Tree object
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
with utils._tempnamedfile(dir=self.tmpdir) as out:
remote._fetch_blob(digest, out)
tree = remote_execution_pb2.Tree()
......
......@@ -554,6 +554,12 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command)
element, assuming it has already been built and all required
artifacts are in the local cache.
Use '--' to separate a command from the options to bst,
otherwise bst may respond to them instead. e.g.
\b
bst shell example.bst -- df -h
Use the --build option to create a temporary sysroot for
building the element instead.
......
......@@ -647,8 +647,9 @@ class LogLine(Widget):
abbrev = False
if message.message_type not in ERROR_MESSAGES \
and not frontend_message and n_lines > self._message_lines:
abbrev = True
lines = lines[0:self._message_lines]
if self._message_lines > 0:
abbrev = True
else:
lines[n_lines - 1] = lines[n_lines - 1].rstrip('\n')
......@@ -674,7 +675,7 @@ class LogLine(Widget):
if self.context is not None and not self.context.log_verbose:
text += self._indent + self._err_profile.fmt("Log file: ")
text += self._indent + self._logfile_widget.render(message) + '\n'
else:
elif self._log_lines > 0:
text += self._indent + self._err_profile.fmt("Printing the last {} lines from log file:"
.format(self._log_lines)) + '\n'
text += self._indent + self._logfile_widget.render(message, abbrev=False) + '\n'
......
......@@ -112,7 +112,8 @@ class GitMirror(SourceFetcher):
else:
remote_name = "origin"
self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
self.source.call([self.source.host_git, 'fetch', remote_name, '--prune',
'+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*'],
fail="Failed to fetch from remote git repository: {}".format(url),
fail_temporarily=True,
cwd=self.mirror)
......
......@@ -28,17 +28,14 @@ some configuration data.
The empty configuration is as such:
.. literalinclude:: ../../../buildstream/plugins/elements/import.yaml
:language: yaml
See :ref:`built-in functionality documentation <core_buildelement_builtins>` for
details on common configuration options for build elements.
"""
import os
from buildstream import Element, BuildElement, ElementError
from buildstream import Element, ElementError
# Element implementation for the 'import' kind.
class ImportElement(BuildElement):
class ImportElement(Element):
# pylint: disable=attribute-defined-outside-init
# This plugin has been modified to avoid the use of Sandbox.get_directory
......@@ -93,10 +90,6 @@ class ImportElement(BuildElement):
# And we're done
return '/output'
def prepare(self, sandbox):
# We inherit a non-default prepare from BuildElement.
Element.prepare(self, sandbox)
def generate_script(self):
build_root = self.get_variable('build-root')
install_root = self.get_variable('install-root')
......
......@@ -56,6 +56,7 @@ details on common configuration options for sources.
import os
import shutil
import fcntl
from contextlib import contextmanager
from buildstream import Source, SourceError, Consistency
......@@ -84,10 +85,12 @@ class BzrSource(Source):
if self.ref is None or self.tracking is None:
return Consistency.INCONSISTENT
if self._check_ref():
return Consistency.CACHED
else:
return Consistency.RESOLVED
# Lock for the _check_ref()
with self._locked():
if self._check_ref():
return Consistency.CACHED
else:
return Consistency.RESOLVED
def load_ref(self, node):
self.ref = self.node_get_member(node, str, 'ref', None)
......@@ -100,7 +103,7 @@ class BzrSource(Source):
def track(self):
with self.timed_activity("Tracking {}".format(self.url),
silent_nested=True):
silent_nested=True), self._locked():
self._ensure_mirror(skip_ref_check=True)
ret, out = self.check_output([self.host_bzr, "version-info",
"--custom", "--template={revno}",
......@@ -114,7 +117,7 @@ class BzrSource(Source):
def fetch(self):
with self.timed_activity("Fetching {}".format(self.url),
silent_nested=True):
silent_nested=True), self._locked():
self._ensure_mirror()
def stage(self, directory):
......@@ -141,6 +144,26 @@ class BzrSource(Source):
"--directory={}".format(directory), url],
fail="Failed to switch workspace's parent branch to {}".format(url))
# _locked()
#
# This context manager ensures exclusive access to the
# bzr repository.
#
@contextmanager
def _locked(self):
    # All locks live in a single directory under the mirror root
    lock_root = os.path.join(self.get_mirror_directory(), 'locks')
    os.makedirs(lock_root, exist_ok=True)

    # One lock file per mirrored repository
    lock_basename = utils.url_directory_name(self.original_url) + '.lock'
    lock_path = os.path.join(lock_root, lock_basename)

    with open(lock_path, 'w') as lock_file:
        # Block until we hold the exclusive lock, and make sure we
        # always release it when leaving the context
        fcntl.flock(lock_file, fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.flock(lock_file, fcntl.LOCK_UN)
def _check_ref(self):
# If the mirror doesn't exist yet, then we don't have the ref
if not os.path.exists(self._get_branch_dir()):
......@@ -157,83 +180,27 @@ class BzrSource(Source):
return os.path.join(self.get_mirror_directory(),
utils.url_directory_name(self.original_url))
def _atomic_replace_mirrordir(self, srcdir):
"""Helper function to safely replace the mirror dir"""
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
self.call([self.host_bzr, "init-repo", "--no-trees", mirror_dir],
fail="Failed to initialize bzr repository")
branch_dir = os.path.join(mirror_dir, self.tracking)
branch_url = self.url + "/" + self.tracking
if not os.path.exists(branch_dir):
# `bzr branch` the branch if it doesn't exist
# to get the upstream code
self.call([self.host_bzr, "branch", branch_url, branch_dir],
fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
if not os.path.exists(self._get_mirror_dir()):
# Just move the srcdir to the mirror dir
try:
os.rename(srcdir, self._get_mirror_dir())
except OSError as e:
raise SourceError("{}: Failed to move srcdir '{}' to mirror dir '{}'"
.format(str(self), srcdir, self._get_mirror_dir())) from e
else:
# Atomically swap the backup dir.
backupdir = self._get_mirror_dir() + ".bak"
try:
os.rename(self._get_mirror_dir(), backupdir)
except OSError as e:
raise SourceError("{}: Failed to move mirrordir '{}' to backup dir '{}'"
.format(str(self), self._get_mirror_dir(), backupdir)) from e
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
fail="Failed to pull new changes for {}".format(branch_dir))
try:
os.rename(srcdir, self._get_mirror_dir())
except OSError as e:
# Attempt to put the backup back!
os.rename(backupdir, self._get_mirror_dir())
raise SourceError("{}: Failed to replace bzr repo '{}' with '{}"
.format(str(self), srcdir, self._get_mirror_dir())) from e
finally:
if os.path.exists(backupdir):
shutil.rmtree(backupdir)
@contextmanager
def _atomic_repodir(self):
"""Context manager for working in a copy of the bzr repository
Yields:
(str): A path to the copy of the bzr repo
This should be used because bzr does not give any guarantees of
atomicity, and aborting an operation at the wrong time (or
accidentally running multiple concurrent operations) can leave the
repo in an inconsistent state.
"""
with self.tempdir() as repodir:
mirror_dir = self._get_mirror_dir()
if os.path.exists(mirror_dir):
try:
# shutil.copytree doesn't like it if destination exists
shutil.rmtree(repodir)
shutil.copytree(mirror_dir, repodir)
except (shutil.Error, OSError) as e:
raise SourceError("{}: Failed to copy bzr repo from '{}' to '{}'"
.format(str(self), mirror_dir, repodir)) from e
yield repodir
self._atomic_replace_mirrordir(repodir)
def _ensure_mirror(self, skip_ref_check=False):
with self._atomic_repodir() as repodir:
# Initialize repo if no metadata
bzr_metadata_dir = os.path.join(repodir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
self.call([self.host_bzr, "init-repo", "--no-trees", repodir],
fail="Failed to initialize bzr repository")
branch_dir = os.path.join(repodir, self.tracking)
branch_url = self.url + "/" + self.tracking
if not os.path.exists(branch_dir):
# `bzr branch` the branch if it doesn't exist
# to get the upstream code
self.call([self.host_bzr, "branch", branch_url, branch_dir],
fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
else:
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
fail="Failed to pull new changes for {}".format(branch_dir))
if not skip_ref_check and not self._check_ref():
raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
reason="ref-not-mirrored")
......
......@@ -1032,6 +1032,36 @@ def _tempdir(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-bu
cleanup_tempdir()
# _tempnamedfile()
#
# A context manager for doing work on an open temporary file
# which is guaranteed to be named and have an entry in the filesystem.
#
# Args:
#    dir (str): A path to a parent directory for the temporary file
#    suffix (str): A suffix for the temporary file name
#    prefix (str): A prefix for the temporary file name
#
# Yields:
#    (tempfile.NamedTemporaryFile): The open temporary file object
#
# Do not use tempfile.NamedTemporaryFile() directly, as this will
# leak files on the filesystem when BuildStream exits a process
# on SIGTERM.
#
@contextmanager
def _tempnamedfile(suffix="", prefix="tmp", dir=None):  # pylint: disable=redefined-builtin
    temp = None

    # Callback for the SIGTERM terminator: close (and thereby delete)
    # the temporary file if the process is terminated while the
    # context is still active.
    def close_tempfile():
        if temp is not None:
            temp.close()

    with _signals.terminator(close_tempfile), \
        tempfile.NamedTemporaryFile(suffix=suffix, prefix=prefix, dir=dir) as temp:
        yield temp
# _kill_process_tree()
#
# Brutally murder a process and all of its children
......
......@@ -382,6 +382,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
res = cli.run(project=project, args=['checkout', 'target.bst', os.path.join(str(tmpdir), 'checkout')])
res.assert_success()
# Get a snapshot of the extracts in advance
extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
extracts = os.listdir(extractdir)
assert(len(extracts) == 1)
......@@ -395,3 +396,16 @@ def test_extract_expiry(cli, datafiles, tmpdir):
# Now the extract should be removed.
assert not os.path.exists(extract)
# As an added bonus, let's ensure that no directories have been left behind
#
# Now we should have a directory for the cached target2.bst, which
# replaced target.bst in the cache, we should not have a directory
# for the target.bst
refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
refsdirtest = os.path.join(refsdir, 'test')
refsdirtarget = os.path.join(refsdirtest, 'target')
refsdirtarget2 = os.path.join(refsdirtest, 'target2')
assert os.path.isdir(refsdirtarget2)
assert not os.path.exists(refsdirtarget)
......@@ -73,14 +73,36 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind):
assert not os.path.exists(os.path.join(project, 'project.refs'))
# NOTE:
#
# This test checks that recursive tracking works by observing
# element states after running a recursive tracking operation.
#
# However, this test is ALSO valuable as it stresses the source
# plugins in a situation where many source plugins are operating
# at once on the same backing repository.
#
# Do not change this test to use a separate 'Repo' per element
# as that would defeat the purpose of the stress test, otherwise
# please refactor that aspect into another test.
#
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("amount", [(1), (10)])
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
def test_track_recurse(cli, tmpdir, datafiles, kind):
def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
project = os.path.join(datafiles.dirname, datafiles.basename)
dev_files_path = os.path.join(project, 'files', 'dev-files')
element_path = os.path.join(project, 'elements')
element_dep_name = 'track-test-dep-{}.bst'.format(kind)
element_target_name = 'track-test-target-{}.bst'.format(kind)
# Try to actually launch as many fetch jobs as possible at the same time
#
# This stresses the Source plugins and helps to ensure that
# they handle concurrent access to the store correctly.
cli.configure({
'scheduler': {
'fetchers': amount,
}
})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
......@@ -89,18 +111,26 @@ def test_track_recurse(cli, tmpdir, datafiles, kind):
ref = repo.create(dev_files_path)
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
generate_element(repo, os.path.join(element_path, element_target_name),
dep_name=element_dep_name)
element_names = []
last_element_name = None
for i in range(amount + 1):
element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
filename = os.path.join(element_path, element_name)
element_names.append(element_name)
generate_element(repo, filename, dep_name=last_element_name)
last_element_name = element_name
# Assert that a fetch is needed
assert cli.get_element_state(project, element_dep_name) == 'no reference'
assert cli.get_element_state(project, element_target_name) == 'no reference'
states = cli.get_element_states(project, last_element_name)
for element_name in element_names:
assert states[element_name] == 'no reference'
# Now first try to track it
result = cli.run(project=project, args=[
'source', 'track', '--deps', 'all',
element_target_name])
last_element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
......@@ -109,12 +139,16 @@ def test_track_recurse(cli, tmpdir, datafiles, kind):
# is the job of fetch.
result = cli.run(project=project, args=[
'source', 'fetch', '--deps', 'all',
element_target_name])
last_element_name])
result.assert_success()
# Assert that the dependency is buildable and the target is waiting
assert cli.get_element_state(project, element_dep_name) == 'buildable'
assert cli.get_element_state(project, element_target_name) == 'waiting'
# Assert that the base is buildable and the rest are waiting
states = cli.get_element_states(project, last_element_name)
for element_name in element_names:
if element_name == element_names[0]:
assert states[element_name] == 'buildable'
else:
assert states[element_name] == 'waiting'
@pytest.mark.datafiles(DATA_DIR)
......
......@@ -204,6 +204,7 @@ def test_open_multi(cli, tmpdir, datafiles):
assert not ('.bzr' in workspace_lsdir)
@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_unwritable(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
......
#
# Copyright (C) 2018 Codethink Limited
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors: Tristan Maat <[email protected]>
#
import os
import pytest
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli_integration as cli
from tests.testutils.site import HAVE_BWRAP, IS_LINUX
pytestmark = pytest.mark.integration
# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"project",
)
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
def test_disable_message_lines(cli, tmpdir, datafiles):
    # Building with --message-lines 0 should suppress message bodies
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_name = 'message.bst'
    element_file = os.path.join(project, 'elements', element_name)

    element = {
        'kind': 'manual',
        'depends': [{
            'filename': 'base.bst'
        }],
        'config': {
            'build-commands':
            ['echo "Silly message"'],
            'strip-commands': []
        }
    }
    os.makedirs(os.path.dirname(element_file), exist_ok=True)
    _yaml.dump(element, element_file)

    # With default settings, the build commands show up in the log
    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()
    assert 'echo "Silly message"' in result.stderr

    # Rebuild with --message-lines 0: no message body may be printed
    cli.remove_artifact_from_cache(project, element_name)
    result = cli.run(project=project, args=["--message-lines", "0",
                                           "build", element_name])
    result.assert_success()
    assert "Message contains " not in result.stderr
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
def test_disable_error_lines(cli, tmpdir, datafiles):
    # Building with --error-lines 0 should suppress the log excerpt
    # which is normally printed when an element fails to build
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_path = os.path.join(project, 'elements')
    element_name = 'message.bst'

    element = {
        'kind': 'manual',
        'depends': [{
            'filename': 'base.bst'
        }],
        'config': {
            'build-commands':
            ['This is a syntax error > >'],
            'strip-commands': []
        }
    }
    os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
    _yaml.dump(element, os.path.join(element_path, element_name))

    # First we check that we get the syntax error, using the default
    # settings (no --error-lines option passed); otherwise both runs
    # would be identical and the test would not exercise the option.
    result = cli.run(project=project, args=["build", element_name])
    result.assert_main_error(ErrorDomain.STREAM, None)
    assert "This is a syntax error" in result.stderr

    # Let's now build it again, but with --error-lines 0
    cli.remove_artifact_from_cache(project, element_name)
    result = cli.run(project=project, args=["--error-lines", "0",
                                            "build", element_name])
    result.assert_main_error(ErrorDomain.STREAM, None)
    assert "Printing the last" not in result.stderr
......@@ -30,7 +30,7 @@ from buildstream import _yaml
from buildstream.plugin import CoreWarnings
from tests.testutils import cli, create_repo
from tests.testutils.site import HAVE_GIT
from tests.testutils.site import HAVE_GIT, HAVE_OLD_GIT
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
......@@ -664,6 +664,7 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git rm does not update .gitmodules")
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
@pytest.mark.parametrize("fail", ['warn', 'error'])
def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
......@@ -772,6 +773,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git describe lacks --first-parent")
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
@pytest.mark.parametrize("tag_type", [('annotated'), ('lightweight')])
......
from setuptools.sandbox import run_setup
import os
import pytest
import re
import shutil
import subprocess
SETUP_TEMPLATE = '''\
......@@ -88,7 +88,9 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1'):
f.write(INIT_TEMPLATE.format(name=name))
os.chmod(main_file, 0o644)
run_setup(setup_file, ['sdist'])
# Run sdist with a fresh process
p = subprocess.run(['python3', 'setup.py', 'sdist'], cwd=tmpdir)
assert p.returncode == 0
# create directory for this package in pypi resulting in a directory
# tree resembling the following structure:
......
......@@ -245,8 +245,14 @@ class Cli():
def remove_artifact_from_cache(self, project, element_name,
*, cache_dir=None):
# Read configuration to figure out where artifacts are stored
if not cache_dir:
cache_dir = os.path.join(project, 'cache', 'artifacts')
default = os.path.join(project, 'cache', 'artifacts')
if self.config is not None:
cache_dir = self.config.get('artifactdir', default)
else:
cache_dir = default
cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
......@@ -375,6 +381,9 @@ class Cli():
# Fetch an element state by name by
# invoking bst show on the project with the CLI
#
# If you need to get the states of multiple elements,
# then use get_element_states(s) instead.
#
def get_element_state(self, project, element_name):
result = self.run(project=project, silent=True, args=[
'show',
......@@ -385,6 +394,25 @@ class Cli():
result.assert_success()
return result.output.strip()
# Fetch the states of elements for a given target / deps
#
# Returns a dictionary with the element names as keys
#
def get_element_states(self, project, target, deps='all'):
    result = self.run(project=project, silent=True, args=[
        'show',
        '--deps', deps,
        '--format', '%{name}||%{state}',
        target
    ])
    result.assert_success()

    # Each output line is "<name>||<state>"; parse them into a dict
    return {
        fields[0]: fields[1]
        for fields in (
            line.split(sep='||')
            for line in result.output.splitlines()
        )
    }
# Fetch an element's cache key by invoking bst show
# on the project with the CLI
#
......
......@@ -2,6 +2,7 @@
# so we don't have to repeat this everywhere
#
import os
import subprocess
import sys
from buildstream import _site, utils, ProgramNotFoundError
......@@ -16,8 +17,12 @@ except ProgramNotFoundError:
try:
    utils.get_host_tool('git')
    HAVE_GIT = True

    # Detect ancient git installations which some tests must skip.
    #
    # Take only the leading numeric "X.Y.Z" token of the version
    # string, so suffixed versions (e.g. Apple git's
    # "git version 2.37.1 (Apple Git-137.1)") don't crash the
    # int() parse at import time.
    out = str(subprocess.check_output(['git', '--version']), "utf-8")
    version_token = out.split(' ', 2)[2].split(' ', 1)[0].strip()
    version = tuple(int(x) for x in version_token.split('.'))
    HAVE_OLD_GIT = version < (1, 8, 5)
except ProgramNotFoundError:
    HAVE_GIT = False
    HAVE_OLD_GIT = False
try:
utils.get_host_tool('ostree')
......