Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (16)
Showing
with 361 additions and 122 deletions
......@@ -79,32 +79,46 @@ source_dist:
- cd ../..
- mkdir -p coverage-linux/
- cp dist/buildstream/.coverage coverage-linux/coverage."${CI_JOB_NAME}"
except:
- schedules
artifacts:
paths:
- coverage-linux/
tests-debian-9:
image: buildstream/testsuite-debian:9-master-119-552f5fc6
image: buildstream/testsuite-debian:9-master-123-7ce6581b
<<: *linux-tests
except:
- schedules
tests-fedora-27:
image: buildstream/testsuite-fedora:27-master-119-552f5fc6
image: buildstream/testsuite-fedora:27-master-123-7ce6581b
<<: *linux-tests
except:
- schedules
tests-fedora-28:
image: buildstream/testsuite-fedora:28-master-119-552f5fc6
image: buildstream/testsuite-fedora:28-master-123-7ce6581b
<<: *linux-tests
except:
- schedules
tests-ubuntu-18.04:
image: buildstream/testsuite-ubuntu:18.04-master-119-552f5fc6
image: buildstream/testsuite-ubuntu:18.04-master-123-7ce6581b
<<: *linux-tests
except:
- schedules
overnight-fedora-28-aarch64:
image: buildstream/testsuite-fedora:aarch64-28-master-123-7ce6581b
tags:
- aarch64
<<: *linux-tests
only:
- schedules
tests-unix:
# Use fedora here, to a) run a test on fedora and b) ensure that we
# can get rid of ostree - this is not possible with debian-8
image: buildstream/testsuite-fedora:27-master-119-552f5fc6
image: buildstream/testsuite-fedora:27-master-123-7ce6581b
stage: test
variables:
BST_FORCE_BACKEND: "unix"
......
......@@ -38,13 +38,23 @@ buildstream 1.3.1
a bug fix to workspaces so they can be built in workspaces too.
o Creating a build shell through the interactive mode or `bst shell --build`
will now use the cached build tree. It is now easier to debug local build
failures.
will now use the cached buildtree if available locally. It is now easier to
debug local build failures.
o `bst shell --sysroot` now takes any directory that contains a sysroot,
instead of just a specially-formatted build-root with a `root` and `scratch`
subdirectory.
o Due to the element `build tree` being cached in the respective artifact their
size in some cases has significantly increased. In *most* cases the build tree
is not utilised when building targets, as such by default bst 'pull' & 'build'
will not fetch buildtrees from remotes. This behaviour can be overridden with
the cli main option '--pull-build-trees', or the user configuration option
'pullbuildtrees = True'. The override will also add the build tree to already
cached artifacts. When attempting to populate an artifactcache server with
cached artifacts, only 'complete' elements can be pushed. If the element is
expected to have a populated build tree then it must be cached before pushing.
=================
buildstream 1.1.5
......
......@@ -476,6 +476,22 @@ class ArtifactCache():
return self.cas.contains(ref)
# contains_subdir_artifact():
#
# Check whether an artifact element contains a digest for a subdir
# which is populated in the cache, i.e. non-dangling.
#
# Args:
# element (Element): The Element to check
# key (str): The cache key to use
# subdir (str): The subdir to check
#
# Returns: True if the subdir exists & is populated in the cache, False otherwise
#
def contains_subdir_artifact(self, element, key, subdir):
    # Resolve the full artifact ref for this element/key pair and
    # delegate the actual cache query to the CAS backend.
    artifact_ref = self.get_artifact_fullname(element, key)
    return self.cas.contains_subdir_artifact(artifact_ref, subdir)
# list_artifacts():
#
# List artifacts in this cache in LRU order.
......@@ -533,6 +549,7 @@ class ArtifactCache():
# Args:
# element (Element): The Element to extract
# key (str): The cache key to use
# subdir (str): Optional specific subdir to extract
#
# Raises:
# ArtifactError: In cases there was an OSError, or if the artifact
......@@ -540,12 +557,12 @@ class ArtifactCache():
#
# Returns: path to extracted artifact
#
def extract(self, element, key):
def extract(self, element, key, subdir=None):
ref = self.get_artifact_fullname(element, key)
path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
return self.cas.extract(ref, path)
return self.cas.extract(ref, path, subdir=subdir)
# commit():
#
......@@ -666,11 +683,13 @@ class ArtifactCache():
# element (Element): The Element whose artifact is to be fetched
# key (str): The cache key to use
# progress (callable): The progress callback, if any
# subdir (str): The optional specific subdir to pull
# excluded_subdirs (list): The optional list of subdirs to not pull
#
# Returns:
# (bool): True if pull was successful, False if artifact was not available
#
def pull(self, element, key, *, progress=None):
def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
ref = self.get_artifact_fullname(element, key)
project = element._get_project()
......@@ -680,8 +699,13 @@ class ArtifactCache():
display_key = element._get_brief_display_key()
element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
if self.cas.pull(ref, remote, progress=progress):
if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
if subdir:
# Attempt to extract subdir into artifact extract dir if it already exists
# without containing the subdir. If the respective artifact extract dir does not
# exist, a complete extraction will be performed.
self.extract(element, key, subdir)
# no need to pull from additional remotes
return True
else:
......
......@@ -82,6 +82,27 @@ class CASCache():
# This assumes that the repository doesn't have any dangling pointers
return os.path.exists(refpath)
# contains_subdir_artifact():
#
# Check whether the specified artifact element tree has a digest for a subdir
# which is populated in the cache, i.e. non-dangling.
#
# Args:
# ref (str): The ref to check
# subdir (str): The subdir to check
#
# Returns: True if the subdir exists & is populated in the cache, False otherwise
#
def contains_subdir_artifact(self, ref, subdir):
    # Resolve the artifact tree and look up the digest recorded for the
    # subdir. This assumes the subdir digest is present in the element tree.
    tree = self.resolve_ref(ref)
    digest = self._get_subdir(tree, subdir)

    # The subdir is considered cached when the object the digest points
    # at exists locally (true also for an empty subdir, as expected)
    return os.path.exists(self.objpath(digest))
# extract():
#
# Extract cached directory for the specified ref if it hasn't
......@@ -90,19 +111,30 @@ class CASCache():
# Args:
# ref (str): The ref whose directory to extract
# path (str): The destination path
# subdir (str): Optional specific dir to extract
#
# Raises:
# CASError: In cases there was an OSError, or if the ref did not exist.
#
# Returns: path to extracted directory
#
def extract(self, ref, path):
def extract(self, ref, path, subdir=None):
tree = self.resolve_ref(ref, update_mtime=True)
dest = os.path.join(path, tree.hash)
originaldest = dest = os.path.join(path, tree.hash)
# If artifact is already extracted, check if the optional subdir
# has also been extracted. If the artifact has not been extracted
# a full extraction would include the optional subdir
if os.path.isdir(dest):
# directory has already been extracted
return dest
if subdir:
if not os.path.isdir(os.path.join(dest, subdir)):
dest = os.path.join(dest, subdir)
tree = self._get_subdir(tree, subdir)
else:
return dest
else:
return dest
with tempfile.TemporaryDirectory(prefix='tmp', dir=self.tmpdir) as tmpdir:
checkoutdir = os.path.join(tmpdir, ref)
......@@ -120,7 +152,7 @@ class CASCache():
if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
return dest
return originaldest
# commit():
#
......@@ -193,11 +225,13 @@ class CASCache():
# ref (str): The ref to pull
# remote (CASRemote): The remote repository to pull from
# progress (callable): The progress callback, if any
# subdir (str): The optional specific subdir to pull
# excluded_subdirs (list): The optional list of subdirs to not pull
#
# Returns:
# (bool): True if pull was successful, False if ref was not available
#
def pull(self, ref, remote, *, progress=None):
def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
try:
remote.init()
......@@ -209,7 +243,12 @@ class CASCache():
tree.hash = response.digest.hash
tree.size_bytes = response.digest.size_bytes
self._fetch_directory(remote, tree)
# Check if the element artifact is present, if so just fetch the subdir.
if subdir and os.path.exists(self.objpath(tree)):
self._fetch_subdir(remote, tree, subdir)
else:
# Fetch artifact, excluded_subdirs determined in pullqueue
self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
self.set_ref(ref, tree)
......@@ -607,8 +646,10 @@ class CASCache():
stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
for dirnode in directory.directories:
fullpath = os.path.join(dest, dirnode.name)
self._checkout(fullpath, dirnode.digest)
# Don't try to checkout a dangling ref
if os.path.exists(self.objpath(dirnode.digest)):
fullpath = os.path.join(dest, dirnode.name)
self._checkout(fullpath, dirnode.digest)
for symlinknode in directory.symlinks:
# symlink
......@@ -863,11 +904,14 @@ class CASCache():
# Args:
# remote (Remote): The remote to use.
# dir_digest (Digest): Digest object for the directory to fetch.
# excluded_subdirs (list): The optional list of subdirs to not fetch
#
def _fetch_directory(self, remote, dir_digest):
def _fetch_directory(self, remote, dir_digest, *, excluded_subdirs=None):
fetch_queue = [dir_digest]
fetch_next_queue = []
batch = _CASBatchRead(remote)
if not excluded_subdirs:
excluded_subdirs = []
while len(fetch_queue) + len(fetch_next_queue) > 0:
if not fetch_queue:
......@@ -882,8 +926,9 @@ class CASCache():
directory.ParseFromString(f.read())
for dirnode in directory.directories:
batch = self._fetch_directory_node(remote, dirnode.digest, batch,
fetch_queue, fetch_next_queue, recursive=True)
if dirnode.name not in excluded_subdirs:
batch = self._fetch_directory_node(remote, dirnode.digest, batch,
fetch_queue, fetch_next_queue, recursive=True)
for filenode in directory.files:
batch = self._fetch_directory_node(remote, filenode.digest, batch,
......@@ -892,6 +937,10 @@ class CASCache():
# Fetch final batch
self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
# _fetch_subdir():
#
# Fetch only a specific subdirectory of the given directory tree
# from the remote.
#
# Args:
#     remote (Remote): The remote to fetch from.
#     tree (Digest): Digest of the directory containing the subdir.
#     subdir (str): Name of the subdirectory to fetch.
#
def _fetch_subdir(self, remote, tree, subdir):
    subdirdigest = self._get_subdir(tree, subdir)
    self._fetch_directory(remote, subdirdigest)
def _fetch_tree(self, remote, digest):
# download but do not store the Tree object
with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
......
......@@ -104,6 +104,9 @@ class Context():
# What to do when a build fails in non interactive mode
self.sched_error_action = 'continue'
# Whether or not to attempt to pull build trees globally
self.pull_build_trees = False
# Whether elements must be rebuilt when their dependencies have changed
self._strict_build_plan = None
......@@ -160,7 +163,7 @@ class Context():
_yaml.node_validate(defaults, [
'sourcedir', 'builddir', 'artifactdir', 'logdir',
'scheduler', 'artifacts', 'logging', 'projects',
'cache'
'cache', 'pullbuildtrees'
])
for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
......@@ -185,6 +188,9 @@ class Context():
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
# Load pull build trees configuration
self.pull_build_trees = _yaml.node_get(defaults, bool, 'pullbuildtrees', default_value='False')
# Load logging config
logging = _yaml.node_get(defaults, Mapping, 'logging')
_yaml.node_validate(logging, [
......
......@@ -182,7 +182,8 @@ class App():
'fetchers': 'sched_fetchers',
'builders': 'sched_builders',
'pushers': 'sched_pushers',
'network_retries': 'sched_network_retries'
'network_retries': 'sched_network_retries',
'pull_build_trees': 'pull_build_trees'
}
for cli_option, context_attr in override_map.items():
option_value = self._main_options.get(cli_option)
......
......@@ -219,6 +219,8 @@ def print_version(ctx, param, value):
help="Specify a project option")
@click.option('--default-mirror', default=None,
help="The mirror to fetch from first, before attempting other mirrors")
@click.option('--pull-build-trees', is_flag=True, default=None,
help="Include an element's build trees when pulling remote element artifacts")
@click.pass_context
def cli(context, **kwargs):
"""Build and manipulate BuildStream projects
......
......@@ -18,9 +18,9 @@
# Tristan Maat <tristan.maat@codethink.co.uk>
import os
import shutil
import subprocess
from .. import _site
from .. import utils
from ..sandbox import SandboxDummy
......@@ -38,16 +38,18 @@ class Linux(Platform):
self._have_fuse = os.path.exists("/dev/fuse")
bwrap_version = self._get_bwrap_version()
bwrap_version = _site.get_bwrap_version()
if bwrap_version is None:
self._bwrap_exists = False
self._have_good_bwrap = False
self._die_with_parent_available = False
self._json_status_available = False
else:
self._bwrap_exists = True
self._have_good_bwrap = (0, 1, 2) <= bwrap_version
self._die_with_parent_available = (0, 1, 8) <= bwrap_version
self._json_status_available = (0, 3, 2) <= bwrap_version
self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
......@@ -97,6 +99,7 @@ class Linux(Platform):
# Inform the bubblewrap sandbox as to whether it can use user namespaces or not
kwargs['user_ns_available'] = self._user_ns_available
kwargs['die_with_parent_available'] = self._die_with_parent_available
kwargs['json_status_available'] = self._json_status_available
return SandboxBwrap(*args, **kwargs)
def _check_user_ns_available(self):
......@@ -119,21 +122,3 @@ class Linux(Platform):
output = ''
return output == 'root'
def _get_bwrap_version(self):
# Get the current bwrap version
#
# returns None if no bwrap was found
# otherwise returns a tuple of 3 int: major, minor, patch
bwrap_path = shutil.which('bwrap')
if not bwrap_path:
return None
cmd = [bwrap_path, "--version"]
try:
version = str(subprocess.check_output(cmd).split()[1], "utf-8")
except subprocess.CalledProcessError:
return None
return tuple(int(x) for x in version.split("."))
......@@ -219,6 +219,19 @@ class Project():
return self._cache_key
# _validate_node()
#
# Validate a project configuration node, asserting that it
# contains only the keys recognized by this format version.
#
# Args:
#    node (dict): A yaml loaded project configuration node
#
def _validate_node(self, node):
    valid_keys = [
        'format-version',
        'element-path', 'variables',
        'environment', 'environment-nocache',
        'split-rules', 'elements', 'plugins',
        'aliases', 'name',
        'artifacts', 'options',
        'fail-on-overlap', 'shell', 'fatal-warnings',
        'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
        'sources', '(@)',
    ]
    _yaml.node_validate(node, valid_keys)
# create_element()
#
# Instantiate and return an element
......@@ -402,6 +415,8 @@ class Project():
"Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
.format(format_version, major, minor, BST_FORMAT_VERSION))
self._validate_node(pre_config_node)
# FIXME:
#
# Performing this check manually in the absence
......@@ -467,16 +482,7 @@ class Project():
self._load_pass(config, self.config)
_yaml.node_validate(config, [
'format-version',
'element-path', 'variables',
'environment', 'environment-nocache',
'split-rules', 'elements', 'plugins',
'aliases', 'name',
'artifacts', 'options',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors', 'remote-execution'
])
self._validate_node(config)
#
# Now all YAML composition is done, from here on we just load
......
......@@ -32,9 +32,20 @@ class PullQueue(Queue):
complete_name = "Pulled"
resources = [ResourceType.DOWNLOAD, ResourceType.CACHE]
def __init__(self, scheduler):
    super().__init__(scheduler)

    # By default the buildtree subdir is excluded from pull operations
    self._subdir = None
    self._excluded_subdirs = ["buildtree"]

    # When the user has opted in to pulling build trees (cli option or
    # user configuration), pull the buildtree subdir instead of
    # excluding it.
    if self._scheduler.context.pull_build_trees:
        self._subdir = "buildtree"
        self._excluded_subdirs.remove("buildtree")
def process(self, element):
# returns whether an artifact was downloaded or not
if not element._pull():
if not element._pull(subdir=self._subdir, excluded_subdirs=self._excluded_subdirs):
raise SkipJob(self.action_name)
def status(self, element):
......@@ -49,7 +60,7 @@ class PullQueue(Queue):
if not element._can_query_cache():
return QueueStatus.WAIT
if element._pull_pending():
if element._pull_pending(subdir=self._subdir):
return QueueStatus.READY
else:
return QueueStatus.SKIP
......
......@@ -18,6 +18,8 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
import os
import shutil
import subprocess
#
# Private module declaring some info about where the buildstream
......@@ -44,3 +46,22 @@ build_all_template = os.path.join(root, 'data', 'build-all.sh.in')
# Module building script template
build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
def get_bwrap_version():
    # Get the current bwrap version
    #
    # Returns:
    #    (tuple|None): A (major, minor, patch) tuple of ints, or None
    #                  if no usable bwrap was found
    #
    bwrap_path = shutil.which('bwrap')
    if not bwrap_path:
        return None

    cmd = [bwrap_path, "--version"]
    try:
        # Expected output looks like "bubblewrap 0.3.1"
        version = str(subprocess.check_output(cmd).split()[1], "utf-8")
        return tuple(int(x) for x in version.split("."))
    except (subprocess.CalledProcessError, IndexError, ValueError):
        # Treat a failing `bwrap --version`, or output we cannot parse
        # into a version tuple, the same as bwrap being absent rather
        # than crashing (the documented contract is to return None).
        return None
......@@ -97,3 +97,5 @@ logging:
[%{elapsed}][%{key}][%{element}] %{action} %{message}
# Whether to pull buildtrees when downloading element artifacts
pullbuildtrees: False
......@@ -1398,9 +1398,18 @@ class Element(Plugin):
.format(workspace.get_absolute_path())):
workspace.stage(temp_staging_directory)
elif self._cached():
# We have a cached buildtree to use, instead
artifact_base, _ = self.__extract()
import_dir = os.path.join(artifact_base, 'buildtree')
# Check if we have a cached buildtree to use
context = self._get_context()
if context.get_strict():
if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, 'buildtree'):
artifact_base, _ = self.__extract()
import_dir = os.path.join(artifact_base, 'buildtree')
elif self.__artifacts.contains_subdir_artifact(self, self.__weak_cache_key, 'buildtree'):
artifact_base, _ = self.__extract()
import_dir = os.path.join(artifact_base, 'buildtree')
else:
self.warn("{} is cached without a buildtree, the source will be staged instead"
.format(self.name))
else:
# No workspace, stage directly
for source in self.sources():
......@@ -1691,18 +1700,26 @@ class Element(Plugin):
# _pull_pending()
#
# Check whether the artifact will be pulled.
# Check whether the artifact will be pulled. If the pull operation is to
# include a specific subdir of the element artifact (from cli or user conf)
# then the local cache is queried for the subdirs existence.
#
# Args:
# subdir (str): Whether the pull has been invoked with a specific subdir set
#
# Returns:
# (bool): Whether a pull operation is pending
#
def _pull_pending(self):
def _pull_pending(self, subdir=None):
if self._get_workspace():
# Workspace builds are never pushed to artifact servers
return False
if self.__strong_cached:
# Artifact already in local cache
if self.__strong_cached and subdir:
# If we've specified a subdir, check if the subdir is cached locally
if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, subdir):
return False
elif self.__strong_cached:
return False
# Pull is pending if artifact remote server available
......@@ -1724,50 +1741,27 @@ class Element(Plugin):
self._update_state()
def _pull_strong(self, *, progress=None):
weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
key = self.__strict_cache_key
if not self.__artifacts.pull(self, key, progress=progress):
return False
# update weak ref by pointing it to this newly fetched artifact
self.__artifacts.link_key(self, key, weak_key)
return True
def _pull_weak(self, *, progress=None):
weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
if not self.__artifacts.pull(self, weak_key, progress=progress):
return False
# extract strong cache key from this newly fetched artifact
self._pull_done()
# create tag for strong cache key
key = self._get_cache_key(strength=_KeyStrength.STRONG)
self.__artifacts.link_key(self, weak_key, key)
return True
# _pull():
#
# Pull artifact from remote artifact repository into local artifact cache.
#
# Args:
# subdir (str): The optional specific subdir to pull
# excluded_subdirs (list): The optional list of subdirs to not pull
#
# Returns: True if the artifact has been downloaded, False otherwise
#
def _pull(self):
def _pull(self, subdir=None, excluded_subdirs=None):
context = self._get_context()
def progress(percent, message):
self.status(message)
# Attempt to pull artifact without knowing whether it's available
pulled = self._pull_strong(progress=progress)
pulled = self.__pull_strong(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
if not pulled and not self._cached() and not context.get_strict():
pulled = self._pull_weak(progress=progress)
pulled = self.__pull_weak(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
if not pulled:
return False
......@@ -1790,10 +1784,21 @@ class Element(Plugin):
if not self._cached():
return True
# Do not push tained artifact
# Do not push tainted artifact
if self.__get_tainted():
return True
# strict_cache_key can't be relied on to be available when running in non strict mode
context = self._get_context()
# Do not push elements that have a dangling buildtree artifact unless element type is
# expected to have an empty buildtree directory
if context.get_strict():
if not self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, 'buildtree'):
return True
elif not self.__artifacts.contains_subdir_artifact(self, self.__weak_cache_key, 'buildtree'):
return True
return False
# _push():
......@@ -2674,6 +2679,59 @@ class Element(Plugin):
return utils._deduplicate(keys)
# __pull_strong():
#
# Attempt to pull the artifact for this element from the configured
# artifact caches, using the strict cache key.
#
# Args:
#    progress (callable): The progress callback, if any
#    subdir (str): The optional specific subdir to pull
#    excluded_subdirs (list): The optional list of subdirs to not pull
#
# Returns:
#    (bool): Whether or not the pull was successful
#
def __pull_strong(self, *, progress=None, subdir=None, excluded_subdirs=None):
    weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    strict_key = self.__strict_cache_key

    pulled = self.__artifacts.pull(self, strict_key, progress=progress,
                                   subdir=subdir, excluded_subdirs=excluded_subdirs)
    if not pulled:
        return False

    # Point the weak ref at the artifact we just fetched
    self.__artifacts.link_key(self, strict_key, weak_key)
    return True
# __pull_weak():
#
# Attempt to pull the artifact for this element from the configured
# artifact caches, using the weak cache key.
#
# Args:
#    progress (callable): The progress callback, if any
#    subdir (str): The optional specific subdir to pull
#    excluded_subdirs (list): The optional list of subdirs to not pull
#
# Returns:
#    (bool): Whether or not the pull was successful
#
def __pull_weak(self, *, progress=None, subdir=None, excluded_subdirs=None):
    weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)

    pulled = self.__artifacts.pull(self, weak_key, progress=progress,
                                   subdir=subdir, excluded_subdirs=excluded_subdirs)
    if not pulled:
        return False

    # The freshly fetched artifact carries its strong cache key;
    # resolve it from the artifact metadata.
    self._pull_done()

    # Tag the artifact with the now-known strong cache key
    strong_key = self._get_cache_key(strength=_KeyStrength.STRONG)
    self.__artifacts.link_key(self, weak_key, strong_key)
    return True
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
......
......@@ -17,6 +17,8 @@
# Authors:
# Andrew Leeming <andrew.leeming@codethink.co.uk>
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
import collections
import json
import os
import sys
import time
......@@ -24,7 +26,8 @@ import errno
import signal
import subprocess
import shutil
from contextlib import ExitStack
from contextlib import ExitStack, suppress
from tempfile import TemporaryFile
import psutil
......@@ -53,6 +56,7 @@ class SandboxBwrap(Sandbox):
super().__init__(*args, **kwargs)
self.user_ns_available = kwargs['user_ns_available']
self.die_with_parent_available = kwargs['die_with_parent_available']
self.json_status_available = kwargs['json_status_available']
def run(self, command, flags, *, cwd=None, env=None):
stdout, stderr = self._get_output()
......@@ -160,24 +164,31 @@ class SandboxBwrap(Sandbox):
gid = self._get_config().build_gid
bwrap_command += ['--uid', str(uid), '--gid', str(gid)]
# Add the command
bwrap_command += command
# bwrap might create some directories while being suid
# and may give them to root gid, if it does, we'll want
# to clean them up after, so record what we already had
# there just in case so that we can safely cleanup the debris.
#
existing_basedirs = {
directory: os.path.exists(os.path.join(root_directory, directory))
for directory in ['tmp', 'dev', 'proc']
}
# Use the MountMap context manager to ensure that any redirected
# mounts through fuse layers are in context and ready for bwrap
# to mount them from.
#
with ExitStack() as stack:
pass_fds = ()
# Improve error reporting with json-status if available
if self.json_status_available:
json_status_file = stack.enter_context(TemporaryFile())
pass_fds = (json_status_file.fileno(),)
bwrap_command += ['--json-status-fd', str(json_status_file.fileno())]
# Add the command
bwrap_command += command
# bwrap might create some directories while being suid
# and may give them to root gid, if it does, we'll want
# to clean them up after, so record what we already had
# there just in case so that we can safely cleanup the debris.
#
existing_basedirs = {
directory: os.path.exists(os.path.join(root_directory, directory))
for directory in ['tmp', 'dev', 'proc']
}
# Use the MountMap context manager to ensure that any redirected
# mounts through fuse layers are in context and ready for bwrap
# to mount them from.
#
stack.enter_context(mount_map.mounted(self))
# If we're interactive, we want to inherit our stdin,
......@@ -190,7 +201,7 @@ class SandboxBwrap(Sandbox):
# Run bubblewrap !
exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
(flags & SandboxFlags.INTERACTIVE))
(flags & SandboxFlags.INTERACTIVE), pass_fds)
# Cleanup things which bwrap might have left behind, while
# everything is still mounted because bwrap can be creating
......@@ -238,10 +249,27 @@ class SandboxBwrap(Sandbox):
# a bug, bwrap mounted a tempfs here and when it exits, that better be empty.
pass
if self.json_status_available:
json_status_file.seek(0, 0)
child_exit_code = None
# The JSON status file's output is a JSON object per line
# with the keys present identifying the type of message.
# The only message relevant to us now is the exit-code of the subprocess.
for line in json_status_file:
with suppress(json.decoder.JSONDecodeError):
o = json.loads(line)
if isinstance(o, collections.abc.Mapping) and 'exit-code' in o:
child_exit_code = o['exit-code']
break
if child_exit_code is None:
raise SandboxError("`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
reason="bwrap-sandbox-fail")
exit_code = child_exit_code
self._vdir._mark_changed()
return exit_code
def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
def run_bwrap(self, argv, stdin, stdout, stderr, interactive, pass_fds):
# Wrapper around subprocess.Popen() with common settings.
#
# This function blocks until the subprocess has terminated.
......@@ -317,6 +345,7 @@ class SandboxBwrap(Sandbox):
# The default is to share file descriptors from the parent process
# to the subprocess, which is rarely good for sandboxing.
close_fds=True,
pass_fds=pass_fds,
stdin=stdin,
stdout=stdout,
stderr=stderr,
......
......@@ -36,7 +36,7 @@
# the result.
#
from tests.testutils.runcli import cli
from tests.testutils.site import HAVE_BZR, HAVE_GIT, HAVE_OSTREE, IS_LINUX
from tests.testutils.site import HAVE_BZR, HAVE_GIT, HAVE_OSTREE, IS_LINUX, MACHINE_ARCH
from buildstream.plugin import CoreWarnings
from buildstream import _yaml
import os
......@@ -144,6 +144,8 @@ DATA_DIR = os.path.join(
# The cache key test uses a project which exercises all plugins,
# so we cant run it at all if we dont have them installed.
#
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Cache keys depend on architecture')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.skipif(HAVE_BZR is False, reason="bzr is not available")
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
......
......@@ -42,6 +42,7 @@ MAIN_OPTIONS = [
"-o ",
"--option ",
"--on-error ",
"--pull-build-trees ",
"--pushers ",
"--strict ",
"--verbose ",
......
......@@ -3,7 +3,7 @@ import pytest
from tests.testutils import cli_integration as cli
from tests.testutils.integration import assert_contains
from tests.testutils.site import IS_LINUX
from tests.testutils.site import IS_LINUX, MACHINE_ARCH
pytestmark = pytest.mark.integration
......@@ -13,6 +13,8 @@ DATA_DIR = os.path.join(
# Tests a build of the autotools amhello project on a alpine-linux base runtime
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, tmpdir, datafiles):
......@@ -36,6 +38,8 @@ def test_autotools_build(cli, tmpdir, datafiles):
# Test running an executable built with autotools.
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, tmpdir, datafiles):
......
......@@ -4,7 +4,7 @@ import pytest
import tests.testutils.patch as patch
from tests.testutils import cli_integration as cli
from tests.testutils.integration import assert_contains
from tests.testutils.site import IS_LINUX
from tests.testutils.site import IS_LINUX, MACHINE_ARCH
pytestmark = pytest.mark.integration
......@@ -14,6 +14,8 @@ DATA_DIR = os.path.join(
# Test that the project builds successfully
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, tmpdir, datafiles):
......@@ -35,6 +37,8 @@ def test_autotools_build(cli, tmpdir, datafiles):
# Test the unmodified hello command works as expected.
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_run_unmodified_hello(cli, tmpdir, datafiles):
......@@ -66,6 +70,8 @@ def test_open_workspace(cli, tmpdir, datafiles):
# Test making a change using the workspace
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_make_change_in_workspace(cli, tmpdir, datafiles):
......
......@@ -3,7 +3,7 @@ import pytest
from tests.testutils import cli_integration as cli
from tests.testutils.integration import assert_contains
from tests.testutils.site import IS_LINUX
from tests.testutils.site import IS_LINUX, MACHINE_ARCH
pytestmark = pytest.mark.integration
......@@ -32,6 +32,8 @@ def workaround_setuptools_bug(project):
# Test that a build upon flatpak runtime 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, tmpdir, datafiles):
......@@ -55,6 +57,8 @@ def test_autotools_build(cli, tmpdir, datafiles):
# Test running an executable built with autotools
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, tmpdir, datafiles):
......
......@@ -3,7 +3,7 @@ import pytest
from tests.testutils import cli_integration as cli
from tests.testutils.integration import assert_contains
from tests.testutils.site import IS_LINUX
from tests.testutils.site import IS_LINUX, MACHINE_ARCH
pytestmark = pytest.mark.integration
......@@ -12,6 +12,8 @@ DATA_DIR = os.path.join(
)
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_build(cli, tmpdir, datafiles):
......@@ -23,6 +25,8 @@ def test_integration_commands_build(cli, tmpdir, datafiles):
# Test running the executable
@pytest.mark.skipif(MACHINE_ARCH != 'x86_64',
reason='Examples are writtent for x86_64')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_run(cli, tmpdir, datafiles):
......