Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (46)
Showing with 683 additions and 454 deletions

@@ -166,6 +166,12 @@ docs:
BST_EXT_REF: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
FD_SDK_REF: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.11-35-g88d7c22c
before_script:
- |
mkdir -p "${HOME}/.config"
cat <<EOF >"${HOME}/.config/buildstream.conf"
scheduler:
fetchers: 2
EOF
- (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
- pip3 install --user -e ${BST_EXT_URL}@${BST_EXT_REF}#egg=bst_ext
- git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
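
The before_script addition above writes a user configuration that caps parallel fetch jobs at 2. For context, a sketch of the same file with the other scheduler limits exposed by BuildStream's default user configuration (the builders and pushers values are assumptions, not part of this diff):

    scheduler:
      fetchers: 2
      builders: 4
      pushers: 4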

@@ -16,6 +16,8 @@ recursive-include doc/source *.rst
recursive-include doc/source *.py
recursive-include doc/source *.in
recursive-include doc/source *.html
recursive-include doc/source *.odg
recursive-include doc/source *.svg
recursive-include doc/examples *
# Tests

@@ -2,6 +2,12 @@
buildstream 1.3.1
=================
o BREAKING CHANGE: The 'manual' element lost its default 'MAKEFLAGS' and 'V'
environment variables. There is already a 'make' element with the same
variables. Note that this is a breaking change: it will require users to
update their .bst files if they expect these environment variables to be set.
o Failed builds are included in the cache as well.
`bst checkout` will provide anything in `%{install-root}`.
A build including cached failures will cause any dependent elements
@@ -31,6 +37,15 @@ buildstream 1.3.1
the new `conf-root` variable to make the process easier. There has also been
a bug fix to workspaces so they can be built in workspaces too.
o Creating a build shell through the interactive mode or `bst shell --build`
will now use the cached build tree. It is now easier to debug local build
failures.
o `bst shell --sysroot` now takes any directory that contains a sysroot,
instead of just a specially-formatted build-root with a `root` and `scratch`
subdirectory.
=================
buildstream 1.1.5
=================
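
For projects affected by the 'manual' element change above, a minimal sketch of restoring the removed defaults in an element's .bst file; the values mirror the block removed from the manual element's defaults at the end of this diff:

    environment:
      MAKEFLAGS: -j%{max-jobs}
      V: 1

    environment-nocache:
    - MAKEFLAGS
    - V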

@@ -17,17 +17,22 @@
# Authors:
# Tristan Maat <tristan.maat@codethink.co.uk>
import multiprocessing
import os
import signal
import string
from collections import namedtuple
from collections.abc import Mapping
from ..types import _KeyStrength
from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
from .._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
from .._message import Message, MessageType
from .. import _signals
from .. import utils
from .. import _yaml
from .cascache import CASCache, CASRemote
CACHE_SIZE_FILE = "cache_size"
@@ -93,7 +98,8 @@ class ArtifactCache():
def __init__(self, context):
self.context = context
self.extractdir = os.path.join(context.artifactdir, 'extract')
self.tmpdir = os.path.join(context.artifactdir, 'tmp')
self.cas = CASCache(context.artifactdir)
self.global_remote_specs = []
self.project_remote_specs = {}
@@ -104,12 +110,15 @@
self._cache_lower_threshold = None # The target cache size for a cleanup
self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of _CASRemote instances.
self._remotes = {}
self._has_fetch_remotes = False
self._has_push_remotes = False
os.makedirs(self.extractdir, exist_ok=True)
os.makedirs(self.tmpdir, exist_ok=True)
################################################
# Methods implemented on the abstract class #
################################################
self._calculate_cache_quota()
# get_artifact_fullname()
#
@@ -240,8 +249,10 @@
for key in (strong_key, weak_key):
if key:
try:
self.update_mtime(element, key)
except ArtifactError:
ref = self.get_artifact_fullname(element, key)
self.cas.update_mtime(ref)
except CASError:
pass
# clean():
@@ -252,7 +263,7 @@
# (int): The size of the cache after having cleaned up
#
def clean(self):
artifacts = self.list_artifacts() # pylint: disable=assignment-from-no-return
artifacts = self.list_artifacts()
# Build a set of the cache keys which are required
# based on the required elements at cleanup time
@@ -294,7 +305,7 @@
if key not in required_artifacts:
# Remove the actual artifact, if it's not required.
size = self.remove(to_remove) # pylint: disable=assignment-from-no-return
size = self.remove(to_remove)
# Remove the size from the removed size
self.set_cache_size(self._cache_size - size)
@@ -311,7 +322,7 @@
# (int): The size of the artifact cache.
#
def compute_cache_size(self):
self._cache_size = self.calculate_cache_size() # pylint: disable=assignment-from-no-return
self._cache_size = self.cas.calculate_cache_size()
return self._cache_size
@@ -380,28 +391,12 @@
def has_quota_exceeded(self):
return self.get_cache_size() > self._cache_quota
################################################
# Abstract methods for subclasses to implement #
################################################
# preflight():
#
# Preflight check.
#
def preflight(self):
pass
# update_mtime()
#
# Update the mtime of an artifact.
#
# Args:
# element (Element): The Element to update
# key (str): The key of the artifact.
#
def update_mtime(self, element, key):
raise ImplError("Cache '{kind}' does not implement update_mtime()"
.format(kind=type(self).__name__))
self.cas.preflight()
# initialize_remotes():
#
@@ -411,7 +406,59 @@
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
pass
remote_specs = self.global_remote_specs
for project in self.project_remote_specs:
remote_specs += self.project_remote_specs[project]
remote_specs = list(utils._deduplicate(remote_specs))
remotes = {}
q = multiprocessing.Queue()
for remote_spec in remote_specs:
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
p = multiprocessing.Process(target=self.cas.initialize_remote, args=(remote_spec, q))
try:
# Keep SIGINT blocked in the child process
with _signals.blocked([signal.SIGINT], ignore=False):
p.start()
error = q.get()
p.join()
except KeyboardInterrupt:
utils._kill_process_tree(p.pid)
raise
if error and on_failure:
on_failure(remote_spec.url, error)
elif error:
raise ArtifactError(error)
else:
self._has_fetch_remotes = True
if remote_spec.push:
self._has_push_remotes = True
remotes[remote_spec.url] = CASRemote(remote_spec)
for project in self.context.get_projects():
remote_specs = self.global_remote_specs
if project in self.project_remote_specs:
remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
project_remotes = []
for remote_spec in remote_specs:
# Errors are already handled in the loop above,
# skip unreachable remotes here.
if remote_spec.url not in remotes:
continue
remote = remotes[remote_spec.url]
project_remotes.append(remote)
self._remotes[project] = project_remotes
# contains():
#
@@ -425,8 +472,9 @@
# Returns: True if the artifact is in the cache, False otherwise
#
def contains(self, element, key):
raise ImplError("Cache '{kind}' does not implement contains()"
.format(kind=type(self).__name__))
ref = self.get_artifact_fullname(element, key)
return self.cas.contains(ref)
# list_artifacts():
#
@@ -437,8 +485,7 @@
# `ArtifactCache.get_artifact_fullname` in LRU order
#
def list_artifacts(self):
raise ImplError("Cache '{kind}' does not implement list_artifacts()"
.format(kind=type(self).__name__))
return self.cas.list_refs()
# remove():
#
@@ -450,9 +497,31 @@
# generated by
# `ArtifactCache.get_artifact_fullname`)
#
def remove(self, artifact_name):
raise ImplError("Cache '{kind}' does not implement remove()"
.format(kind=type(self).__name__))
# Returns:
# (int|None) The amount of space pruned from the repository in
# Bytes, or None if defer_prune is True
#
def remove(self, ref):
# Remove extract if not used by other ref
tree = self.cas.resolve_ref(ref)
ref_name, ref_hash = os.path.split(ref)
extract = os.path.join(self.extractdir, ref_name, tree.hash)
keys_file = os.path.join(extract, 'meta', 'keys.yaml')
if os.path.exists(keys_file):
keys_meta = _yaml.load(keys_file)
keys = [keys_meta['strong'], keys_meta['weak']]
remove_extract = True
for other_hash in keys:
if other_hash == ref_hash:
continue
remove_extract = False
break
if remove_extract:
utils._force_rmtree(extract)
return self.cas.remove(ref)
# extract():
#
@@ -472,8 +541,11 @@
# Returns: path to extracted artifact
#
def extract(self, element, key):
raise ImplError("Cache '{kind}' does not implement extract()"
.format(kind=type(self).__name__))
ref = self.get_artifact_fullname(element, key)
path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
return self.cas.extract(ref, path)
# commit():
#
@@ -485,8 +557,9 @@
# keys (list): The cache keys to use
#
def commit(self, element, content, keys):
raise ImplError("Cache '{kind}' does not implement commit()"
.format(kind=type(self).__name__))
refs = [self.get_artifact_fullname(element, key) for key in keys]
self.cas.commit(refs, content)
# diff():
#
@@ -500,8 +573,10 @@
# subdir (str): A subdirectory to limit the comparison to
#
def diff(self, element, key_a, key_b, *, subdir=None):
raise ImplError("Cache '{kind}' does not implement diff()"
.format(kind=type(self).__name__))
ref_a = self.get_artifact_fullname(element, key_a)
ref_b = self.get_artifact_fullname(element, key_b)
return self.cas.diff(ref_a, ref_b, subdir=subdir)
# has_fetch_remotes():
#
@@ -513,7 +588,16 @@
# Returns: True if any remote repositories are configured, False otherwise
#
def has_fetch_remotes(self, *, element=None):
return False
if not self._has_fetch_remotes:
# No project has fetch remotes
return False
elif element is None:
# At least one (sub)project has fetch remotes
return True
else:
# Check whether the specified element's project has fetch remotes
remotes_for_project = self._remotes[element._get_project()]
return bool(remotes_for_project)
# has_push_remotes():
#
@@ -525,7 +609,16 @@
# Returns: True if any remote repository is configured, False otherwise
#
def has_push_remotes(self, *, element=None):
return False
if not self._has_push_remotes:
# No project has push remotes
return False
elif element is None:
# At least one (sub)project has push remotes
return True
else:
# Check whether the specified element's project has push remotes
remotes_for_project = self._remotes[element._get_project()]
return any(remote.spec.push for remote in remotes_for_project)
# push():
#
@@ -542,8 +635,28 @@
# (ArtifactError): if there was an error
#
def push(self, element, keys):
raise ImplError("Cache '{kind}' does not implement push()"
.format(kind=type(self).__name__))
refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
project = element._get_project()
push_remotes = [r for r in self._remotes[project] if r.spec.push]
pushed = False
for remote in push_remotes:
remote.init()
display_key = element._get_brief_display_key()
element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
if self.cas.push(refs, remote):
element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
pushed = True
else:
element.info("Remote ({}) already has {} cached".format(
remote.spec.url, element._get_brief_display_key()
))
return pushed
# pull():
#
@@ -558,8 +671,130 @@
# (bool): True if pull was successful, False if artifact was not available
#
def pull(self, element, key, *, progress=None):
raise ImplError("Cache '{kind}' does not implement pull()"
.format(kind=type(self).__name__))
ref = self.get_artifact_fullname(element, key)
project = element._get_project()
for remote in self._remotes[project]:
try:
display_key = element._get_brief_display_key()
element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
if self.cas.pull(ref, remote, progress=progress):
element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
# no need to pull from additional remotes
return True
else:
element.info("Remote ({}) does not have {} cached".format(
remote.spec.url, element._get_brief_display_key()
))
except CASError as e:
raise ArtifactError("Failed to pull artifact {}: {}".format(
element._get_brief_display_key(), e)) from e
return False
# pull_tree():
#
# Pull a single Tree rather than an artifact.
# Does not update local refs.
#
# Args:
# project (Project): The current project
# digest (Digest): The digest of the tree
#
def pull_tree(self, project, digest):
for remote in self._remotes[project]:
digest = self.cas.pull_tree(remote, digest)
if digest:
# no need to pull from additional remotes
return digest
return None
# push_directory():
#
# Push the given virtual directory to all remotes.
#
# Args:
# project (Project): The current project
# directory (Directory): A virtual directory object to push.
#
# Raises:
# (ArtifactError): if there was an error
#
def push_directory(self, project, directory):
if self._has_push_remotes:
push_remotes = [r for r in self._remotes[project] if r.spec.push]
else:
push_remotes = []
if not push_remotes:
raise ArtifactError("push_directory was called, but no remote artifact " +
"servers are configured as push remotes.")
if directory.ref is None:
return
for remote in push_remotes:
self.cas.push_directory(remote, directory)
# push_message():
#
# Push the given protobuf message to all remotes.
#
# Args:
# project (Project): The current project
# message (Message): A protobuf message to push.
#
# Raises:
# (ArtifactError): if there was an error
#
def push_message(self, project, message):
if self._has_push_remotes:
push_remotes = [r for r in self._remotes[project] if r.spec.push]
else:
push_remotes = []
if not push_remotes:
raise ArtifactError("push_message was called, but no remote artifact " +
"servers are configured as push remotes.")
for remote in push_remotes:
message_digest = self.cas.push_message(remote, message)
return message_digest
# verify_digest_pushed():
#
# Check whether the object is already on the server in which case
# there is no need to upload it.
#
# Args:
# project (Project): The current project
# digest (Digest): The object digest.
#
def verify_digest_pushed(self, project, digest):
if self._has_push_remotes:
push_remotes = [r for r in self._remotes[project] if r.spec.push]
else:
push_remotes = []
if not push_remotes:
raise ArtifactError("verify_digest_pushed was called, but no remote artifact " +
"servers are configured as push remotes.")
pushed = False
for remote in push_remotes:
if self.cas.verify_digest_on_remote(remote, digest):
pushed = True
return pushed
# link_key():
#
@@ -571,19 +806,10 @@
# newkey (str): A new cache key for the artifact
#
def link_key(self, element, oldkey, newkey):
raise ImplError("Cache '{kind}' does not implement link_key()"
.format(kind=type(self).__name__))
oldref = self.get_artifact_fullname(element, oldkey)
newref = self.get_artifact_fullname(element, newkey)
# calculate_cache_size()
#
# Return the real artifact cache size.
#
# Returns:
# (int): The size of the artifact cache.
#
def calculate_cache_size(self):
raise ImplError("Cache '{kind}' does not implement calculate_cache_size()"
.format(kind=type(self).__name__))
self.cas.link_ref(oldref, newref)
################################################
# Local Private Methods #

This diff is collapsed.

@@ -32,8 +32,9 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
from .._exceptions import ArtifactError
from .._context import Context
from .._exceptions import CASError
from .cascache import CASCache
# The default limit for gRPC messages is 4 MiB.
......@@ -55,26 +56,23 @@ class ArtifactTooLargeException(Exception):
# enable_push (bool): Whether to allow blob uploads and artifact updates
#
def create_server(repo, *, enable_push):
context = Context()
context.artifactdir = os.path.abspath(repo)
artifactcache = context.artifactcache
cas = CASCache(os.path.abspath(repo))
# Use max_workers default from Python 3.5+
max_workers = (os.cpu_count() or 1) * 5
server = grpc.server(futures.ThreadPoolExecutor(max_workers))
bytestream_pb2_grpc.add_ByteStreamServicer_to_server(
_ByteStreamServicer(artifactcache, enable_push=enable_push), server)
_ByteStreamServicer(cas, enable_push=enable_push), server)
remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
_ContentAddressableStorageServicer(artifactcache, enable_push=enable_push), server)
_ContentAddressableStorageServicer(cas, enable_push=enable_push), server)
remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
_CapabilitiesServicer(), server)
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
_ReferenceStorageServicer(artifactcache, enable_push=enable_push), server)
_ReferenceStorageServicer(cas, enable_push=enable_push), server)
return server
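
A brief usage sketch of the reworked create_server(), assuming the standard grpc-python server API; the repository path and port are placeholders:

    server = create_server('/srv/artifact-cache', enable_push=True)
    server.add_insecure_port('localhost:50052')
    server.start()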
@@ -333,7 +331,7 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
response.digest.hash = tree.hash
response.digest.size_bytes = tree.size_bytes
except ArtifactError:
except CASError:
context.set_code(grpc.StatusCode.NOT_FOUND)
return response
@@ -437,7 +435,7 @@ def _clean_up_cache(cas, object_size):
return 0
# obtain a list of LRP artifacts
LRP_artifacts = cas.list_artifacts()
LRP_artifacts = cas.list_refs()
removed_size = 0 # in bytes
while object_size - removed_size > free_disk_space:

@@ -31,7 +31,6 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
from ._artifactcache.cascache import CASCache
from ._workspaces import Workspaces
from .plugin import _plugin_lookup
@@ -233,7 +232,7 @@ class Context():
@property
def artifactcache(self):
if not self._artifactcache:
self._artifactcache = CASCache(self)
self._artifactcache = ArtifactCache(self)
return self._artifactcache

@@ -47,7 +47,6 @@ class ElementFactory(PluginContext):
# Args:
# context (object): The Context object for processing
# project (object): The project object
# artifacts (ArtifactCache): The artifact cache
# meta (object): The loaded MetaElement
#
# Returns: A newly created Element object of the appropriate kind
@@ -56,9 +55,9 @@
# PluginError (if the kind lookup failed)
# LoadError (if the element itself took issue with the config)
#
def create(self, context, project, artifacts, meta):
def create(self, context, project, meta):
element_type, default_config = self.lookup(meta.kind)
element = element_type(context, project, artifacts, meta, default_config)
element = element_type(context, project, meta, default_config)
version = self._format_versions.get(meta.kind, 0)
self._assert_plugin_format(element, version)
return element

@@ -90,6 +90,7 @@ class ErrorDomain(Enum):
APP = 12
STREAM = 13
VIRTUAL_FS = 14
CAS = 15
# BstError is an internal base exception class for BuildStream
@@ -111,10 +112,8 @@ class BstError(Exception):
#
self.detail = detail
# The build sandbox in which the error occurred, if the
# error occurred at element assembly time.
#
self.sandbox = None
# A sandbox can be created to debug this error
self.sandbox = False
# When this exception occurred during the handling of a job, indicate
# whether or not there is any point retrying the job.
@@ -276,6 +275,15 @@ class ArtifactError(BstError):
super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True)
# CASError
#
# Raised when errors are encountered in the CAS
#
class CASError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True)
# PipelineError
#
# Raised from pipeline operations

@@ -305,7 +305,6 @@ class App():
directory = self._main_options['directory']
directory = os.path.abspath(directory)
project_path = os.path.join(directory, 'project.conf')
elements_path = os.path.join(directory, element_path)
try:
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
@@ -335,6 +334,7 @@
raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
# Create the elements sub-directory if it doesn't exist
elements_path = os.path.join(directory, element_path)
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
@@ -597,7 +597,7 @@ class App():
click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
try:
prompt = self.shell_prompt(element)
self.stream.shell(element, Scope.BUILD, prompt, directory=failure.sandbox, isolate=True)
self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
except BstError as e:
click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
elif choice == 'log':

@@ -668,17 +668,6 @@ class LogLine(Widget):
extra_nl = True
if message.sandbox is not None:
sandbox = self._indent + 'Sandbox directory: ' + message.sandbox
text += '\n'
if message.message_type == MessageType.FAIL:
text += self._err_profile.fmt(sandbox, bold=True)
else:
text += self._detail_profile.fmt(sandbox)
text += '\n'
extra_nl = True
if message.scheduler and message.message_type == MessageType.FAIL:
text += '\n'

@@ -537,7 +537,7 @@ class Loader():
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
element = Element._new_from_meta(meta_element, self._context.artifactcache)
element = Element._new_from_meta(meta_element)
element._preflight()
sources = list(element.sources())

@@ -70,7 +70,7 @@ class Message():
self.elapsed = elapsed # The elapsed time, in timed messages
self.depth = depth # The depth of a timed message
self.logfile = logfile # The log file path where commands took place
self.sandbox = sandbox # The sandbox directory where an error occurred (if any)
self.sandbox = sandbox # Whether the error that caused this message used a sandbox
self.pid = os.getpid() # The process pid
self.unique_id = unique_id # The plugin object ID issuing the message
self.task_id = task_id # The plugin object ID of the task

@@ -106,7 +106,7 @@ class Pipeline():
profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
elements = self._project.load_elements(targets, self._artifacts,
elements = self._project.load_elements(targets,
rewritable=rewritable,
fetch_subprojects=fetch_subprojects)

@@ -224,18 +224,17 @@ class Project():
# Instantiate and return an element
#
# Args:
# artifacts (ArtifactCache): The artifact cache
# meta (MetaElement): The loaded MetaElement
# first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# (Element): A newly created Element object of the appropriate kind
#
def create_element(self, artifacts, meta, *, first_pass=False):
def create_element(self, meta, *, first_pass=False):
if first_pass:
return self.first_pass_config.element_factory.create(self._context, self, artifacts, meta)
return self.first_pass_config.element_factory.create(self._context, self, meta)
else:
return self.config.element_factory.create(self._context, self, artifacts, meta)
return self.config.element_factory.create(self._context, self, meta)
# create_source()
#
@@ -305,7 +304,6 @@
#
# Args:
# targets (list): Target names
# artifacts (ArtifactCache): Artifact cache
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
# fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
@@ -314,7 +312,7 @@
# Returns:
# (list): A list of loaded Element
#
def load_elements(self, targets, artifacts, *,
def load_elements(self, targets, *,
rewritable=False, fetch_subprojects=False):
with self._context.timed_activity("Loading elements", silent_nested=True):
meta_elements = self.loader.load(targets, rewritable=rewritable,
@@ -323,7 +321,7 @@
with self._context.timed_activity("Resolving elements"):
elements = [
Element._new_from_meta(meta, artifacts)
Element._new_from_meta(meta)
for meta in meta_elements
]

@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 17
BST_FORMAT_VERSION = 18
# The base BuildStream artifact version

@@ -1049,6 +1049,12 @@ class ChainMap(collections.ChainMap):
for key in clearable:
del self[key]
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def node_chain_copy(source):
copy = ChainMap({}, source)
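
A brief usage sketch of the new ChainMap.get() override; the mappings are placeholders:

    cm = ChainMap({'foo': 1}, {'bar': 2})
    assert cm.get('bar') == 2           # resolved through the chained maps
    assert cm.get('baz', 'x') == 'x'    # missing keys now return the default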

@@ -174,7 +174,7 @@ class Element(Plugin):
*Since: 1.4*
"""
def __init__(self, context, project, artifacts, meta, plugin_conf):
def __init__(self, context, project, meta, plugin_conf):
self.__cache_key_dict = None # Dict for cache key calculation
self.__cache_key = None # Our cached cache key
@@ -199,7 +199,7 @@
self.__sources = [] # List of Sources
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = artifacts # Artifact cache
self.__artifacts = context.artifactcache # Artifact cache
self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
self.__strong_cached = None # Whether we have a cached artifact
self.__weak_cached = None # Whether we have a cached artifact
@@ -872,14 +872,13 @@
# and its dependencies from a meta element.
#
# Args:
# artifacts (ArtifactCache): The artifact cache
# meta (MetaElement): The meta element
#
# Returns:
# (Element): A newly created Element instance
#
@classmethod
def _new_from_meta(cls, meta, artifacts):
def _new_from_meta(cls, meta):
if not meta.first_pass:
meta.project.ensure_fully_loaded()
@@ -887,7 +886,7 @@
if meta in cls.__instantiated_elements:
return cls.__instantiated_elements[meta]
element = meta.project.create_element(artifacts, meta, first_pass=meta.first_pass)
element = meta.project.create_element(meta, first_pass=meta.first_pass)
cls.__instantiated_elements[meta] = element
# Instantiate sources
@@ -904,10 +903,10 @@
# Instantiate dependencies
for meta_dep in meta.dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
dependency = Element._new_from_meta(meta_dep)
element.__runtime_dependencies.append(dependency)
for meta_dep in meta.build_dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
dependency = Element._new_from_meta(meta_dep)
element.__build_dependencies.append(dependency)
return element
@@ -1318,7 +1317,9 @@
@contextmanager
def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
# bst shell and bst checkout require a local sandbox.
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False) as sandbox:
bare_directory = bool(directory)
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
bare_directory=bare_directory) as sandbox:
# Configure always comes first, and we need it.
self.configure_sandbox(sandbox)
@@ -1385,6 +1386,7 @@
# the same filesystem as the rest of our cache.
temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
import_dir = temp_staging_directory
try:
workspace = self._get_workspace()
@@ -1395,12 +1397,16 @@
with self.timed_activity("Staging local files at {}"
.format(workspace.get_absolute_path())):
workspace.stage(temp_staging_directory)
elif self._cached():
# We have a cached buildtree to use, instead
artifact_base, _ = self.__extract()
import_dir = os.path.join(artifact_base, 'buildtree')
else:
# No workspace, stage directly
for source in self.sources():
source._stage(temp_staging_directory)
vdirectory.import_files(temp_staging_directory)
vdirectory.import_files(import_dir)
finally:
# Staging may produce directories with less than 'rwx' permissions
@@ -1566,9 +1572,8 @@
collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except BstError as e:
# If an error occurred assembling an element in a sandbox,
# then tack on the sandbox directory to the error
e.sandbox = rootdir
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
# If there is a workspace open on this element, it will have
# been mounted for sandbox invocations instead of being staged.
@@ -1683,8 +1688,8 @@
"unable to collect artifact contents"
.format(collect))
# Finally cleanup the build dir
cleanup_rootdir()
# Finally cleanup the build dir
cleanup_rootdir()
return artifact_size
@@ -2051,7 +2056,7 @@
'sources': [s._get_unique_key(workspace is None) for s in self.__sources],
'workspace': '' if workspace is None else workspace.get_key(self._get_project()),
'public': self.__public,
'cache': type(self.__artifacts).__name__
'cache': 'CASCache'
}
self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
@@ -2152,12 +2157,14 @@
# stderr (fileobject): The stream for stderr for the sandbox
# config (SandboxConfig): The SandboxConfig object
# allow_remote (bool): Whether the sandbox is allowed to be remote
# bare_directory (bool): Whether the directory is bare i.e. doesn't have
# a separate 'root' subdir
#
# Yields:
# (Sandbox): A usable sandbox
#
@contextmanager
def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True):
def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False):
context = self._get_context()
project = self._get_project()
platform = Platform.get_platform()
......@@ -2172,6 +2179,7 @@ class Element(Plugin):
stderr=stderr,
config=config,
server_url=self.__remote_execution_url,
bare_directory=bare_directory,
allow_real_directory=False)
yield sandbox
......@@ -2188,6 +2196,7 @@ class Element(Plugin):
stdout=stdout,
stderr=stderr,
config=config,
bare_directory=bare_directory,
allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
yield sandbox
@@ -2197,7 +2206,7 @@
# Recursive contextmanager...
with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
allow_remote=allow_remote) as sandbox:
allow_remote=allow_remote, bare_directory=False) as sandbox:
yield sandbox
# Cleanup the build dir

# No variables added for the manual element by default, set
# this if you plan to use make, and the sources cannot handle
# parallelization.
#
# variables:
#
# notparallel: True
# Manual build element does not provide any default
# build commands
config:
@@ -28,14 +20,3 @@ config:
strip-commands:
- |
%{strip-binaries}
# Use max-jobs CPUs for building and enable verbosity
environment:
MAKEFLAGS: -j%{max-jobs}
V: 1
# And don't consider MAKEFLAGS or V as something which may
# affect build output.
environment-nocache:
- MAKEFLAGS
- V

@@ -5,16 +5,81 @@ import urllib.request
import urllib.error
import contextlib
import shutil
import netrc
from buildstream import Source, SourceError, Consistency
from buildstream import utils
class NetrcFTPOpener(urllib.request.FTPHandler):
def __init__(self, netrc_config):
self.netrc = netrc_config
def _split(self, netloc):
userpass, hostport = urllib.parse.splituser(netloc)
host, port = urllib.parse.splitport(hostport)
if userpass:
user, passwd = urllib.parse.splitpasswd(userpass)
else:
user = ''
passwd = ''
return host, port, user, passwd
def _unsplit(self, host, port, user, passwd):
if port:
host = '{}:{}'.format(host, port)
if user:
if passwd:
user = '{}:{}'.format(user, passwd)
host = '{}@{}'.format(user, host)
return host
def ftp_open(self, req):
host, port, user, passwd = self._split(req.host)
if (not user or not passwd) and self.netrc:
entry = self.netrc.authenticators(host)
if entry:
entry_login, _, entry_password = entry
if not user:
user = entry_login
if not passwd:
passwd = entry_password
req.host = self._unsplit(host, port, user, passwd)
return super().ftp_open(req)
class NetrcPasswordManager:
def __init__(self, netrc_config):
self.netrc = netrc_config
def add_password(self, realm, uri, user, passwd):
pass
def find_user_password(self, realm, authuri):
if not self.netrc:
return None, None
parts = urllib.parse.urlsplit(authuri)
entry = self.netrc.authenticators(parts.hostname)
if not entry:
return None, None
else:
login, _, password = entry
return login, password
class DownloadableFileSource(Source):
# pylint: disable=attribute-defined-outside-init
COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ['url', 'ref', 'etag']
__urlopener = None
def configure(self, node):
self.original_url = self.node_get_member(node, str, 'url')
self.ref = self.node_get_member(node, str, 'ref', None)
@@ -118,7 +183,8 @@ class DownloadableFileSource(Source):
if etag and self.get_consistency() == Consistency.CACHED:
request.add_header('If-None-Match', etag)
with contextlib.closing(urllib.request.urlopen(request)) as response:
opener = self.__get_urlopener()
with contextlib.closing(opener.open(request)) as response:
info = response.info()
etag = info['ETag'] if 'ETag' in info else None
@@ -164,3 +230,19 @@
def _get_mirror_file(self, sha=None):
return os.path.join(self._get_mirror_dir(), sha or self.ref)
def __get_urlopener(self):
if not DownloadableFileSource.__urlopener:
try:
netrc_config = netrc.netrc()
except FileNotFoundError:
DownloadableFileSource.__urlopener = urllib.request.build_opener()
except netrc.NetrcParseError as e:
self.warn('{}: While reading .netrc: {}'.format(self, e))
return urllib.request.build_opener()
else:
netrc_pw_mgr = NetrcPasswordManager(netrc_config)
http_auth = urllib.request.HTTPBasicAuthHandler(netrc_pw_mgr)
ftp_handler = NetrcFTPOpener(netrc_config)
DownloadableFileSource.__urlopener = urllib.request.build_opener(http_auth, ftp_handler)
return DownloadableFileSource.__urlopener
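
For reference, the credentials these handlers consume come from a standard ~/.netrc file; a minimal sketch with placeholder host and credentials:

    machine files.example.com
    login myuser
    password mysecret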