Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (42)
Showing 94 additions and 87 deletions
......@@ -16,6 +16,8 @@ recursive-include doc/source *.rst
recursive-include doc/source *.py
recursive-include doc/source *.in
recursive-include doc/source *.html
recursive-include doc/source *.odg
recursive-include doc/source *.svg
recursive-include doc/examples *
# Tests
......
......@@ -2,6 +2,12 @@
buildstream 1.3.1
=================
o BREAKING CHANGE: The 'manual' element lost its default 'MAKEFLAGS' and 'V'
environment variables. There is already a 'make' element with the same
variables. Note that this is a breaking change; it will require users to
make changes to their .bst files if they are expecting these environment
variables to be set.
o Failed builds are included in the cache as well.
`bst checkout` will provide anything in `%{install-root}`.
A build including cached fails will cause any dependant elements
......@@ -31,6 +37,15 @@ buildstream 1.3.1
new the `conf-root` variable to make the process easier. And there has been
a bug fix to workspaces so they can be built in workspaces too.
o Creating a build shell through the interactive mode or `bst shell --build`
will now use the cached build tree. It is now easier to debug local build
failures.
o `bst shell --sysroot` now takes any directory that contains a sysroot,
instead of just a specially-formatted build-root with a `root` and `scratch`
subdirectory.
=================
buildstream 1.1.5
=================
......
......@@ -111,10 +111,8 @@ class BstError(Exception):
#
self.detail = detail
# The build sandbox in which the error occurred, if the
# error occurred at element assembly time.
#
self.sandbox = None
# A sandbox can be created to debug this error
self.sandbox = False
# When this exception occurred during the handling of a job, indicate
# whether or not there is any point retrying the job.
......
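The BstError change above replaces the stored sandbox directory with a simple boolean: the error no longer remembers where the sandbox lived, only that one can be recreated to debug the failure (the frontend hunk below drops the `directory=failure.sandbox` argument accordingly). A minimal, self-contained sketch of how a caller can use such a flag; the ToyBuildError and handle_failure names are illustrative, not BuildStream API:

# Illustrative only: mirrors the intent of the boolean BstError.sandbox flag.
class ToyBuildError(Exception):
    def __init__(self, message, *, sandbox=False):
        super().__init__(message)
        # True when the failure happened inside a build sandbox that can be
        # recreated for interactive debugging.
        self.sandbox = sandbox


def handle_failure(error, open_build_shell):
    if getattr(error, "sandbox", False):
        # A build shell can be recreated from cached state to debug this.
        open_build_shell()
    else:
        print("Error: {}".format(error))


# Usage: a failure flagged as sandboxed takes the debug-shell path.
handle_failure(ToyBuildError("assembly failed", sandbox=True),
               open_build_shell=lambda: print("dropping into build shell"))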
......@@ -305,7 +305,6 @@ class App():
directory = self._main_options['directory']
directory = os.path.abspath(directory)
project_path = os.path.join(directory, 'project.conf')
elements_path = os.path.join(directory, element_path)
try:
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
......@@ -335,6 +334,7 @@ class App():
raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
# Create the elements sub-directory if it doesn't exist
elements_path = os.path.join(directory, element_path)
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
......@@ -597,7 +597,7 @@ class App():
click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
try:
prompt = self.shell_prompt(element)
self.stream.shell(element, Scope.BUILD, prompt, directory=failure.sandbox, isolate=True)
self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
except BstError as e:
click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
elif choice == 'log':
......
......@@ -668,17 +668,6 @@ class LogLine(Widget):
extra_nl = True
if message.sandbox is not None:
sandbox = self._indent + 'Sandbox directory: ' + message.sandbox
text += '\n'
if message.message_type == MessageType.FAIL:
text += self._err_profile.fmt(sandbox, bold=True)
else:
text += self._detail_profile.fmt(sandbox)
text += '\n'
extra_nl = True
if message.scheduler and message.message_type == MessageType.FAIL:
text += '\n'
......
......@@ -146,8 +146,8 @@ def _extract_depends_from_node(node, *, key=None):
depends = _yaml.node_get(node, list, key, default_value=[])
output_deps = []
for dep in depends:
dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[depends.index(dep)])
for index, dep in enumerate(depends):
dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[index])
if isinstance(dep, str):
dependency = Dependency(dep, provenance=dep_provenance, dep_type=default_dep_type)
......@@ -177,10 +177,8 @@ def _extract_depends_from_node(node, *, key=None):
provenance=dep_provenance)
else:
index = depends.index(dep)
p = _yaml.node_get_provenance(node, key=key, indices=[index])
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dependency is not specified as a string or a dictionary".format(p))
"{}: Dependency is not specified as a string or a dictionary".format(dep_provenance))
output_deps.append(dependency)
......
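The switch from `depends.index(dep)` to `enumerate(depends)` above matters when the same dependency appears more than once in a `depends` list: `list.index` always returns the first match, so provenance for a later duplicate would point at the wrong entry. A standalone illustration with toy data (not BuildStream's YAML model):

depends = ['base.bst', 'tools.bst', 'base.bst']  # note the duplicate

# Old approach: list.index only ever finds the first occurrence, so the
# third entry is misattributed to index 0.
old_indices = [depends.index(dep) for dep in depends]
assert old_indices == [0, 1, 0]

# New approach: enumerate pairs every entry with its true position.
new_indices = [index for index, _dep in enumerate(depends)]
assert new_indices == [0, 1, 2]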
......@@ -70,7 +70,7 @@ class Message():
self.elapsed = elapsed # The elapsed time, in timed messages
self.depth = depth # The depth of a timed message
self.logfile = logfile # The log file path where commands took place
self.sandbox = sandbox # The sandbox directory where an error occurred (if any)
self.sandbox = sandbox # Whether the error that caused this message occurred in a sandbox
self.pid = os.getpid() # The process pid
self.unique_id = unique_id # The plugin object ID issuing the message
self.task_id = task_id # The plugin object ID of the task
......
......@@ -423,9 +423,16 @@ class Stream():
else:
if location == '-':
with target.timed_activity("Creating tarball"):
with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
with tarfile.open(fileobj=fo, mode="w|") as tf:
sandbox_vroot.export_to_tar(tf, '.')
# Save the stdout FD to restore later
saved_fd = os.dup(sys.stdout.fileno())
try:
with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
with tarfile.open(fileobj=fo, mode="w|") as tf:
sandbox_vroot.export_to_tar(tf, '.')
finally:
# No matter what, restore stdout for further use
os.dup2(saved_fd, sys.stdout.fileno())
os.close(saved_fd)
else:
with target.timed_activity("Creating tarball '{}'"
.format(location)):
......
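The `os.dup`/`os.dup2` dance added above is needed because closing the `os.fdopen` wrapper also closes file descriptor 1, which would leave the process without a usable stdout once the tarball has been streamed. A self-contained sketch of the same pattern, writing plain bytes instead of a tarball:

import os
import sys

# Flush buffered text output before handing fd 1 to a raw binary writer.
sys.stdout.flush()

# Duplicate stdout's descriptor so it survives the close below.
saved_fd = os.dup(sys.stdout.fileno())
try:
    # os.fdopen wraps fd 1 directly; leaving the 'with' block closes fd 1.
    with os.fdopen(sys.stdout.fileno(), 'wb') as binary_out:
        binary_out.write(b"binary payload goes here\n")
finally:
    # Put the saved copy back onto fd 1 so normal printing keeps working.
    os.dup2(saved_fd, sys.stdout.fileno())
    os.close(saved_fd)

print("stdout file descriptor restored")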
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 17
BST_FORMAT_VERSION = 18
# The base BuildStream artifact version
......
......@@ -335,16 +335,9 @@ def node_get_provenance(node, key=None, indices=None):
return provenance
# Helper to use utils.sentinel without unconditional utils import,
# which causes issues for completion.
#
# Local private, but defined here because sphinx appears to break if
# it's not defined before any functions calling it in default kwarg
# values.
#
def _get_sentinel():
from .utils import _sentinel
return _sentinel
# A sentinel to be used as a default argument for functions that need
# to distinguish between a kwarg set to None and an unset kwarg.
_sentinel = object()
# node_get()
......@@ -368,10 +361,10 @@ def _get_sentinel():
# Note:
# Returned strings are stripped of leading and trailing whitespace
#
def node_get(node, expected_type, key, indices=None, default_value=_get_sentinel()):
def node_get(node, expected_type, key, indices=None, default_value=_sentinel):
value = node.get(key, default_value)
provenance = node_get_provenance(node)
if value is _get_sentinel():
if value is _sentinel:
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dictionary did not contain expected key '{}'".format(provenance, key))
......
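The module-level `_sentinel` that replaces the old `_get_sentinel()` helper relies on a plain `object()` being unique: no caller can plausibly pass it, so it cleanly distinguishes "no default supplied" from "default explicitly set to None". A minimal standalone version of the pattern:

_sentinel = object()  # unique marker no caller can accidentally supply

def node_get_demo(node, key, default_value=_sentinel):
    value = node.get(key, default_value)
    if value is _sentinel:
        # Key missing and no default given: that is an error.
        raise KeyError("missing expected key '{}'".format(key))
    return value

node = {'kind': 'manual', 'depends': None}
assert node_get_demo(node, 'kind') == 'manual'
assert node_get_demo(node, 'depends') is None                         # explicit None is fine
assert node_get_demo(node, 'variables', default_value=None) is None   # default is honoured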
......@@ -451,7 +451,7 @@ class Element(Plugin):
return None
def node_subst_member(self, node, member_name, default=utils._sentinel):
def node_subst_member(self, node, member_name, default=_yaml._sentinel):
"""Fetch the value of a string node member, substituting any variables
in the loaded value with the element contextual variables.
......@@ -1318,7 +1318,9 @@ class Element(Plugin):
@contextmanager
def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
# bst shell and bst checkout require a local sandbox.
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False) as sandbox:
bare_directory = True if directory else False
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
bare_directory=bare_directory) as sandbox:
# Configure always comes first, and we need it.
self.configure_sandbox(sandbox)
......@@ -1385,6 +1387,7 @@ class Element(Plugin):
# the same filesystem as the rest of our cache.
temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
import_dir = temp_staging_directory
try:
workspace = self._get_workspace()
......@@ -1395,12 +1398,16 @@ class Element(Plugin):
with self.timed_activity("Staging local files at {}"
.format(workspace.get_absolute_path())):
workspace.stage(temp_staging_directory)
elif self._cached():
# We have a cached buildtree to use, instead
artifact_base, _ = self.__extract()
import_dir = os.path.join(artifact_base, 'buildtree')
else:
# No workspace, stage directly
for source in self.sources():
source._stage(temp_staging_directory)
vdirectory.import_files(temp_staging_directory)
vdirectory.import_files(import_dir)
finally:
# Staging may produce directories with less than 'rwx' permissions
......@@ -1566,9 +1573,8 @@ class Element(Plugin):
collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except BstError as e:
# If an error occurred assembling an element in a sandbox,
# then tack on the sandbox directory to the error
e.sandbox = rootdir
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
# If there is a workspace open on this element, it will have
# been mounted for sandbox invocations instead of being staged.
......@@ -1683,8 +1689,8 @@ class Element(Plugin):
"unable to collect artifact contents"
.format(collect))
# Finally cleanup the build dir
cleanup_rootdir()
# Finally cleanup the build dir
cleanup_rootdir()
return artifact_size
......@@ -2152,12 +2158,14 @@ class Element(Plugin):
# stderr (fileobject): The stream for stderr for the sandbox
# config (SandboxConfig): The SandboxConfig object
# allow_remote (bool): Whether the sandbox is allowed to be remote
# bare_directory (bool): Whether the directory is bare i.e. doesn't have
# a separate 'root' subdir
#
# Yields:
# (Sandbox): A usable sandbox
#
@contextmanager
def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True):
def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False):
context = self._get_context()
project = self._get_project()
platform = Platform.get_platform()
......@@ -2188,6 +2196,7 @@ class Element(Plugin):
stdout=stdout,
stderr=stderr,
config=config,
bare_directory=bare_directory,
allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
yield sandbox
......@@ -2197,7 +2206,7 @@ class Element(Plugin):
# Recursive contextmanager...
with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
allow_remote=allow_remote) as sandbox:
allow_remote=allow_remote, bare_directory=False) as sandbox:
yield sandbox
# Cleanup the build dir
......
......@@ -321,7 +321,7 @@ class Plugin():
provenance = _yaml.node_get_provenance(node, key=member_name)
return str(provenance)
def node_get_member(self, node, expected_type, member_name, default=utils._sentinel):
def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel):
"""Fetch the value of a node member, raising an error if the value is
missing or incorrectly typed.
......
# No variables added for the manual element by default, set
# this if you plan to use make, and the sources cannot handle
# parallelization.
#
# variables:
#
# notparallel: True
# Manual build element does not provide any default
# build commands
config:
......@@ -28,14 +20,3 @@ config:
strip-commands:
- |
%{strip-binaries}
# Use max-jobs CPUs for building and enable verbosity
environment:
MAKEFLAGS: -j%{max-jobs}
V: 1
# And dont consider MAKEFLAGS or V as something which may
# affect build output.
environment-nocache:
- MAKEFLAGS
- V
......@@ -31,7 +31,6 @@ from .._fuse import SafeHardlinks
#
class Mount():
def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options=None):
scratch_directory = sandbox._get_scratch_directory()
# Getting _get_underlying_directory() here is acceptable as
# we're part of the sandbox code. This will fail if our
# directory is CAS-based.
......@@ -51,6 +50,7 @@ class Mount():
# a regular mount point within the parent's redirected mount.
#
if self.safe_hardlinks:
scratch_directory = sandbox._get_scratch_directory()
# Redirected mount
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
......
......@@ -66,15 +66,15 @@ class SandboxBwrap(Sandbox):
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
"'{}'".format(command[0]),
reason='missing-command')
# We want command args as a list of strings
if isinstance(command, str):
command = [command]
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY)
......
......@@ -57,15 +57,15 @@ class SandboxChroot(Sandbox):
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
"'{}'".format(command[0]),
reason='missing-command')
# Command must be a list
if isinstance(command, str):
command = [command]
stdout, stderr = self._get_output()
# Create the mount map, this will tell us where
......
......@@ -33,6 +33,10 @@ class SandboxDummy(Sandbox):
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
"'{}'".format(command[0]),
......
......@@ -98,16 +98,23 @@ class Sandbox():
self.__config = kwargs['config']
self.__stdout = kwargs['stdout']
self.__stderr = kwargs['stderr']
self.__bare_directory = kwargs['bare_directory']
# Setup the directories. Root and output_directory should be
# available to subclasses, hence being single-underscore. The
# others are private to this class.
self._root = os.path.join(directory, 'root')
# If the directory is bare, it probably doesn't need scratch
if self.__bare_directory:
self._root = directory
self.__scratch = None
os.makedirs(self._root, exist_ok=True)
else:
self._root = os.path.join(directory, 'root')
self.__scratch = os.path.join(directory, 'scratch')
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
self._output_directory = None
self.__directory = directory
self.__scratch = os.path.join(self.__directory, 'scratch')
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
self._vdir = None
# This is set if anyone requests access to the underlying
......@@ -334,6 +341,7 @@ class Sandbox():
# Returns:
# (str): The sandbox scratch directory
def _get_scratch_directory(self):
assert not self.__bare_directory, "Scratch is not going to work with bare directories"
return self.__scratch
# _get_output()
......
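This layout switch is what lets `bst shell --sysroot` (see the NEWS entry above) accept any directory containing a sysroot: a bare directory becomes the sandbox root as-is, while the classic layout still creates `root` and `scratch` subdirectories. A sketch of the decision with illustrative names; it is not the Sandbox class itself:

import os

def setup_sandbox_dirs(directory, bare_directory):
    if bare_directory:
        # The caller's directory already is the sysroot: use it directly and
        # skip the scratch area, which bare sandboxes never need.
        root, scratch = directory, None
        os.makedirs(root, exist_ok=True)
    else:
        # Classic layout: 'root' holds the sandbox filesystem, 'scratch'
        # holds working space (for example FUSE mount bases).
        root = os.path.join(directory, 'root')
        scratch = os.path.join(directory, 'scratch')
        for subdir in (root, scratch):
            os.makedirs(subdir, exist_ok=True)
    return root, scratch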
......@@ -654,10 +654,6 @@ def _pretty_size(size, dec_places=0):
return "{size:g}{unit}".format(size=round(psize, dec_places), unit=unit)
# A sentinel to be used as a default argument for functions that need
# to distinguish between a kwarg set to None and an unset kwarg.
_sentinel = object()
# Main process pid
_main_pid = os.getpid()
......
......@@ -23,6 +23,8 @@ import shutil
import pytest
from buildstream._platform.platform import Platform
def pytest_addoption(parser):
parser.addoption('--integration', action='store_true', default=False,
......@@ -52,3 +54,8 @@ def integration_cache(request):
shutil.rmtree(os.path.join(cache_dir, 'artifacts'))
except FileNotFoundError:
pass
@pytest.fixture(autouse=True)
def clean_platform_cache():
Platform._instance = None
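The new autouse fixture guards against test cross-talk through a class-level singleton. A hedged, self-contained illustration of the pattern it resets; the real Platform class is more involved, and this toy only mirrors the `_instance` caching that the fixture clears:

class ToyPlatform:
    _instance = None  # class-level cache shared by every test in the session

    @classmethod
    def get_platform(cls):
        # First call creates the instance; later calls reuse it even if the
        # surrounding test environment has changed in the meantime.
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance


# What the fixture does before each test: drop the cached instance so the
# next get_platform() call re-creates it with fresh configuration.
ToyPlatform._instance = None
assert ToyPlatform.get_platform() is ToyPlatform.get_platform()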