Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (35)
Showing
with 800 additions and 138 deletions
......@@ -67,6 +67,12 @@ buildstream 1.3.1
allows the user to set a default location for their creation. This has meant
that the new CLI is no longer backwards compatible with buildstream 1.2.
o Add sandbox API for command batching and use it for build, script, and
compose elements.
o Opening a workspace now creates a .bstproject.yaml file that allows buildstream
commands to be run from a workspace that is not inside a project.
=================
buildstream 1.1.5
......
......@@ -27,7 +27,7 @@ if "_BST_COMPLETION" not in os.environ:
del get_versions
from .utils import UtilError, ProgramNotFoundError
from .sandbox import Sandbox, SandboxFlags
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
from .types import Scope, Consistency
from .plugin import Plugin
from .source import Source, SourceError, SourceFetcher
......
......@@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
from ._workspaces import Workspaces
from ._workspaces import Workspaces, WorkspaceProjectCache
from .plugin import _plugin_lookup
......@@ -121,6 +121,10 @@ class Context():
# remove a workspace directory.
self.prompt_workspace_close_remove_dir = None
# Boolean, whether we double-check with the user that they meant to
# close the workspace when they're using it to access the project.
self.prompt_workspace_close_project_inaccessible = None
# Boolean, whether we double-check with the user that they meant to do
# a hard reset of a workspace, potentially losing changes.
self.prompt_workspace_reset_hard = None
......@@ -139,6 +143,7 @@ class Context():
self._projects = []
self._project_overrides = {}
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._log_handle = None
self._log_filename = None
......@@ -248,12 +253,15 @@ class Context():
defaults, Mapping, 'prompt')
_yaml.node_validate(prompt, [
'auto-init', 'really-workspace-close-remove-dir',
'really-workspace-close-project-inaccessible',
'really-workspace-reset-hard',
])
self.prompt_auto_init = _node_get_option_str(
prompt, 'auto-init', ['ask', 'no']) == 'ask'
self.prompt_workspace_close_remove_dir = _node_get_option_str(
prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
self.prompt_workspace_reset_hard = _node_get_option_str(
prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
......@@ -310,6 +318,16 @@ class Context():
def get_workspaces(self):
return self._workspaces
# get_workspace_project_cache():
#
# Return the WorkspaceProjectCache object used for this BuildStream invocation
#
# Returns:
# (WorkspaceProjectCache): The WorkspaceProjectCache object
#
def get_workspace_project_cache(self):
return self._workspace_project_cache
# get_overrides():
#
# Fetch the override dictionary for the active project. This returns
......
......@@ -59,18 +59,9 @@ def complete_target(args, incomplete):
:return: all the possible user-specified completions for the param
"""
from .. import utils
project_conf = 'project.conf'
def ensure_project_dir(directory):
directory = os.path.abspath(directory)
while not os.path.isfile(os.path.join(directory, project_conf)):
parent_dir = os.path.dirname(directory)
if directory == parent_dir:
break
directory = parent_dir
return directory
# First resolve the directory, in case there is an
# active --directory/-C option
#
......@@ -89,7 +80,7 @@ def complete_target(args, incomplete):
else:
# Check if this directory or any of its parent directories
# contain a project config file
base_directory = ensure_project_dir(base_directory)
base_directory = utils._search_upward_for_file(base_directory, project_conf)
# Now parse the project.conf just to find the element path,
# this is unfortunately a bit heavy.
......@@ -756,11 +747,18 @@ def workspace_close(app, remove_dir, all_, elements):
elements = app.stream.redirect_element_names(elements)
# Check that the workspaces in question exist
# Check that the workspaces in question exist, and that it's safe to
# remove them.
nonexisting = []
for element_name in elements:
if not app.stream.workspace_exists(element_name):
nonexisting.append(element_name)
if (app.stream.workspace_is_required(element_name) and app.interactive and
app.context.prompt_workspace_close_project_inaccessible):
click.echo("Removing '{}' will prevent you from running buildstream commands".format(element_name))
if not click.confirm('Are you sure you want to close this workspace?'):
click.echo('Aborting', err=True)
sys.exit(-1)
if nonexisting:
raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
......
......@@ -563,17 +563,23 @@ class Loader():
"Subproject has no ref for junction: {}".format(filename),
detail=detail)
# Stage sources
os.makedirs(self._context.builddir, exist_ok=True)
basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
element._stage_sources_at(basedir, mount_workspaces=False)
if len(sources) == 1 and sources[0]._get_local_path():
# Optimization for junctions with a single local source
basedir = sources[0]._get_local_path()
tempdir = None
else:
# Stage sources
os.makedirs(self._context.builddir, exist_ok=True)
basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
element._stage_sources_at(basedir, mount_workspaces=False)
tempdir = basedir
# Load the project
project_dir = os.path.join(basedir, element.path)
try:
from .._project import Project
project = Project(project_dir, self._context, junction=element,
parent_loader=self, tempdir=basedir)
parent_loader=self, tempdir=tempdir)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
......
......@@ -94,8 +94,10 @@ class Project():
# The project name
self.name = None
# The project directory
self.directory = self._ensure_project_dir(directory)
self._context = context # The invocation Context, a private member
# The project directory, and whether the project was found from an external workspace
self.directory, self._required_workspace_element = self._find_project_dir(directory)
# Absolute path to where elements are loaded from within the project
self.element_path = None
......@@ -116,7 +118,6 @@ class Project():
#
# Private Members
#
self._context = context # The invocation Context
self._default_mirror = default_mirror # The name of the preferred mirror.
......@@ -370,6 +371,14 @@ class Project():
self._load_second_pass()
# required_workspace_element()
#
# Returns the element whose workspace is required to load this project,
# if any.
#
def required_workspace_element(self):
return self._required_workspace_element
# cleanup()
#
# Cleans up resources used loading elements
......@@ -651,7 +660,7 @@ class Project():
# Source url aliases
output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
# _ensure_project_dir()
# _find_project_dir()
#
# Returns path of the project directory, if a configuration file is found
# in given directory or any of its parent directories.
......@@ -662,18 +671,26 @@ class Project():
# Raises:
# LoadError if project.conf is not found
#
def _ensure_project_dir(self, directory):
directory = os.path.abspath(directory)
while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
parent_dir = os.path.dirname(directory)
if directory == parent_dir:
# Returns:
# (str) - the directory that contains the project, and
# (str) - the name of the element required to find the project, or an empty string
#
def _find_project_dir(self, directory):
workspace_element = ""
project_directory = utils._search_upward_for_file(directory, _PROJECT_CONF_FILE)
if not project_directory:
workspace_project_cache = self._context.get_workspace_project_cache()
workspace_project = workspace_project_cache.get(directory)
if workspace_project:
project_directory = workspace_project.get_default_path()
workspace_element = workspace_project.get_default_element()
else:
raise LoadError(
LoadErrorReason.MISSING_PROJECT_CONF,
'{} not found in current directory or any of its parent directories'
.format(_PROJECT_CONF_FILE))
directory = parent_dir
return directory
return project_directory, workspace_element
def _load_plugin_factories(self, config, output):
plugin_source_origins = [] # Origins of custom sources
......
......@@ -28,7 +28,7 @@ import tarfile
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error, LoadError, LoadErrorReason
from ._message import Message, MessageType
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
from ._pipeline import Pipeline, PipelineSelection
......@@ -550,6 +550,8 @@ class Stream():
# So far this function has tried to catch as many issues as possible without making any changes
# Now it does the bits that cannot be made atomic.
targetGenerator = zip(elements, expanded_directories)
workspace_project_cache = self._context.get_workspace_project_cache()
project = self._context.get_toplevel_project()
for target, directory in targetGenerator:
self._message(MessageType.INFO, "Creating workspace for element {}"
.format(target.name))
......@@ -574,6 +576,10 @@ class Stream():
with target.timed_activity("Staging sources to {}".format(directory)):
target._open_workspace()
workspace_project = workspace_project_cache.add(directory, project.directory,
target._get_full_name())
workspace_project.write()
# Saving the configuration after each workspace is set up means that if creating
# the next workspace fails, the successfully created workspaces are still saved.
workspaces.save_config()
......@@ -601,6 +607,16 @@ class Stream():
except OSError as e:
raise StreamError("Could not remove '{}': {}"
.format(workspace.get_absolute_path(), e)) from e
else:
workspace_project_cache = self._context.get_workspace_project_cache()
try:
workspace_project_cache.remove(workspace.get_absolute_path())
except LoadError as e:
# We might be closing a workspace with a deleted directory
if e.reason == LoadErrorReason.MISSING_FILE:
pass
else:
raise
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
......@@ -644,6 +660,8 @@ class Stream():
for element in elements:
workspace = workspaces.get_workspace(element._get_full_name())
workspace_path = workspace.get_absolute_path()
workspace_project_cache = self._context.get_workspace_project_cache()
workspace_project = workspace_project_cache.get(workspace_path)
if soft:
workspace.prepared = False
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
......@@ -664,6 +682,8 @@ class Stream():
with element.timed_activity("Staging sources to {}".format(workspace_path)):
element._open_workspace()
workspace_project.write()
self._message(MessageType.INFO,
"Reset workspace for {} at: {}".format(element.name,
workspace_path))
......@@ -694,6 +714,20 @@ class Stream():
return False
# workspace_is_required()
#
# Checks whether the workspace belonging to element_name is required to
# load the project
#
# Args:
# element_name (str): The element whose workspace may be required
#
# Returns:
# (bool): True if the workspace is required
def workspace_is_required(self, element_name):
required_elm = self._project.required_workspace_element()
return required_elm == element_name
# workspace_list
#
# Serializes the workspaces and dumps them in YAML to stdout.
......
......@@ -25,6 +25,211 @@ from ._exceptions import LoadError, LoadErrorReason
BST_WORKSPACE_FORMAT_VERSION = 3
BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
# WorkspaceProject()
#
# An object to contain various helper functions and data required for
# referring from a workspace back to buildstream.
#
# Args:
# directory (str): The directory that the workspace exists in
# project_path (str): The project path used to refer back
# to buildstream projects.
# element_name (str): The name of the element used to create this workspace.
# WorkspaceProject()
#
# An object to contain various helper functions and data required for
# referring from a workspace back to buildstream.
#
# Args:
#    directory (str): The directory that the workspace exists in
#    project_path (str): The project path used to refer back
#                        to buildstream projects.
#    element_name (str): The name of the element used to create this workspace.
class WorkspaceProject():
    def __init__(self, directory, project_path="", element_name=""):
        self._directory = directory
        self._projects = []

        # project_path and element_name must be supplied together or not at all
        assert bool(project_path) == bool(element_name)
        if project_path:
            self._add_project(project_path, element_name)

    # get_default_path()
    #
    # Retrieves the default path to a project.
    #
    # Returns:
    #    (str): The path to a project
    def get_default_path(self):
        default = self._projects[0]
        return default['project-path']

    # get_default_element()
    #
    # Retrieves the name of the element that owns this workspace.
    #
    # Returns:
    #    (str): The name of an element
    def get_default_element(self):
        default = self._projects[0]
        return default['element-name']

    # to_dict()
    #
    # Turn the members data into a dict for serialization purposes
    #
    # Returns:
    #    (dict): A dict representation of the WorkspaceProject
    #
    def to_dict(self):
        return {
            'projects': self._projects,
            'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
        }

    # from_dict()
    #
    # Loads a new WorkspaceProject from a simple dictionary
    #
    # Args:
    #    directory (str): The directory that the workspace exists in
    #    dictionary (dict): The dict to generate a WorkspaceProject from
    #
    # Returns:
    #    (WorkspaceProject): A newly instantiated WorkspaceProject
    @classmethod
    def from_dict(cls, directory, dictionary):
        # Only know how to handle one format-version at the moment.
        format_version = int(dictionary['format-version'])
        assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
            "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)

        workspace_project = cls(directory)
        for entry in dictionary['projects']:
            workspace_project._add_project(entry['project-path'], entry['element-name'])
        return workspace_project

    # load()
    #
    # Loads the WorkspaceProject for a given directory. This directory may be a
    # subdirectory of the workspace's directory.
    #
    # Args:
    #    directory (str): The directory
    # Returns:
    #    (WorkspaceProject): The created WorkspaceProject, if in a workspace, or
    #    (NoneType): None, if the directory is not inside a workspace.
    @classmethod
    def load(cls, directory):
        project_dir = cls.search_for_dir(directory)
        if not project_dir:
            return None
        workspace_file = os.path.join(project_dir, WORKSPACE_PROJECT_FILE)
        return cls.from_dict(project_dir, _yaml.load(workspace_file))

    # write()
    #
    # Writes the WorkspaceProject to disk
    def write(self):
        # The directory may not exist yet; create it before dumping the file.
        os.makedirs(self._directory, exist_ok=True)
        _yaml.dump(self.to_dict(), self._get_filename())

    # search_for_dir()
    #
    # Returns the directory that contains the workspace local project file,
    # searching upwards from search_dir.
    @staticmethod
    def search_for_dir(search_dir):
        return utils._search_upward_for_file(search_dir, WORKSPACE_PROJECT_FILE)

    # Absolute path of the on-disk workspace project file.
    def _get_filename(self):
        return os.path.join(self._directory, WORKSPACE_PROJECT_FILE)

    # Record a (project path, element name) pair; both must be non-empty.
    def _add_project(self, project_path, element_name):
        assert (project_path and element_name)
        self._projects.append({'project-path': project_path, 'element-name': element_name})
# WorkspaceProjectCache()
#
# A class to manage workspace project data for multiple workspaces.
#
# WorkspaceProjectCache()
#
# A class to manage workspace project data for multiple workspaces.
#
class WorkspaceProjectCache():
    def __init__(self):
        # Mapping of a workspace directory to its WorkspaceProject
        self._projects = {}

    # get()
    #
    # Returns a WorkspaceProject for a given directory, retrieving from the cache if
    # present, and searching the filesystem for the file and loading it if not.
    #
    # Args:
    #    directory (str): The directory to search for a WorkspaceProject.
    #
    # Returns:
    #    (WorkspaceProject): The WorkspaceProject that was found for that directory.
    #    or      (NoneType): None, if no WorkspaceProject can be found.
    #
    def get(self, directory):
        # Fast path: the exact directory is already cached
        if directory in self._projects:
            return self._projects[directory]

        # Otherwise, walk upwards looking for the workspace project file
        found_dir = WorkspaceProject.search_for_dir(directory)
        if not found_dir:
            return None

        # Cache under the directory that actually contains the file
        if found_dir not in self._projects:
            self._projects[found_dir] = WorkspaceProject.load(found_dir)
        return self._projects[found_dir]

    # add()
    #
    # Adds the project path and element name to the WorkspaceProject that exists
    # for that directory, creating a new WorkspaceProject if none exists yet.
    #
    # Args:
    #    directory (str): The directory to search for a WorkspaceProject.
    #    project_path (str): The path to the project that refers to this workspace
    #    element_name (str): The element in the project that refers to this workspace
    #
    # Returns:
    #    (WorkspaceProject): The WorkspaceProject that was found for that directory.
    #
    def add(self, directory, project_path='', element_name=''):
        workspace_project = self.get(directory)
        if not workspace_project:
            workspace_project = WorkspaceProject(directory)
            self._projects[directory] = workspace_project
        if project_path:
            workspace_project._add_project(project_path, element_name)
        return workspace_project

    # remove()
    #
    # Removes the project path and element name from the WorkspaceProject that exists
    # for that directory.
    #
    # NOTE: This currently just deletes the file, but with support for multiple
    # projects opening the same workspace, this will involve decreasing the count
    # and deleting the file if there are no more projects.
    #
    # Args:
    #    directory (str): The directory to search for a WorkspaceProject.
    #    project_path (str): **UNUSED** The path to the project that refers to this workspace
    #    element_name (str): **UNUSED** The element in the project that refers to this workspace
    #
    def remove(self, directory, project_path='', element_name=''):
        # NOTE: project_path and element_name will only be used once multiple
        # owners of a workspace are supported
        workspace_project = self.get(directory)
        if not workspace_project:
            raise LoadError(LoadErrorReason.MISSING_FILE,
                            "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE))

        try:
            os.unlink(workspace_project._get_filename())
        except FileNotFoundError:
            # The file may already be gone (e.g. the workspace dir was deleted)
            pass
# Workspace()
......@@ -174,10 +379,15 @@ class Workspace():
if recalculate or self._key is None:
fullpath = self.get_absolute_path()
excluded_files = (WORKSPACE_PROJECT_FILE,)
# Get a list of tuples of the project relative paths and fullpaths
if os.path.isdir(fullpath):
filelist = utils.list_relative_paths(fullpath)
filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
filelist = [
(relpath, os.path.join(fullpath, relpath)) for relpath in filelist
if relpath not in excluded_files
]
else:
filelist = [(self.get_absolute_path(), fullpath)]
......
......@@ -127,7 +127,7 @@ artifact collection purposes.
"""
import os
from . import Element, Scope, ElementError
from . import Element, Scope
from . import SandboxFlags
......@@ -207,6 +207,10 @@ class BuildElement(Element):
# Setup environment
sandbox.set_environment(self.get_environment())
# Enable command batching across prepare() and assemble()
self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
collect=self.get_variable('install-root'))
def stage(self, sandbox):
# Stage deps in the sandbox root
......@@ -215,7 +219,7 @@ class BuildElement(Element):
# Run any integration commands provided by the dependencies
# once they are all staged and ready
with self.timed_activity("Integrating sandbox"):
with sandbox.batch(SandboxFlags.NONE, label="Integrating sandbox"):
for dep in self.dependencies(Scope.BUILD):
dep.integrate(sandbox)
......@@ -223,14 +227,13 @@ class BuildElement(Element):
self.stage_sources(sandbox, self.get_variable('build-root'))
def assemble(self, sandbox):
# Run commands
for command_name in _command_steps:
commands = self.__commands[command_name]
if not commands or command_name == 'configure-commands':
continue
with self.timed_activity("Running {}".format(command_name)):
with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
for cmd in commands:
self.__run_command(sandbox, cmd, command_name)
......@@ -254,7 +257,7 @@ class BuildElement(Element):
def prepare(self, sandbox):
commands = self.__commands['configure-commands']
if commands:
with self.timed_activity("Running configure-commands"):
with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
for cmd in commands:
self.__run_command(sandbox, cmd, 'configure-commands')
......@@ -282,13 +285,9 @@ class BuildElement(Element):
return commands
def __run_command(self, sandbox, cmd, cmd_name):
self.status("Running {}".format(cmd_name), detail=cmd)
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
#
exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
SandboxFlags.ROOT_READ_ONLY)
if exitcode != 0:
raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
collect=self.get_variable('install-root'))
sandbox.run(['sh', '-c', '-e', cmd + '\n'],
SandboxFlags.ROOT_READ_ONLY,
label=cmd)
......@@ -128,6 +128,14 @@ prompt:
#
really-workspace-close-remove-dir: ask
# Whether to really proceed with 'bst workspace close' when doing so would
# stop them from running bst commands in this workspace.
#
# ask - Ask the user if they are sure.
# yes - Always close, without asking.
#
really-workspace-close-project-inaccessible: ask
# Whether to really proceed with 'bst workspace reset' doing a hard reset of
# a workspace, potentially losing changes.
#
......
......@@ -78,6 +78,7 @@ import stat
import copy
from collections import OrderedDict
from collections.abc import Mapping
import contextlib
from contextlib import contextmanager
import tempfile
import shutil
......@@ -89,7 +90,7 @@ from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
ErrorDomain
from .utils import UtilError
from . import Plugin, Consistency, Scope
from . import SandboxFlags
from . import SandboxFlags, SandboxCommandError
from . import utils
from . import _cachekey
from . import _signals
......@@ -217,6 +218,10 @@ class Element(Plugin):
self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # Collect dir for batching across prepare()/assemble()
# hash tables of loaded artifact metadata, hashed by key
self.__metadata_keys = {} # Strong and weak keys for this key
self.__metadata_dependencies = {} # Dictionary of dependency strong keys
......@@ -770,13 +775,13 @@ class Element(Plugin):
environment = self.get_environment()
if bstdata is not None:
commands = self.node_get_member(bstdata, list, 'integration-commands', [])
for i in range(len(commands)):
cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
self.status("Running integration command", detail=cmd)
exitcode = sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/')
if exitcode != 0:
raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
with sandbox.batch(SandboxFlags.NONE):
commands = self.node_get_member(bstdata, list, 'integration-commands', [])
for i in range(len(commands)):
cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
label=cmd)
def stage_sources(self, sandbox, directory):
"""Stage this element's sources to a directory in the sandbox
......@@ -863,6 +868,24 @@ class Element(Plugin):
return None
def batch_prepare_assemble(self, flags, *, collect=None):
""" Configure command batching across prepare() and assemble()
Args:
flags (:class:`.SandboxFlags`): The sandbox flags for the command batch
collect (str): An optional directory containing partial install contents
on command failure.
This may be called in :func:`Element.configure_sandbox() <buildstream.element.Element.configure_sandbox>`
to enable batching of all sandbox commands issued in prepare() and assemble().
"""
if self.__batch_prepare_assemble:
raise ElementError("{}: Command batching for prepare/assemble is already configured".format(self))
self.__batch_prepare_assemble = True
self.__batch_prepare_assemble_flags = flags
self.__batch_prepare_assemble_collect = collect
#############################################################
# Private Methods used in BuildStream #
#############################################################
......@@ -1323,7 +1346,7 @@ class Element(Plugin):
bare_directory=bare_directory) as sandbox:
# Configure always comes first, and we need it.
self.configure_sandbox(sandbox)
self.__configure_sandbox(sandbox)
# Stage something if we need it
if not directory:
......@@ -1556,15 +1579,24 @@ class Element(Plugin):
# Call the abstract plugin methods
try:
# Step 1 - Configure
self.configure_sandbox(sandbox)
self.__configure_sandbox(sandbox)
# Step 2 - Stage
self.stage(sandbox)
# Step 3 - Prepare
self.__prepare(sandbox)
# Step 4 - Assemble
collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
if self.__batch_prepare_assemble:
cm = sandbox.batch(self.__batch_prepare_assemble_flags,
collect=self.__batch_prepare_assemble_collect)
else:
cm = contextlib.suppress()
with cm:
# Step 3 - Prepare
self.__prepare(sandbox)
# Step 4 - Assemble
collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except ElementError as e:
except (ElementError, SandboxCommandError) as e:
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
......@@ -2059,6 +2091,15 @@ class Element(Plugin):
def __can_build_incrementally(self):
return bool(self._get_workspace())
# __configure_sandbox():
#
# Internal method for calling public abstract configure_sandbox() method.
#
def __configure_sandbox(self, sandbox):
self.__batch_prepare_assemble = False
self.configure_sandbox(sandbox)
# __prepare():
#
# Internal method for calling public abstract prepare() method.
......@@ -2074,7 +2115,12 @@ class Element(Plugin):
self.prepare(sandbox)
if workspace:
workspace.prepared = True
def mark_workspace_prepared():
workspace.prepared = True
# Defer workspace.prepared setting until pending batch commands
# have been executed.
sandbox._callback(mark_workspace_prepared)
def __is_cached(self, keystrength):
if keystrength is None:
......@@ -2157,6 +2203,7 @@ class Element(Plugin):
sandbox = SandboxRemote(context, project,
directory,
plugin=self,
stdout=stdout,
stderr=stderr,
config=config,
......@@ -2175,6 +2222,7 @@ class Element(Plugin):
sandbox = platform.create_sandbox(context, project,
directory,
plugin=self,
stdout=stdout,
stderr=stderr,
config=config,
......
......@@ -122,8 +122,9 @@ class ComposeElement(Element):
snapshot = set(vbasedir.list_relative_paths())
vbasedir.mark_unmodified()
for dep in self.dependencies(Scope.BUILD):
dep.integrate(sandbox)
with sandbox.batch(0):
for dep in self.dependencies(Scope.BUILD):
dep.integrate(sandbox)
if require_split:
# Calculate added, modified and removed files
......
......@@ -124,6 +124,9 @@ class LocalSource(Source):
else:
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
def _get_local_path(self):
return self.fullpath
# Create a unique key for a file
def unique_key(filename):
......
......@@ -17,6 +17,6 @@
# Authors:
# Tristan Maat <tristan.maat@codethink.co.uk>
from .sandbox import Sandbox, SandboxFlags
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
from ._sandboxremote import SandboxRemote
from ._sandboxdummy import SandboxDummy
......@@ -58,22 +58,12 @@ class SandboxBwrap(Sandbox):
self.die_with_parent_available = kwargs['die_with_parent_available']
self.json_status_available = kwargs['json_status_available']
def run(self, command, flags, *, cwd=None, env=None):
def _run(self, command, flags, *, cwd, env):
stdout, stderr = self._get_output()
# Allowable access to underlying storage as we're part of the sandbox
root_directory = self.get_virtual_directory()._get_underlying_directory()
# Fallback to the sandbox default settings for
# the cwd and env.
#
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
"'{}'".format(command[0]),
......
......@@ -49,17 +49,7 @@ class SandboxChroot(Sandbox):
self.mount_map = None
def run(self, command, flags, *, cwd=None, env=None):
# Fallback to the sandbox default settings for
# the cwd and env.
#
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
......
......@@ -25,17 +25,7 @@ class SandboxDummy(Sandbox):
super().__init__(*args, **kwargs)
self._reason = kwargs.get("dummy_reason", "no reason given")
def run(self, command, flags, *, cwd=None, env=None):
# Fallback to the sandbox default settings for
# the cwd and env.
#
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
raise SandboxError("Staged artifacts do not provide command "
......
......@@ -19,12 +19,14 @@
# Jim MacArthur <jim.macarthur@codethink.co.uk>
import os
import shlex
from urllib.parse import urlparse
from functools import partial
import grpc
from . import Sandbox
from . import Sandbox, SandboxCommandError
from .sandbox import _SandboxBatch
from ..storage._filebaseddirectory import FileBasedDirectory
from ..storage._casbaseddirectory import CasBasedDirectory
from .. import _signals
......@@ -212,7 +214,7 @@ class SandboxRemote(Sandbox):
new_dir = CasBasedDirectory(self._get_context().artifactcache.cas, ref=dir_digest)
self._set_virtual_directory(new_dir)
def run(self, command, flags, *, cwd=None, env=None):
def _run(self, command, flags, *, cwd, env):
# Upload sources
upload_vdir = self.get_virtual_directory()
......@@ -230,16 +232,6 @@ class SandboxRemote(Sandbox):
if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
# Fallback to the sandbox default settings for
# the cwd and env.
#
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# We want command args as a list of strings
if isinstance(command, str):
command = [command]
# Now transmit the command to execute
operation = self.run_remote_command(command, upload_vdir.ref, cwd, env)
......@@ -275,3 +267,69 @@ class SandboxRemote(Sandbox):
self.process_job_output(action_result.output_directories, action_result.output_files)
return 0
    # _create_batch()
    #
    # Overrides Sandbox._create_batch() so that remote execution uses the
    # script-generating batch implementation instead of the default
    # one-command-at-a-time _SandboxBatch.
    #
    def _create_batch(self, main_group, flags, *, collect=None):
        return _SandboxRemoteBatch(self, main_group, flags, collect=collect)
# _SandboxRemoteBatch()
#
# Command batching by shell script generation.
#
# Rather than submitting each command individually to the remote execution
# service, the whole batch is compiled into a single POSIX shell script
# which is then run as one remote command.
#
class _SandboxRemoteBatch(_SandboxBatch):

    def __init__(self, sandbox, main_group, flags, *, collect=None):
        super().__init__(sandbox, main_group, flags, collect=collect)

        # Shell script source, built up by execute_command()
        self.script = None
        # First command in the batch; its cwd/env seed the script invocation
        self.first_command = None
        # cwd/env state tracked while generating the script, so that only
        # the changes between consecutive commands are emitted
        self.cwd = None
        self.env = None

    def execute(self):
        self.script = ""

        # Walk the batch tree, appending one script fragment per command
        self.main_group.execute(self)

        first = self.first_command
        if first and self.sandbox.run(['sh', '-c', '-e', self.script], self.flags, cwd=first.cwd, env=first.env) != 0:
            raise SandboxCommandError("Command execution failed", collect=self.collect)

    def execute_group(self, group):
        # Groups have no script representation, simply recurse
        group.execute_children(self)

    def execute_command(self, command):
        if self.first_command is None:
            # First command in batch
            #
            # Initial working directory and environment of script already matches
            # the command configuration.
            self.first_command = command
        else:
            # Change working directory for this command.
            #
            # Quote the directory so that paths containing whitespace or
            # shell metacharacters cannot break or alter the generated
            # script (shlex.quote() leaves plain paths unchanged).
            if command.cwd != self.cwd:
                quoted_cwd = shlex.quote(command.cwd)
                self.script += "mkdir -p {}\n".format(quoted_cwd)
                self.script += "cd {}\n".format(quoted_cwd)

            # Update environment for this command
            for key in self.env:
                if key not in command.env:
                    self.script += "unset {}\n".format(key)
            for key, value in command.env.items():
                if key not in self.env or self.env[key] != value:
                    self.script += "export {}={}\n".format(key, shlex.quote(value))

        # Keep track of current working directory and environment
        self.cwd = command.cwd
        self.env = command.env

        # Actual command execution, in a parenthesized subshell so that
        # the command cannot affect the script's own cwd/env tracking
        cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
        self.script += "(set -ex; {})".format(cmdline)

        # Error handling: report which command failed and abort the script
        label = command.label or cmdline
        quoted_label = shlex.quote("'{}'".format(label))
        self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)

    def execute_call(self, call):
        # Python callbacks cannot be represented in a remote shell script
        raise SandboxError("SandboxRemote does not support callbacks in command batches")
#
# Copyright (C) 2017 Codethink Limited
# Copyright (C) 2018 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
......@@ -29,7 +30,12 @@ See also: :ref:`sandboxing`.
"""
import os
from .._exceptions import ImplError, BstError
import shlex
import contextlib
from contextlib import contextmanager
from .._exceptions import ImplError, BstError, SandboxError
from .._message import Message, MessageType
from ..storage._filebaseddirectory import FileBasedDirectory
from ..storage._casbaseddirectory import CasBasedDirectory
......@@ -38,6 +44,10 @@ class SandboxFlags():
"""Flags indicating how the sandbox should be run.
"""
NONE = 0
"""Use default sandbox configuration.
"""
ROOT_READ_ONLY = 0x01
"""The root filesystem is read only.
......@@ -71,6 +81,19 @@ class SandboxFlags():
"""
class SandboxCommandError(SandboxError):
    """Raised by :class:`.Sandbox` implementations when a sandboxed command fails.

    Args:
       message (str): The error message to report to the user
       collect (str): An optional directory containing partial install contents
    """
    def __init__(self, message, *, collect=None):
        # Remember where partial results may be collected from, so that
        # callers can salvage a failed build for inspection
        self.collect = collect

        super().__init__(message, reason='command-failed')
class Sandbox():
"""Sandbox()
......@@ -94,6 +117,13 @@ class Sandbox():
self.__mount_sources = {}
self.__allow_real_directory = kwargs['allow_real_directory']
# Plugin ID for logging
plugin = kwargs.get('plugin', None)
if plugin:
self.__plugin_id = plugin._get_unique_id()
else:
self.__plugin_id = None
# Configuration from kwargs common to all subclasses
self.__config = kwargs['config']
self.__stdout = kwargs['stdout']
......@@ -121,6 +151,9 @@ class Sandbox():
# directory via get_directory.
self._never_cache_vdirs = False
# Pending command batch
self.__batch = None
def get_directory(self):
"""Fetches the sandbox root directory
......@@ -209,9 +242,16 @@ class Sandbox():
'artifact': artifact
})
def run(self, command, flags, *, cwd=None, env=None):
def run(self, command, flags, *, cwd=None, env=None, label=None):
"""Run a command in the sandbox.
If this is called outside a batch context, the command is immediately
executed.
If this is called in a batch context, the command is added to the batch
for later execution. If the command fails, later commands will not be
executed. Command flags must match batch flags.
Args:
command (list): The command to run in the sandboxed environment, as a list
of strings starting with the binary to run.
......@@ -219,9 +259,10 @@ class Sandbox():
cwd (str): The sandbox relative working directory in which to run the command.
env (dict): A dictionary of string key, value pairs to set as environment
variables inside the sandbox environment.
label (str): An optional label for the command, used for logging. (*Since: 1.4*)
Returns:
(int): The program exit code.
(int|None): The program exit code, or None if running in batch context.
Raises:
(:class:`.ProgramNotFoundError`): If a host tool which the given sandbox
......@@ -234,9 +275,115 @@ class Sandbox():
function must make sure the directory will be created if it does
not exist yet, even if a workspace is being used.
"""
raise ImplError("Sandbox of type '{}' does not implement run()"
# Fallback to the sandbox default settings for
# the cwd and env.
#
cwd = self._get_work_directory(cwd=cwd)
env = self._get_environment(cwd=cwd, env=env)
# Convert single-string argument to a list
if isinstance(command, str):
command = [command]
if self.__batch:
if flags != self.__batch.flags:
raise SandboxError("Inconsistent sandbox flags in single command batch")
batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
current_group = self.__batch.current_group
current_group.append(batch_command)
return None
else:
return self._run(command, flags, cwd=cwd, env=env)
    @contextmanager
    def batch(self, flags, *, label=None, collect=None):
        """Context manager for command batching

        This provides a batch context that defers execution of commands until
        the end of the context. If a command fails, the batch will be aborted
        and subsequent commands will not be executed.

        Command batches may be nested. Execution will start only when the top
        level batch context ends.

        Args:
            flags (:class:`.SandboxFlags`): The flags for this command batch.
            label (str): An optional label for the batch group, used for logging.
            collect (str): An optional directory containing partial install contents
                           on command failure.

        Raises:
            (:class:`.SandboxCommandError`): If a command fails.

        *Since: 1.4*
        """
        group = _SandboxBatchGroup(label=label)

        if self.__batch:
            # Nested batch: all commands in a single batch must share the same flags
            if flags != self.__batch.flags:
                raise SandboxError("Inconsistent sandbox flags in single command batch")

            # Graft this group into the already-open batch, and make it the
            # target for commands queued while inside this context
            parent_group = self.__batch.current_group
            parent_group.append(group)
            self.__batch.current_group = group
            try:
                yield
            finally:
                # Restore the parent group even if the context body raised
                self.__batch.current_group = parent_group
        else:
            # Top-level batch
            batch = self._create_batch(group, flags, collect=collect)

            self.__batch = batch
            try:
                yield
            finally:
                # Always clear the pending batch before executing it, so that
                # run() calls made during execution are not re-batched
                self.__batch = None

            # Deferred execution of the whole batch; only reached when the
            # context body completed without raising
            batch.execute()
#####################################################
# Abstract Methods for Sandbox implementations #
#####################################################
    # _run()
    #
    # Abstract method for running a single command
    #
    # Args:
    #    command (list): The command to run in the sandboxed environment, as a list
    #                    of strings starting with the binary to run.
    #    flags (:class:`.SandboxFlags`): The flags for running this command.
    #    cwd (str): The sandbox relative working directory in which to run the command.
    #    env (dict): A dictionary of string key, value pairs to set as environment
    #                variables inside the sandbox environment.
    #
    # Returns:
    #    (int): The program exit code.
    #
    # Raises:
    #    (:class:`.._exceptions.ImplError`): Always, unless overridden by the
    #                                        Sandbox subclass.
    #
    def _run(self, command, flags, *, cwd, env):
        raise ImplError("Sandbox of type '{}' does not implement _run()"
                        .format(type(self).__name__))
    # _create_batch()
    #
    # Abstract method for creating a batch object. Subclasses can override
    # this method to instantiate a subclass of _SandboxBatch.
    #
    # Args:
    #    main_group (:class:`_SandboxBatchGroup`): The top level batch group.
    #    flags (:class:`.SandboxFlags`): The flags for commands in this batch.
    #    collect (str): An optional directory containing partial install contents
    #                   on command failure.
    #
    # Returns:
    #    (:class:`_SandboxBatch`): The freshly created batch object.
    #
    def _create_batch(self, main_group, flags, *, collect=None):
        return _SandboxBatch(self, main_group, flags, collect=collect)
################################################
# Private methods #
################################################
......@@ -385,3 +532,138 @@ class Sandbox():
return True
return False
    # _get_plugin_id()
    #
    # Get the plugin's unique identifier, or None if no plugin was
    # provided at construction time. Used to attribute sandbox log
    # messages to the owning plugin.
    #
    def _get_plugin_id(self):
        return self.__plugin_id
# _callback()
#
# If this is called outside a batch context, the specified function is
# invoked immediately.
#
# If this is called in a batch context, the function is added to the batch
# for later invocation.
#
# Args:
# callback (callable): The function to invoke
#
def _callback(self, callback):
if self.__batch:
batch_call = _SandboxBatchCall(callback)
current_group = self.__batch.current_group
current_group.append(batch_call)
else:
callback()
# _SandboxBatch()
#
# A batch of sandbox commands.
#
class _SandboxBatch():
def __init__(self, sandbox, main_group, flags, *, collect=None):
self.sandbox = sandbox
self.main_group = main_group
self.current_group = main_group
self.flags = flags
self.collect = collect
def execute(self):
self.main_group.execute(self)
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
cm = context.timed_activity(group.label, unique_id=self.sandbox._get_plugin_id())
else:
cm = contextlib.suppress()
with cm:
group.execute_children(self)
def execute_command(self, command):
if command.label:
context = self.sandbox._get_context()
message = Message(self.sandbox._get_plugin_id(), MessageType.STATUS,
'Running {}'.format(command.label))
context.message(message)
exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
if exitcode != 0:
cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
label = command.label or cmdline
raise SandboxCommandError("Command '{}' failed with exitcode {}".format(label, exitcode),
collect=self.collect)
def execute_call(self, call):
call.callback()
# _SandboxBatchItem()
#
# An item in a command batch.
#
class _SandboxBatchItem():
def __init__(self, *, label=None):
self.label = label
# _SandboxBatchCommand()
#
# A single sandbox command queued in a batch.
#
class _SandboxBatchCommand(_SandboxBatchItem):

    def __init__(self, command, *, cwd, env, label=None):
        super().__init__(label=label)

        # The argv list to execute, along with the working directory
        # and environment it should run with
        self.command = command
        self.cwd = cwd
        self.env = env

    def execute(self, batch):
        # Visitor hook: let the batch decide how a command item is run
        batch.execute_command(self)
# _SandboxBatchGroup()
#
# A nestable group of items in a command batch.
#
class _SandboxBatchGroup(_SandboxBatchItem):

    def __init__(self, *, label=None):
        super().__init__(label=label)

        # Ordered child items (commands, calls, or nested groups)
        self.children = []

    def append(self, item):
        # Queue another item at the end of this group
        self.children.append(item)

    def execute(self, batch):
        # Visitor hook: let the batch decide how a group item is run
        batch.execute_group(self)

    def execute_children(self, batch):
        # Run every child in insertion order
        for child in self.children:
            child.execute(batch)
# _SandboxBatchCall()
#
# A deferred function invocation queued in a command batch.
#
class _SandboxBatchCall(_SandboxBatchItem):

    def __init__(self, callback):
        super().__init__()

        # The function to invoke when this item executes
        self.callback = callback

    def execute(self, batch):
        # Visitor hook: let the batch decide how a call item is run
        batch.execute_call(self)
......@@ -226,10 +226,11 @@ class ScriptElement(Element):
.format(build_dep.name), silent_nested=True):
build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
for dep in build_dep.dependencies(Scope.RUN):
dep.integrate(sandbox)
with sandbox.batch(SandboxFlags.NONE):
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
for dep in build_dep.dependencies(Scope.RUN):
dep.integrate(sandbox)
else:
# If layout, follow its rules.
for item in self.__layout:
......@@ -251,37 +252,40 @@ class ScriptElement(Element):
virtual_dstdir.descend(item['destination'].lstrip(os.sep).split(os.sep), create=True)
element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item['destination'])
for item in self.__layout:
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
# Skip layout members which dont stage an element
if not item['element']:
continue
# Skip layout members which dont stage an element
if not item['element']:
continue
element = self.search(Scope.BUILD, item['element'])
element = self.search(Scope.BUILD, item['element'])
# Integration commands can only be run for elements staged to /
if item['destination'] == '/':
with self.timed_activity("Integrating {}".format(element.name),
silent_nested=True):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
# Integration commands can only be run for elements staged to /
if item['destination'] == '/':
with self.timed_activity("Integrating {}".format(element.name),
silent_nested=True):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
sandbox.get_virtual_directory().descend(install_root_path_components, create=True)
def assemble(self, sandbox):
for groupname, commands in self.__commands.items():
with self.timed_activity("Running '{}'".format(groupname)):
for cmd in commands:
self.status("Running command", detail=cmd)
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
SandboxFlags.ROOT_READ_ONLY if self.__root_read_only else 0)
if exitcode != 0:
raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
collect=self.__install_root)
flags = SandboxFlags.NONE
if self.__root_read_only:
flags |= SandboxFlags.ROOT_READ_ONLY
with sandbox.batch(flags, collect=self.__install_root):
for groupname, commands in self.__commands.items():
with sandbox.batch(flags, label="Running '{}'".format(groupname)):
for cmd in commands:
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
sandbox.run(['sh', '-c', '-e', cmd + '\n'],
flags,
label=cmd)
# Return where the result can be collected from
return self.__install_root
......