Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (31)
Showing
with 517 additions and 96 deletions
......@@ -30,6 +30,10 @@ buildstream 1.3.1
specific. Recommendation if you are building in Linux is to use the
ones being used in freedesktop-sdk project, for example
o Running commands without elements specified will now attempt to use
the default targets defined in the project configuration.
If no default target is defined, all elements in the project will be used.
o All elements must now be suffixed with `.bst`
Attempting to use an element that does not have the `.bst` extension,
will result in a warning.
......
......@@ -32,7 +32,7 @@ from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
from ._cas import CASCache
from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
from ._workspaces import Workspaces, WorkspaceProjectCache
from .plugin import _plugin_lookup
from .sandbox import SandboxRemote
......@@ -657,20 +657,6 @@ class Context():
self._cascache = CASCache(self.artifactdir)
return self._cascache
# guess_element()
#
# Attempts to interpret which element the user intended to run commands on
#
# Returns:
# (str) The name of the element, or None if no element can be guessed
def guess_element(self):
workspace_project_dir, _ = utils._search_upward_for_files(self._directory, [WORKSPACE_PROJECT_FILE])
if workspace_project_dir:
workspace_project = self._workspace_project_cache.get(workspace_project_dir)
return workspace_project.get_default_element()
else:
return None
# _node_get_option_str()
#
......
......@@ -342,7 +342,15 @@ def init(app, project_name, format_version, element_path, force):
type=click.Path(readable=False))
@click.pass_obj
def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
"""Build elements in a pipeline"""
"""Build elements in a pipeline
Specifying no elements will result in building the default targets
of the project. If no default targets are configured, all project
elements will be built.
When this command is executed from a workspace directory, the default
is to build the workspace element.
"""
if (track_except or track_cross_junctions) and not (track_ or track_all):
click.echo("ERROR: The --track-except and --track-cross-junctions options "
......@@ -353,10 +361,12 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
click.echo("WARNING: --track-save is deprecated, saving is now unconditional", err=True)
with app.initialized(session_name="Build"):
if not all_ and not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
ignore_junction_targets = False
if not elements:
elements = app.project.get_default_targets()
# Junction elements cannot be built, exclude them from default targets
ignore_junction_targets = True
if track_all:
track_ = elements
......@@ -365,6 +375,7 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
track_targets=track_,
track_except=track_except,
track_cross_junctions=track_cross_junctions,
ignore_junction_targets=ignore_junction_targets,
build_all=all_)
......@@ -390,6 +401,13 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
def show(app, elements, deps, except_, order, format_):
"""Show elements in the pipeline
Specifying no elements will result in showing the default targets
of the project. If no default targets are configured, all project
elements will be shown.
When this command is executed from a workspace directory, the default
is to show the workspace element.
By default this will show all of the dependencies of the
specified target element.
......@@ -436,9 +454,7 @@ def show(app, elements, deps, except_, order, format_):
"""
with app.initialized():
if not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
elements = app.project.get_default_targets()
dependencies = app.stream.load_selection(elements,
selection=deps,
......@@ -478,6 +494,9 @@ def show(app, elements, deps, except_, order, format_):
def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command):
"""Run a command in the target element's sandbox environment
When this command is executed from a workspace directory, the default
is to shell into the workspace element.
This will stage a temporary sysroot for running the target
element, assuming it has already been built and all required
artifacts are in the local cache.
......@@ -511,7 +530,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command)
with app.initialized():
if not element:
element = app.context.guess_element()
element = app.project.get_default_target()
if not element:
raise AppError('Missing argument "ELEMENT".')
......@@ -581,6 +600,13 @@ def source():
def source_fetch(app, elements, deps, track_, except_, track_cross_junctions):
"""Fetch sources required to build the pipeline
Specifying no elements will result in fetching the default targets
of the project. If no default targets are configured, all project
elements will be fetched.
When this command is executed from a workspace directory, the default
is to fetch the workspace element.
By default this will only try to fetch sources which are
required for the build plan of the specified target element,
omitting sources for any elements which are already built
......@@ -606,9 +632,7 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions):
with app.initialized(session_name="Fetch"):
if not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
elements = app.project.get_default_targets()
app.stream.fetch(elements,
selection=deps,
......@@ -636,6 +660,15 @@ def source_track(app, elements, deps, except_, cross_junctions):
"""Consults the specified tracking branches for new versions available
to build and updates the project with any newly available references.
Specifying no elements will result in tracking the default targets
of the project. If no default targets are configured, all project
elements will be tracked.
When this command is executed from a workspace directory, the default
is to track the workspace element.
If no default is declared, all elements in the project will be tracked.
By default this will track just the specified element, but you can also
update a whole tree of dependencies in one go.
......@@ -647,9 +680,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
"""
with app.initialized(session_name="Track"):
if not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
elements = app.project.get_default_targets()
# Substitute 'none' for 'redirect' so that element redirections
# will be done
......@@ -685,6 +716,9 @@ def source_track(app, elements, deps, except_, cross_junctions):
def source_checkout(app, element, location, force, deps, fetch_, except_,
tar, build_scripts):
"""Checkout sources of an element to the specified location
When this command is executed from a workspace directory, the default
is to checkout the sources of the workspace element.
"""
if not element and not location:
click.echo("ERROR: LOCATION is not specified", err=True)
......@@ -697,7 +731,7 @@ def source_checkout(app, element, location, force, deps, fetch_, except_,
with app.initialized():
if not element:
element = app.context.guess_element()
element = app.project.get_default_target()
if not element:
raise AppError('Missing argument "ELEMENT".')
......@@ -763,7 +797,7 @@ def workspace_close(app, remove_dir, all_, elements):
if not (all_ or elements):
# NOTE: I may need to revisit this when implementing multiple projects
# opening one workspace.
element = app.context.guess_element()
element = app.project.get_default_target()
if element:
elements = (element,)
else:
......@@ -824,7 +858,7 @@ def workspace_reset(app, soft, track_, all_, elements):
with app.initialized():
if not (all_ or elements):
element = app.context.guess_element()
element = app.project.get_default_target()
if element:
elements = (element,)
else:
......@@ -921,7 +955,11 @@ def artifact():
type=click.Path(readable=False))
@click.pass_obj
def artifact_checkout(app, force, deps, integrate, hardlinks, tar, directory, element):
"""Checkout contents of an artifact"""
"""Checkout contents of an artifact
When this command is executed from a workspace directory, the default
is to checkout the artifact of the workspace element.
"""
from ..element import Scope
if hardlinks and tar is not None:
......@@ -952,7 +990,7 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, directory, el
with app.initialized():
if not element:
element = app.context.guess_element()
element = app.project.get_default_target()
if not element:
raise AppError('Missing argument "ELEMENT".')
......@@ -980,6 +1018,13 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, directory, el
def artifact_pull(app, elements, deps, remote):
"""Pull a built artifact from the configured remote artifact cache.
Specifying no elements will result in pulling the default targets
of the project. If no default targets are configured, all project
elements will be pulled.
When this command is executed from a workspace directory, the default
is to pull the workspace element.
By default the artifact will be pulled one of the configured caches
if possible, following the usual priority order. If the `--remote` flag
is given, only the specified cache will be queried.
......@@ -992,12 +1037,15 @@ def artifact_pull(app, elements, deps, remote):
"""
with app.initialized(session_name="Pull"):
ignore_junction_targets = False
if not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
elements = app.project.get_default_targets()
# Junction elements cannot be pulled, exclude them from default targets
ignore_junction_targets = True
app.stream.pull(elements, selection=deps, remote=remote)
app.stream.pull(elements, selection=deps, remote=remote,
ignore_junction_targets=ignore_junction_targets)
##################################################################
......@@ -1015,6 +1063,13 @@ def artifact_pull(app, elements, deps, remote):
def artifact_push(app, elements, deps, remote):
"""Push a built artifact to a remote artifact cache.
Specifying no elements will result in pushing the default targets
of the project. If no default targets are configured, all project
elements will be pushed.
When this command is executed from a workspace directory, the default
is to push the workspace element.
The default destination is the highest priority configured cache. You can
override this by passing a different cache URL with the `--remote` flag.
......@@ -1029,12 +1084,15 @@ def artifact_push(app, elements, deps, remote):
all: All dependencies
"""
with app.initialized(session_name="Push"):
ignore_junction_targets = False
if not elements:
guessed_target = app.context.guess_element()
if guessed_target:
elements = (guessed_target,)
elements = app.project.get_default_targets()
# Junction elements cannot be pushed, exclude them from default targets
ignore_junction_targets = True
app.stream.push(elements, selection=deps, remote=remote)
app.stream.push(elements, selection=deps, remote=remote,
ignore_junction_targets=ignore_junction_targets)
################################################################
......
......@@ -175,29 +175,22 @@ class TypeName(Widget):
# A widget for displaying the Element name
class ElementName(Widget):
def __init__(self, context, content_profile, format_profile):
super(ElementName, self).__init__(context, content_profile, format_profile)
# Pre initialization format string, before we know the length of
# element names in the pipeline
self._fmt_string = '{: <30}'
def render(self, message):
action_name = message.action_name
element_id = message.task_id or message.unique_id
if element_id is None:
return ""
plugin = _plugin_lookup(element_id)
name = plugin._get_full_name()
if element_id is not None:
plugin = _plugin_lookup(element_id)
name = plugin._get_full_name()
name = '{: <30}'.format(name)
else:
name = 'core activity'
name = '{: <30}'.format(name)
# Sneak the action name in with the element name
action_name = message.action_name
if not action_name:
action_name = "Main"
return self.content_profile.fmt("{: >5}".format(action_name.lower())) + \
self.format_profile.fmt(':') + \
self.content_profile.fmt(self._fmt_string.format(name))
self.format_profile.fmt(':') + self.content_profile.fmt(name)
# A widget for displaying the primary message text
......@@ -219,9 +212,12 @@ class CacheKey(Widget):
def render(self, message):
element_id = message.task_id or message.unique_id
if element_id is None or not self._key_length:
if not self._key_length:
return ""
if element_id is None:
return ' ' * self._key_length
missing = False
key = ' ' * self._key_length
plugin = _plugin_lookup(element_id)
......
......@@ -104,6 +104,9 @@ class Project():
# Absolute path to where elements are loaded from within the project
self.element_path = None
# Default target elements
self._default_targets = None
# ProjectRefs for the main refs and also for junctions
self.refs = ProjectRefs(self.directory, 'project.refs')
self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
......@@ -228,7 +231,7 @@ class Project():
'element-path', 'variables',
'environment', 'environment-nocache',
'split-rules', 'elements', 'plugins',
'aliases', 'name',
'aliases', 'name', 'defaults',
'artifacts', 'options',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
......@@ -391,6 +394,44 @@ class Project():
# Reset the element loader state
Element._reset_load_state()
# get_default_target()
#
# Attempts to interpret which element the user intended to run a command on.
#
# This is for commands that only accept a single target element; such
# commands deliberately ignore the project's configured default targets
# and only consider the workspace element (when invoked from inside a
# workspace directory).
#
# Returns:
#    (str): The workspace element name, or None when not invoked
#           from a workspace directory
#
def get_default_target(self):
    return self._invoked_from_workspace_element
# get_default_targets()
#
# Attempts to interpret which elements the user intended to run a command on.
# This is for commands that accept multiple target elements.
#
# Precedence: the workspace element (when invoked from a workspace
# directory), then the project's configured default targets, then
# every `.bst` file found under the element path.
#
# Returns:
#    (tuple of str): The default target element names
#
def get_default_targets(self):

    # If _invoked_from_workspace_element has a value,
    # a workspace element was found before a project config
    # Therefore the workspace does not contain a project
    if self._invoked_from_workspace_element:
        return (self._invoked_from_workspace_element,)

    # Default targets from project configuration
    if self._default_targets:
        return tuple(self._default_targets)

    # If default targets are not configured, default to all project elements
    default_targets = []
    for root, _, files in os.walk(self.element_path):
        for file in files:
            if file.endswith(".bst"):
                rel_dir = os.path.relpath(root, self.element_path)
                # NOTE: str.lstrip("./") strips any run of '.' and '/'
                # characters, which would corrupt element paths whose
                # first component starts with a dot — build the
                # project-relative name explicitly instead.
                if rel_dir == ".":
                    rel_file = file
                else:
                    rel_file = os.path.join(rel_dir, file)
                default_targets.append(rel_file)

    return tuple(default_targets)
# _load():
#
# Loads the project configuration file in the project
......@@ -456,6 +497,10 @@ class Project():
self.config.options = OptionPool(self.element_path)
self.first_pass_config.options = OptionPool(self.element_path)
defaults = _yaml.node_get(pre_config_node, Mapping, 'defaults')
_yaml.node_validate(defaults, ['targets'])
self._default_targets = _yaml.node_get(defaults, list, "targets")
# Fatal warnings
self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
......
......@@ -163,4 +163,4 @@ class Resources():
# unregister_exclusive_interest()
#
# Forget that a source ever cared about exclusive access to the
# given resources.
#
# Args:
#    resources (iterable): The resource types to release interest in
#    source (str): Identifier of the formerly interested party
#
def unregister_exclusive_interest(self, resources, source):
    for resource in resources:
        # Use discard() rather than remove(): unregistering an interest
        # that was never registered (or was already dropped) must not
        # raise a KeyError.
        self._exclusive_resources[resource].discard(source)
......@@ -40,8 +40,8 @@ class SchedStatus():
# Some action names for the internal jobs we launch
#
_ACTION_NAME_CLEANUP = 'cleanup'
_ACTION_NAME_CACHE_SIZE = 'cache_size'
_ACTION_NAME_CLEANUP = 'clean'
_ACTION_NAME_CACHE_SIZE = 'size'
# Scheduler()
......@@ -151,6 +151,9 @@ class Scheduler():
# Handle unix signals while running
self._connect_signals()
# Check if we need to start with some cache maintenance
self._check_cache_management()
# Run the queues
self._sched()
self.loop.run_forever()
......@@ -272,6 +275,31 @@ class Scheduler():
# Local Private Methods #
#######################################################
# _check_cache_management()
#
# Run an initial check if we need to lock the cache
# resource and check the size and possibly launch
# a cleanup.
#
# Sessions which do not add to the cache are not affected.
#
def _check_cache_management(self):

    # Only sessions with at least one queue requiring the CACHE
    # resource need this check at all.
    cache_users = [
        queue for queue in self.queues
        if ResourceType.CACHE in queue.resources
    ]
    if not cache_users:
        return

    # If the estimated size has outgrown the quota, schedule an
    # exclusive cache size job up front so nothing else touches the
    # cache while its real size is being established.
    cache = self.context.artifactcache
    if cache.has_quota_exceeded():
        self._sched_cache_size_job(exclusive=True)
# _spawn_job()
#
# Spawns a job
......@@ -292,6 +320,11 @@ class Scheduler():
self._cache_size_running = None
self.resources.release([ResourceType.CACHE, ResourceType.PROCESS])
# Unregister the exclusive interest if there was any
self.resources.unregister_exclusive_interest(
[ResourceType.CACHE], 'cache-size'
)
# Schedule a cleanup job if we've hit the threshold
if status != JobStatus.OK:
return
......@@ -344,11 +377,35 @@ class Scheduler():
# Runs a cache size job if one is scheduled to run now and
# sufficient resources are available.
#
def _sched_cache_size_job(self):
# Args:
# exclusive (bool): Run a cache size job immediately and
# hold the ResourceType.CACHE resource
# exclusively (used at startup).
#
def _sched_cache_size_job(self, *, exclusive=False):
# The exclusive argument is not intended (or safe) for arbitrary use.
if exclusive:
assert not self._cache_size_scheduled
assert not self._cache_size_running
assert not self._active_jobs
self._cache_size_scheduled = True
if self._cache_size_scheduled and not self._cache_size_running:
if self.resources.reserve([ResourceType.CACHE, ResourceType.PROCESS]):
# Handle the exclusive launch
exclusive_resources = set()
if exclusive:
exclusive_resources.add(ResourceType.CACHE)
self.resources.register_exclusive_interest(
exclusive_resources, 'cache-size'
)
# Reserve the resources (with the possible exclusive cache resource)
if self.resources.reserve([ResourceType.CACHE, ResourceType.PROCESS],
exclusive_resources):
# Update state and launch
self._cache_size_scheduled = False
self._cache_size_running = \
CacheSizeJob(self, _ACTION_NAME_CACHE_SIZE,
......
......@@ -161,6 +161,7 @@ class Stream():
# track_targets (list of str): Specified targets for tracking
# track_except (list of str): Specified targets to except from tracking
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
# ignore_junction_targets (bool): Whether junction targets should be filtered out
# build_all (bool): Whether to build all elements, or only those
# which are required to build the target.
#
......@@ -168,6 +169,7 @@ class Stream():
track_targets=None,
track_except=None,
track_cross_junctions=False,
ignore_junction_targets=False,
build_all=False):
if build_all:
......@@ -180,6 +182,7 @@ class Stream():
selection=selection, track_selection=PipelineSelection.ALL,
track_except_targets=track_except,
track_cross_junctions=track_cross_junctions,
ignore_junction_targets=ignore_junction_targets,
use_artifact_config=True,
fetch_subprojects=True,
dynamic_plan=True)
......@@ -291,6 +294,7 @@ class Stream():
# Args:
# targets (list of str): Targets to pull
# selection (PipelineSelection): The selection mode for the specified targets
# ignore_junction_targets (bool): Whether junction targets should be filtered out
# remote (str): The URL of a specific remote server to pull from, or None
#
# If `remote` specified as None, then regular configuration will be used
......@@ -298,6 +302,7 @@ class Stream():
#
def pull(self, targets, *,
selection=PipelineSelection.NONE,
ignore_junction_targets=False,
remote=None):
use_config = True
......@@ -306,6 +311,7 @@ class Stream():
elements, _ = self._load(targets, (),
selection=selection,
ignore_junction_targets=ignore_junction_targets,
use_artifact_config=use_config,
artifact_remote_url=remote,
fetch_subprojects=True)
......@@ -325,6 +331,7 @@ class Stream():
# Args:
# targets (list of str): Targets to push
# selection (PipelineSelection): The selection mode for the specified targets
# ignore_junction_targets (bool): Whether junction targets should be filtered out
# remote (str): The URL of a specific remote server to push to, or None
#
# If `remote` specified as None, then regular configuration will be used
......@@ -336,6 +343,7 @@ class Stream():
#
def push(self, targets, *,
selection=PipelineSelection.NONE,
ignore_junction_targets=False,
remote=None):
use_config = True
......@@ -344,6 +352,7 @@ class Stream():
elements, _ = self._load(targets, (),
selection=selection,
ignore_junction_targets=ignore_junction_targets,
use_artifact_config=use_config,
artifact_remote_url=remote,
fetch_subprojects=True)
......@@ -851,6 +860,7 @@ class Stream():
# except_targets (list of str): Specified targets to except from fetching
# track_except_targets (list of str): Specified targets to except from fetching
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
# ignore_junction_targets (bool): Whether junction targets should be filtered out
# use_artifact_config (bool): Whether to initialize artifacts with the config
# artifact_remote_url (bool): A remote url for initializing the artifacts
# fetch_subprojects (bool): Whether to fetch subprojects while loading
......@@ -865,6 +875,7 @@ class Stream():
except_targets=(),
track_except_targets=(),
track_cross_junctions=False,
ignore_junction_targets=False,
use_artifact_config=False,
artifact_remote_url=None,
fetch_subprojects=False,
......@@ -881,6 +892,10 @@ class Stream():
rewritable=rewritable,
fetch_subprojects=fetch_subprojects)
# Optionally filter out junction elements
if ignore_junction_targets:
elements = [e for e in elements if e.get_kind() != 'junction']
# Hold on to the targets
self.targets = elements
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 20
BST_FORMAT_VERSION = 21
# The base BuildStream artifact version
......
......@@ -167,3 +167,11 @@ shell:
# Command to run when `bst shell` does not provide a command
#
command: [ 'sh', '-i' ]
# Defaults for bst commands
#
defaults:
# Set default target elements to use when none are passed on the command line.
# If none are configured in the project, default to all project elements.
targets: []
......@@ -945,6 +945,44 @@ Host side environment variable expansion is also supported:
- '${XDG_RUNTIME_DIR}/pulse/native'
.. _project_default_targets:
Default targets
---------------
When running BuildStream commands from a project directory or subdirectory
without specifying any target elements on the command line, the default targets
of the project will be used. The default targets can be configured in the
``defaults`` section as follows:
.. code:: yaml
defaults:
# List of default target elements
targets:
- app.bst
If no default targets are configured in ``project.conf``, BuildStream commands
will default to all ``.bst`` files in the configured element path.
Commands that cannot support junctions as target elements (``bst build``,
``bst artifact push``, and ``bst artifact pull``) ignore junctions in the list
of default targets.
When running BuildStream commands from a workspace directory (that is not a
BuildStream project directory), project default targets are not used and the
workspace element will be used as the default target instead.
``bst artifact checkout``, ``bst source checkout``, and ``bst shell`` are
currently limited to a single target element and due to this, they currently
do not use project default targets. However, they still use the workspace
element as default target when run from a workspace directory.
.. note::
The ``targets`` configuration is available since :ref:`format version 21 <project_format_version>`
.. _project_builtin_defaults:
Builtin defaults
......
......@@ -18,6 +18,7 @@
#
import os
import re
from unittest import mock
import pytest
......@@ -66,8 +67,9 @@ def test_artifact_expires(cli, datafiles, tmpdir):
res.assert_success()
# Check that the correct element remains in the cache
assert cli.get_element_state(project, 'target.bst') != 'cached'
assert cli.get_element_state(project, 'target2.bst') == 'cached'
states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
assert states['target.bst'] != 'cached'
assert states['target2.bst'] == 'cached'
# Ensure that we don't end up deleting the whole cache (or worse) if
......@@ -144,9 +146,11 @@ def test_expiry_order(cli, datafiles, tmpdir):
# have been removed.
# Note that buildstream will reduce the cache to 50% of the
# original size - we therefore remove multiple elements.
assert (tuple(cli.get_element_state(project, element) for element in
('unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst')) ==
check_elements = [
'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
]
states = cli.get_element_states(project, check_elements)
assert (tuple(states[element] for element in check_elements) ==
('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
......@@ -176,8 +180,9 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
res.assert_success()
# Check that the correct element remains in the cache
assert cli.get_element_state(project, 'dependency.bst') == 'cached'
assert cli.get_element_state(project, 'unrelated.bst') == 'cached'
states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
assert states['dependency.bst'] == 'cached'
assert states['unrelated.bst'] == 'cached'
# We try to build an element which depends on the LRU artifact,
# and could therefore fail if we didn't make sure dependencies
......@@ -192,9 +197,10 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
res = cli.run(project=project, args=['build', 'target.bst'])
res.assert_success()
assert cli.get_element_state(project, 'unrelated.bst') != 'cached'
assert cli.get_element_state(project, 'dependency.bst') == 'cached'
assert cli.get_element_state(project, 'target.bst') == 'cached'
states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
assert states['target.bst'] == 'cached'
assert states['dependency.bst'] == 'cached'
assert states['unrelated.bst'] != 'cached'
# Assert that we never delete a dependency required for a build tree
......@@ -239,11 +245,11 @@ def test_never_delete_required(cli, datafiles, tmpdir):
# life there may potentially be N-builders cached artifacts
# which exceed the quota
#
assert cli.get_element_state(project, 'dep1.bst') == 'cached'
assert cli.get_element_state(project, 'dep2.bst') == 'cached'
assert cli.get_element_state(project, 'dep3.bst') != 'cached'
assert cli.get_element_state(project, 'target.bst') != 'cached'
states = cli.get_element_states(project, ['target.bst'])
assert states['dep1.bst'] == 'cached'
assert states['dep2.bst'] == 'cached'
assert states['dep3.bst'] != 'cached'
assert states['target.bst'] != 'cached'
# Assert that we never delete a dependency required for a build tree,
......@@ -275,10 +281,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
res.assert_success()
# They should all be cached
assert cli.get_element_state(project, 'dep1.bst') == 'cached'
assert cli.get_element_state(project, 'dep2.bst') == 'cached'
assert cli.get_element_state(project, 'dep3.bst') == 'cached'
assert cli.get_element_state(project, 'target.bst') == 'cached'
states = cli.get_element_states(project, ['target.bst'])
assert states['dep1.bst'] == 'cached'
assert states['dep2.bst'] == 'cached'
assert states['dep3.bst'] == 'cached'
assert states['target.bst'] == 'cached'
# Now increase the size of all the elements
#
......@@ -296,10 +303,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
# Expect the same result that we did in test_never_delete_required()
#
assert cli.get_element_state(project, 'dep1.bst') == 'cached'
assert cli.get_element_state(project, 'dep2.bst') == 'cached'
assert cli.get_element_state(project, 'dep3.bst') != 'cached'
assert cli.get_element_state(project, 'target.bst') != 'cached'
states = cli.get_element_states(project, ['target.bst'])
assert states['dep1.bst'] == 'cached'
assert states['dep2.bst'] == 'cached'
assert states['dep3.bst'] != 'cached'
assert states['target.bst'] != 'cached'
# Ensure that only valid cache quotas make it through the loading
......@@ -418,3 +426,66 @@ def test_extract_expiry(cli, datafiles, tmpdir):
assert os.path.isdir(refsdirtarget2)
assert not os.path.exists(refsdirtarget)
# Ensures that when launching BuildStream with a full artifact cache,
# the cache size and cleanup jobs are run before any other jobs.
#
@pytest.mark.datafiles(DATA_DIR)
def test_cleanup_first(cli, datafiles, tmpdir):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_path = 'elements'

    cli.configure({
        'cache': {
            'quota': 10000000,
        }
    })

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about ~10KiB, so we need a bit of a
    # buffer)
    create_element_size('target.bst', project, element_path, [], 8000000)
    res = cli.run(project=project, args=['build', 'target.bst'])
    res.assert_success()

    assert cli.get_element_state(project, 'target.bst') == 'cached'

    # Now configure with a smaller quota, create a situation
    # where the cache must be cleaned up before building anything else.
    #
    # Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
    cli.configure({
        'cache': {
            'quota': 5000000,
        },
        'scheduler': {
            'fetchers': 1,
            'builders': 1
        }
    })

    # Our cache is now more than full, BuildStream
    create_element_size('target2.bst', project, element_path, [], 4000000)
    res = cli.run(project=project, args=['build', 'target2.bst'])
    res.assert_success()

    # Find all of the activity (like push, pull, fetch) lines
    results = re.findall(r'\[.*\]\[.*\]\[\s*(\S+):.*\]\s*START\s*.*\.log', res.stderr)

    # Don't bother checking the order of 'fetch', it is allowed to start
    # before or after the initial cache size job, runs in parallel, and does
    # not require ResourceType.CACHE.
    results.remove('fetch')

    # Assert the expected sequence of events
    assert results == ['size', 'clean', 'build']

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
    assert states['target.bst'] != 'cached'
    assert states['target2.bst'] == 'cached'
......@@ -389,8 +389,9 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
_yaml.dump(filter2_config, filter2_file)
# Assert that a fetch is needed
assert cli.get_element_state(project, input_name) == 'no reference'
assert cli.get_element_state(project, input2_name) == 'no reference'
states = cli.get_element_states(project, [input_name, input2_name])
assert states[input_name] == 'no reference'
assert states[input2_name] == 'no reference'
# Now try to track it
result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
......@@ -450,8 +451,9 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
_yaml.dump(filter2_config, filter2_file)
# Assert that a fetch is needed
assert cli.get_element_state(project, input_name) == 'no reference'
assert cli.get_element_state(project, input2_name) == 'no reference'
states = cli.get_element_states(project, [input_name, input2_name])
assert states[input_name] == 'no reference'
assert states[input2_name] == 'no reference'
# Now try to track it
result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
......
......@@ -2,6 +2,7 @@ import os
import tarfile
import hashlib
import pytest
import subprocess
from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
from tests.testutils.site import IS_WINDOWS
......@@ -61,6 +62,35 @@ def test_build_checkout(datafiles, cli, strict, hardlinks):
assert os.path.exists(filename)
@pytest.mark.datafiles(DATA_DIR + "_world")
def test_build_default_all(datafiles, cli):
    # Running `bst build` with no targets should build every element
    # in the project, producing one log directory entry per element.
    project = os.path.join(datafiles.dirname, datafiles.basename)
    result = cli.run(project=project, silent=True, args=['build'])

    result.assert_success()
    target_dir = os.path.join(cli.directory, DATA_DIR + "_world", "elements")
    output_dir = os.path.join(cli.directory, "logs", "test")

    # Count directory entries with os.listdir() rather than shelling
    # out to `ls | wc -w`: that approach is non-portable (no `ls`/`wc`
    # on Windows), leaks the intermediate Popen pipe, and compares raw
    # `wc` byte output instead of integers.
    expected = len(os.listdir(target_dir))
    results = len(os.listdir(output_dir))

    assert results == expected
@pytest.mark.datafiles(DATA_DIR + "_default")
def test_build_default(cli, datafiles):
    # Run `bst build` with no element names; the project's configured
    # default targets should be built instead.
    project = os.path.join(datafiles.dirname, datafiles.basename)

    result = cli.run(project=project, silent=True, args=['build'])
    result.assert_success()

    # The default target declared in project.conf must now be cached
    assert cli.get_element_state(project, "target2.bst") == "cached"
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("strict,hardlinks", [
("non-strict", "hardlinks"),
......@@ -550,6 +580,53 @@ def test_build_checkout_junction(cli, tmpdir, datafiles):
assert contents == 'animal=Pony\n'
# Test that default targets work with projects with junctions
@pytest.mark.datafiles(DATA_DIR + "_world")
def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    subproject_path = os.path.join(project, 'files', 'sub-project')
    junction_path = os.path.join(project, 'elements', 'junction.bst')
    element_path = os.path.join(project, 'elements', 'junction-dep.bst')
    checkout = os.path.join(cli.directory, 'checkout')

    # Create a repo to hold the subproject and generate a junction
    # element for it (the returned ref is not needed by this test,
    # so it is deliberately not bound to a variable).
    generate_junction(tmpdir, subproject_path, junction_path)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        'kind': 'stack',
        'depends': [
            {
                'junction': 'junction.bst',
                'filename': 'import-etc.bst'
            }
        ]
    }
    _yaml.dump(element, element_path)

    # Now try to build it, this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=['build'])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'

    # Now check it out
    result = cli.run(project=project, args=[
        'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
    ])
    result.assert_success()

    # Assert the content of /etc/animal.conf
    filename = os.path.join(checkout, 'etc', 'animal.conf')
    assert os.path.exists(filename)
    with open(filename, 'r') as f:
        contents = f.read()
    assert contents == 'animal=Pony\n'
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
......
......@@ -49,6 +49,41 @@ def test_fetch(cli, tmpdir, datafiles, kind):
assert cli.get_element_state(project, element_name) == 'buildable'
@pytest.mark.datafiles(os.path.join(TOP_DIR, 'project_world'))
def test_fetch_default_targets(cli, tmpdir, datafiles):
    # Running `bst source fetch` with no element names should fetch
    # the sources of the project's default targets.
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_name = 'fetch-test.bst'
    element_file = os.path.join(project, 'elements', element_name)

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo('git', str(tmpdir))
    ref = repo.create(project)

    # Write out our test target
    _yaml.dump({
        'kind': 'import',
        'sources': [
            repo.source_config(ref=ref)
        ]
    }, element_file)

    # Assert that a fetch is needed
    assert cli.get_element_state(project, element_name) == 'fetch needed'

    # Now try to fetch it, using the default target feature
    result = cli.run(project=project, args=['source', 'fetch'])
    result.assert_success()

    # Assert that we are now buildable because the source is
    # now cached.
    assert cli.get_element_state(project, element_name) == 'buildable'
@pytest.mark.datafiles(os.path.join(TOP_DIR, 'consistencyerror'))
def test_fetch_consistency_error(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
......
......@@ -41,7 +41,7 @@ def test_default_logging(cli, tmpdir, datafiles):
result = cli.run(project=project, args=['source', 'fetch', element_name])
result.assert_success()
m = re.search(r"\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
m = re.search(r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr)
assert(m is not None)
......@@ -77,7 +77,7 @@ def test_custom_logging(cli, tmpdir, datafiles):
result = cli.run(project=project, args=['source', 'fetch', element_name])
result.assert_success()
m = re.search(r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
m = re.search(r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\s*,.*,SUCCESS,Checking sources", result.stderr)
assert(m is not None)
......
# Test fixture: stack element used as a build target
kind: stack
description: |
  Main stack target for the bst build test
# Test fixture: stack element referenced as a project default target
kind: stack
description: |
  Main stack target for the bst build test
# Project config for frontend build test
name: test
element-path: elements
fatal-warnings:
- bad-element-suffix

# Elements to operate on when no targets are given on the command line
defaults:
  targets:
  - target2.bst
kind: compose

# The dependency key must be spelled 'filename' — BuildStream YAML keys
# are case sensitive, so 'fileNAME' would fail to resolve the dependency.
depends:
- filename: import-dev.bst
  type: build

config:
  # Dont try running the sandbox, we dont have a
  # runtime to run anything in this context.
  integrate: False