Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (83)
Showing with 311 additions and 180 deletions
image: buildstream/testsuite-debian:9-master-102-9067e269
image: buildstream/testsuite-debian:9-master-112-a9f63c5e
cache:
key: "$CI_JOB_NAME-"
......@@ -79,18 +79,25 @@ source_dist:
- coverage-linux/
tests-debian-9:
image: buildstream/testsuite-debian:9-master-102-9067e269
image: buildstream/testsuite-debian:9-master-112-a9f63c5e
<<: *linux-tests
tests-fedora-27:
image: buildstream/testsuite-fedora:27-master-102-9067e269
image: buildstream/testsuite-fedora:27-master-112-a9f63c5e
<<: *linux-tests
tests-fedora-28:
image: buildstream/testsuite-fedora:28-master-112-a9f63c5e
<<: *linux-tests
tests-ubuntu-18.04:
image: buildstream/testsuite-ubuntu:18.04-master-112-a9f63c5e
<<: *linux-tests
tests-unix:
# Use fedora here, to a) run a test on fedora and b) ensure that we
# can get rid of ostree - this is not possible with debian-8
image: buildstream/testsuite-fedora:27-master-102-9067e269
image: buildstream/testsuite-fedora:27-master-112-a9f63c5e
stage: test
variables:
BST_FORCE_BACKEND: "unix"
......@@ -211,6 +218,7 @@ coverage:
dependencies:
- tests-debian-9
- tests-fedora-27
- tests-fedora-28
- tests-unix
- source_dist
......
......@@ -21,3 +21,6 @@ recursive-include tests *.expected
# Protocol Buffers
recursive-include buildstream/_protos *.proto
# Requirements files
dev-requirements.txt
=================
buildstream 1.3.1
=================
o Failed builds are included in the cache as well.
`bst checkout` will provide anything in `%{install-root}`.
A build including cached failures will cause any dependent elements
to not be scheduled and fail during artifact assembly,
and display the retry prompt during an interactive session.
o Due to enabling the use of relative workspaces, "Legacy" workspaces
may need to be closed and remade before the changes will affect them.
Downgrading after using this feature may result in workspaces
not functioning correctly
o Elements may now specify 'build-depends' and 'runtime-depends' fields
to avoid having to specify the dependency type for every entry in
'depends'.
o Source plugins may now request access to previous sources during track and
fetch by setting `BST_REQUIRES_PREVIOUS_SOURCES_TRACK` and/or
`BST_REQUIRES_PREVIOUS_SOURCES_FETCH` attributes.
o Add new `pip` source plugin for downloading python packages using pip,
based on requirements files from previous sources.
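A minimal element sketch (not part of this diff) tying the new dependency fields and the pip source together; element names, URLs and the source alias are hypothetical, and the pip option names follow the plugin's documentation for this release:

    kind: autotools
    description: Hypothetical element showing the new format features

    build-depends:
    - gcc.bst                # needed only while building
    runtime-depends:
    - base/runtime.bst       # needed only at run time
    depends:
    - libfoo.bst             # both build and run time (the former 'all' type)

    sources:
    - kind: git
      url: upstream:myapp.git
      track: master
    - kind: pip              # reads requirements files staged by the git source
      requirements-files:
      - requirements.txt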
=================
buildstream 1.1.5
=================
......@@ -11,11 +38,7 @@ buildstream 1.1.5
o Added new `remote` source plugin for downloading file blobs
o Failed builds are included in the cache as well.
`bst checkout` will provide anything in `%{install-root}`.
A build including cached fails will cause any dependant elements
to not be scheduled and fail during artifact assembly,
and display the retry prompt during an interactive session.
o Add support for the new include '(@)' directive in project.conf and .bst files
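A hedged sketch of the include directive; the file names are made up, and the directive accepts either a single file or a list, in project.conf or in any dictionary of a .bst file:

    # At the top level of project.conf
    (@): include/defaults.yml

    # Or nested inside any dictionary, with a list of files
    variables:
      (@):
      - include/variables.yml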
=================
......
......@@ -30,6 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
from .sandbox import Sandbox, SandboxFlags
from .plugin import Plugin
from .source import Source, SourceError, Consistency, SourceFetcher
from .element import Element, ElementError, Scope
from .element import Element, ElementError
from .element_enums import Scope
from .buildelement import BuildElement
from .scriptelement import ScriptElement
......@@ -21,7 +21,7 @@ import os
import string
from collections import Mapping, namedtuple
from ..element import _KeyStrength
from ..element_enums import _KeyStrength
from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
from .._message import Message, MessageType
from .. import utils
......
......@@ -24,6 +24,8 @@ import os
import signal
import stat
import tempfile
import uuid
import errno
from urllib.parse import urlparse
import grpc
......@@ -81,7 +83,8 @@ class CASCache(ArtifactCache):
tree = self.resolve_ref(ref, update_mtime=True)
dest = os.path.join(self.extractdir, element._get_project().name, element.normal_name, tree.hash)
dest = os.path.join(self.extractdir, element._get_project().name,
element.normal_name, tree.hash)
if os.path.isdir(dest):
# artifact has already been extracted
return dest
......@@ -99,7 +102,7 @@ class CASCache(ArtifactCache):
#
# If rename fails with these errors, another process beat
# us to it so just ignore.
if e.errno not in [os.errno.ENOTEMPTY, os.errno.EEXIST]:
if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
raise ArtifactError("Failed to extract artifact for ref '{}': {}"
.format(ref, e)) from e
......@@ -309,8 +312,11 @@ class CASCache(ArtifactCache):
# Upload any blobs missing on the server
skipped_remote = False
for digest in missing_blobs.values():
uuid_ = uuid.uuid4()
resource_name = '/'.join(['uploads', str(uuid_), 'blobs',
digest.hash, str(digest.size_bytes)])
def request_stream():
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
with open(self.objpath(digest), 'rb') as f:
assert os.fstat(f.fileno()).st_size == digest.size_bytes
offset = 0
......@@ -747,7 +753,7 @@ class CASCache(ArtifactCache):
yield from self._required_blobs(dirnode.digest)
def _fetch_blob(self, remote, digest, out):
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
request = bytestream_pb2.ReadRequest()
request.resource_name = resource_name
request.read_offset = 0
......
......@@ -23,6 +23,7 @@ import os
import signal
import sys
import tempfile
import uuid
import click
import grpc
......@@ -130,12 +131,21 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
def Read(self, request, context):
resource_name = request.resource_name
client_digest = _digest_from_resource_name(resource_name)
assert request.read_offset <= client_digest.size_bytes
client_digest = _digest_from_download_resource_name(resource_name)
if client_digest is None:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
if request.read_offset > client_digest.size_bytes:
context.set_code(grpc.StatusCode.OUT_OF_RANGE)
return
try:
with open(self.cas.objpath(client_digest), 'rb') as f:
assert os.fstat(f.fileno()).st_size == client_digest.size_bytes
if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
if request.read_offset > 0:
f.seek(request.read_offset)
......@@ -163,12 +173,18 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
resource_name = None
with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
for request in request_iterator:
assert not finished
assert request.write_offset == offset
if finished or request.write_offset != offset:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
if resource_name is None:
# First request
resource_name = request.resource_name
client_digest = _digest_from_resource_name(resource_name)
client_digest = _digest_from_upload_resource_name(resource_name)
if client_digest is None:
context.set_code(grpc.StatusCode.NOT_FOUND)
return response
try:
_clean_up_cache(self.cas, client_digest.size_bytes)
except ArtifactTooLargeException as e:
......@@ -177,14 +193,20 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return response
elif request.resource_name:
# If it is set on subsequent calls, it **must** match the value of the first request.
assert request.resource_name == resource_name
if request.resource_name != resource_name:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
out.write(request.data)
offset += len(request.data)
if request.finish_write:
assert client_digest.size_bytes == offset
if client_digest.size_bytes != offset:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
out.flush()
digest = self.cas.add_object(path=out.name)
assert digest.hash == client_digest.hash
if digest.hash != client_digest.hash:
context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
return response
finished = True
assert finished
......@@ -247,13 +269,48 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
return response
def _digest_from_resource_name(resource_name):
def _digest_from_download_resource_name(resource_name):
parts = resource_name.split('/')
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
parts.insert(0, 'blobs')
if len(parts) != 3 or parts[0] != 'blobs':
return None
try:
digest = remote_execution_pb2.Digest()
digest.hash = parts[1]
digest.size_bytes = int(parts[2])
return digest
except ValueError:
return None
def _digest_from_upload_resource_name(resource_name):
parts = resource_name.split('/')
assert len(parts) == 2
digest = remote_execution_pb2.Digest()
digest.hash = parts[0]
digest.size_bytes = int(parts[1])
return digest
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
parts.insert(0, 'uploads')
parts.insert(1, str(uuid.uuid4()))
parts.insert(2, 'blobs')
if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
return None
try:
uuid_ = uuid.UUID(hex=parts[1])
if uuid_.version != 4:
return None
digest = remote_execution_pb2.Digest()
digest.hash = parts[3]
digest.size_bytes = int(parts[4])
return digest
except ValueError:
return None
def _has_object(cas, digest):
......
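For reference, a short sketch (not part of the change) of the resource-name layouts that the download and upload parsers above accept; the digest value is invented:

    import uuid

    digest_hash = 'f' * 64                       # hypothetical SHA-256 hash
    size_bytes = 4096

    # Download (Read) resource names: "blobs/{hash}/{size_bytes}"
    download_name = '/'.join(['blobs', digest_hash, str(size_bytes)])

    # Upload (Write) resource names: "uploads/{uuid4}/blobs/{hash}/{size_bytes}"
    upload_name = '/'.join(['uploads', str(uuid.uuid4()), 'blobs',
                            digest_hash, str(size_bytes)])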
......@@ -269,6 +269,9 @@ class App():
else:
self._message(MessageType.FAIL, session_name, elapsed=elapsed)
# Notify session failure
self._notify("{} failed".format(session_name), "{}".format(e))
if self._started:
self._print_summary()
......@@ -286,6 +289,9 @@ class App():
if self._started:
self._print_summary()
# Notify session success
self._notify("{} succeeded".format(session_name), "")
# init_project()
#
# Initialize a new BuildStream project, either with the explicitly passed options,
......@@ -419,6 +425,12 @@ class App():
# Local Functions #
############################################################
# Local function for calling the notify() virtual method
#
def _notify(self, title, text):
if self.interactive:
self.notify(title, text)
# Local message propagator
#
def _message(self, message_type, message, **kwargs):
......@@ -571,8 +583,8 @@ class App():
while choice not in ['continue', 'quit', 'terminate', 'retry']:
click.echo(summary, err=True)
self.notify("BuildStream failure", "{} on element {}"
.format(failure.action_name, element.name))
self._notify("BuildStream failure", "{} on element {}"
.format(failure.action_name, element.name))
try:
choice = click.prompt("Choice:", default='continue', err=True,
......
......@@ -418,7 +418,9 @@ class LogLine(Widget):
if "%{workspace-dirs" in format_:
workspace = element._get_workspace()
if workspace is not None:
path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
path = workspace.get_absolute_path()
if path.startswith("~/"):
path = os.path.join(os.getenv('HOME', '/root'), path[2:])
line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
else:
line = p.fmt_subst(
......
......@@ -71,6 +71,7 @@ class LoadElement():
'kind', 'depends', 'sources', 'sandbox',
'variables', 'environment', 'environment-nocache',
'config', 'public', 'description',
'build-depends', 'runtime-depends',
])
# Extract the Dependencies
......@@ -127,28 +128,46 @@ class LoadElement():
# Returns:
# (list): a list of Dependency objects
#
def _extract_depends_from_node(node):
depends = _yaml.node_get(node, list, Symbol.DEPENDS, default_value=[])
def _extract_depends_from_node(node, *, key=None):
if key is None:
build_depends = _extract_depends_from_node(node, key=Symbol.BUILD_DEPENDS)
runtime_depends = _extract_depends_from_node(node, key=Symbol.RUNTIME_DEPENDS)
depends = _extract_depends_from_node(node, key=Symbol.DEPENDS)
return build_depends + runtime_depends + depends
elif key == Symbol.BUILD_DEPENDS:
default_dep_type = Symbol.BUILD
elif key == Symbol.RUNTIME_DEPENDS:
default_dep_type = Symbol.RUNTIME
elif key == Symbol.DEPENDS:
default_dep_type = None
else:
assert False, "Unexpected value of key '{}'".format(key)
depends = _yaml.node_get(node, list, key, default_value=[])
output_deps = []
for dep in depends:
dep_provenance = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[depends.index(dep)])
dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[depends.index(dep)])
if isinstance(dep, str):
dependency = Dependency(dep, provenance=dep_provenance)
dependency = Dependency(dep, provenance=dep_provenance, dep_type=default_dep_type)
elif isinstance(dep, Mapping):
_yaml.node_validate(dep, ['filename', 'type', 'junction'])
# Make type optional, for this we set it to None
dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
if dep_type is None or dep_type == Symbol.ALL:
dep_type = None
elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
.format(provenance, dep_type))
if default_dep_type:
_yaml.node_validate(dep, ['filename', 'junction'])
dep_type = default_dep_type
else:
_yaml.node_validate(dep, ['filename', 'type', 'junction'])
# Make type optional, for this we set it to None
dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
if dep_type is None or dep_type == Symbol.ALL:
dep_type = None
elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
.format(provenance, dep_type))
filename = _yaml.node_get(dep, str, Symbol.FILENAME)
junction = _yaml.node_get(dep, str, Symbol.JUNCTION, default_value=None)
......@@ -159,13 +178,13 @@ def _extract_depends_from_node(node):
else:
index = depends.index(dep)
p = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[index])
p = _yaml.node_get_provenance(node, key=key, indices=[index])
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dependency is not specified as a string or a dictionary".format(p))
output_deps.append(dependency)
# Now delete "depends", we dont want it anymore
del node[Symbol.DEPENDS]
# Now delete the field, we don't want it anymore
del node[key]
return output_deps
......@@ -522,14 +522,15 @@ class Loader():
element = Element._new_from_meta(meta_element, platform.artifactcache)
element._preflight()
for source in element.sources():
sources = list(element.sources())
for idx, source in enumerate(sources):
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
if fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source._fetch()
source._fetch(sources[0:idx])
else:
detail = "Try fetching the project with `bst fetch {}`".format(filename)
raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
......
......@@ -26,6 +26,8 @@ class Symbol():
FILENAME = "filename"
KIND = "kind"
DEPENDS = "depends"
BUILD_DEPENDS = "build-depends"
RUNTIME_DEPENDS = "runtime-depends"
SOURCES = "sources"
CONFIG = "config"
VARIABLES = "variables"
......
......@@ -359,23 +359,14 @@ class Pipeline():
if inconsistent:
detail = "Exact versions are missing for the following elements:\n\n"
missingTrack = 0
for element in inconsistent:
detail += " " + element._get_full_name()
detail += " Element: {} is inconsistent\n".format(element._get_full_name())
for source in element.sources():
if not source._get_consistency() and not source.get_ref():
if hasattr(source, 'tracking') and source.tracking is None:
detail += ": Source {} is missing ref and track. ".format(source._get_full_name()) + \
"Please specify a ref or branch/tag to track."
missingTrack = 1
detail += "\n"
if source._get_consistency() == Consistency.INCONSISTENT:
detail += " Source {} is missing ref\n".format(source)
detail += '\n'
detail += "Try tracking these elements first with `bst track`\n"
if missingTrack:
detail += "\nThen track these elements with `bst track`\n"
else:
detail += "\nTry tracking these elements first with `bst track`\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
#############################################################
......
......@@ -19,7 +19,6 @@
# Tiago Gomes <tiago.gomes@codethink.co.uk>
import os
import multiprocessing # for cpu_count()
from collections import Mapping, OrderedDict
from pluginbase import PluginBase
from . import utils
......@@ -32,6 +31,7 @@ from ._options import OptionPool
from ._artifactcache import ArtifactCache
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
from .plugin import CoreWarnings
from ._projectrefs import ProjectRefs, ProjectRefStorage
from ._versions import BST_FORMAT_VERSION
from ._loader import Loader
......@@ -106,7 +106,7 @@ class Project():
self.first_pass_config = ProjectConfig()
self.junction = junction # The junction Element object, if this is a subproject
self.fail_on_overlap = False # Whether overlaps are treated as errors
self.ref_storage = None # ProjectRefStorage setting
self.base_environment = {} # The base set of environment variables
self.base_env_nocache = None # The base nocache mask (list) for the environment
......@@ -121,6 +121,8 @@ class Project():
self._cli_options = cli_options
self._cache_key = None
self._fatal_warnings = [] # A list of warnings which should trigger an error
self._shell_command = [] # The default interactive shell command
self._shell_environment = {} # Statically set environment vars
self._shell_host_files = [] # A list of HostMount objects
......@@ -457,7 +459,7 @@ class Project():
'split-rules', 'elements', 'plugins',
'aliases', 'name',
'artifacts', 'options',
'fail-on-overlap', 'shell',
'fail-on-overlap', 'shell', 'fatal-warnings',
'ref-storage', 'sandbox', 'mirrors'
])
......@@ -479,8 +481,25 @@ class Project():
# Load project split rules
self._splits = _yaml.node_get(config, Mapping, 'split-rules')
# Fail on overlap
self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
# Fatal warnings
self._fatal_warnings = _yaml.node_get(config, list, 'fatal-warnings', default_value=[])
# Support backwards compatibility for fail-on-overlap
fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap', default_value=None)
if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap:
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Deprecation check
if fail_on_overlap is not None:
self._context.message(
Message(
None,
MessageType.WARN,
"Use of fail-on-overlap within project.conf " +
"is deprecated. Consider using fatal-warnings instead."
)
)
# Load project.refs if it exists, this may be ignored.
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
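A hedged project.conf sketch of the replacement setting; the project name is made up and 'overlaps' is the CoreWarnings.OVERLAPS token handled above:

    name: myproject

    # Replaces the deprecated boolean setting:
    #   fail-on-overlap: True
    fatal-warnings:
    - overlaps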
......@@ -572,7 +591,10 @@ class Project():
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
# Based on some testing (mainly on AWS), the maximum effective
# max-jobs value seems to be around 8-10 if we have enough cores;
# users should set values based on workload and build infrastructure
output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
......@@ -710,3 +732,17 @@ class Project():
# paths are passed in relative to the project, but must be absolute
origin_dict['path'] = os.path.join(self.directory, path)
destination.append(origin_dict)
# _warning_is_fatal():
#
# Returns true if the warning in question should be considered fatal based on
# the project configuration.
#
# Args:
# warning_str (str): The warning configuration string to check against
#
# Returns:
# (bool): True if the warning should be considered fatal and cause an error.
#
def _warning_is_fatal(self, warning_str):
return warning_str in self._fatal_warnings
......@@ -40,8 +40,10 @@ class FetchQueue(Queue):
self._skip_cached = skip_cached
def process(self, element):
previous_sources = []
for source in element.sources():
source._fetch()
source._fetch(previous_sources)
previous_sources.append(source)
def status(self, element):
# state of dependencies may have changed, recalculate element state
......
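The queue above now threads previously processed sources into each fetch. On the plugin side, a source opts in to receiving them roughly as sketched below; the class is hypothetical and the exact public signatures may vary between releases:

    from buildstream import Source

    class RequirementsSource(Source):
        # Request access to the output of the sources listed before this one
        BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
        BST_REQUIRES_PREVIOUS_SOURCES_FETCH = True

        def track(self, previous_sources_dir):
            # Inspect files staged by the preceding sources (e.g. a
            # requirements.txt) to decide which refs to track
            ...

        def fetch(self, previous_sources_dir):
            # Download whatever the previously staged sources describe
            ...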
......@@ -267,8 +267,11 @@ class Stream():
except_targets=None,
cross_junctions=False):
# We pass no target to build. Only to track. Passing build targets
# would fully load project configuration which might not be
# possible before tracking is done.
_, elements = \
self._load(targets, targets,
self._load([], targets,
selection=selection, track_selection=selection,
except_targets=except_targets,
track_except_targets=except_targets,
......@@ -460,7 +463,7 @@ class Stream():
selection=PipelineSelection.REDIRECT,
track_selection=PipelineSelection.REDIRECT)
target = elements[0]
workdir = os.path.abspath(directory)
directory = os.path.abspath(directory)
if not list(target.sources()):
build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
......@@ -476,7 +479,7 @@ class Stream():
workspace = workspaces.get_workspace(target._get_full_name())
if workspace and not force:
raise StreamError("Workspace '{}' is already defined at: {}"
.format(target.name, workspace.path))
.format(target.name, workspace.get_absolute_path()))
# If we're going to checkout, we need at least a fetch,
# if we were asked to track first, we're going to fetch anyway.
......@@ -502,7 +505,7 @@ class Stream():
except OSError as e:
raise StreamError("Failed to create workspace directory: {}".format(e)) from e
workspaces.create_workspace(target._get_full_name(), workdir)
workspaces.create_workspace(target._get_full_name(), directory)
if not no_checkout:
with target.timed_activity("Staging sources to {}".format(directory)):
......@@ -526,12 +529,12 @@ class Stream():
# Remove workspace directory if prompted
if remove_dir:
with self._context.timed_activity("Removing workspace directory {}"
.format(workspace.path)):
.format(workspace.get_absolute_path())):
try:
shutil.rmtree(workspace.path)
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
raise StreamError("Could not remove '{}': {}"
.format(workspace.path, e)) from e
.format(workspace.get_absolute_path(), e)) from e
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
......@@ -574,28 +577,30 @@ class Stream():
for element in elements:
workspace = workspaces.get_workspace(element._get_full_name())
workspace_path = workspace.get_absolute_path()
if soft:
workspace.prepared = False
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
.format(element.name, workspace.path))
.format(element.name, workspace_path))
continue
with element.timed_activity("Removing workspace directory {}"
.format(workspace.path)):
.format(workspace_path)):
try:
shutil.rmtree(workspace.path)
shutil.rmtree(workspace_path)
except OSError as e:
raise StreamError("Could not remove '{}': {}"
.format(workspace.path, e)) from e
.format(workspace_path, e)) from e
workspaces.delete_workspace(element._get_full_name())
workspaces.create_workspace(element._get_full_name(), workspace.path)
workspaces.create_workspace(element._get_full_name(), workspace_path)
with element.timed_activity("Staging sources to {}".format(workspace.path)):
with element.timed_activity("Staging sources to {}".format(workspace_path)):
element._open_workspace()
self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
self._message(MessageType.INFO,
"Reset workspace for {} at: {}".format(element.name,
workspace_path))
workspaces.save_config()
......@@ -632,7 +637,7 @@ class Stream():
for element_name, workspace_ in self._context.get_workspaces().list():
workspace_detail = {
'element': element_name,
'directory': workspace_.path,
'directory': workspace_.get_absolute_path(),
}
workspaces.append(workspace_detail)
......@@ -822,6 +827,12 @@ class Stream():
#
# A convenience method for loading element lists
#
# If `targets` is not empty, the project configuration will be
# fully loaded. If `targets` is empty, tracking will still be
# resolved for elements in `track_targets`, but no build pipeline
# will be resolved. This behavior is important so that track()
# does not trigger a full load of the project configuration.
#
# Args:
# targets (list of str): Main targets to load
# track_targets (list of str): Tracking targets
......@@ -869,7 +880,7 @@ class Stream():
#
# This can happen with `bst build --track`
#
if not self._pipeline.targets_include(elements, track_elements):
if targets and not self._pipeline.targets_include(elements, track_elements):
raise StreamError("Specified tracking targets that are not "
"within the scope of primary targets")
......@@ -905,6 +916,10 @@ class Stream():
for element in track_selected:
element._schedule_tracking()
if not targets:
self._pipeline.resolve_elements(track_selected)
return [], track_selected
# ArtifactCache.setup_remotes expects all projects to be fully loaded
for project in self._context.get_projects():
project.ensure_fully_loaded()
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 13
BST_FORMAT_VERSION = 16
# The base BuildStream artifact version
......@@ -33,4 +33,4 @@ BST_FORMAT_VERSION = 13
# or if buildstream was changed in a way which can cause
# the same cache key to produce something that is no longer
# the same.
BST_CORE_ARTIFACT_VERSION = 3
BST_CORE_ARTIFACT_VERSION = 5
......@@ -26,14 +26,6 @@ from ._exceptions import LoadError, LoadErrorReason
BST_WORKSPACE_FORMAT_VERSION = 3
# Hold on to a list of members which get serialized
_WORKSPACE_MEMBERS = [
'prepared',
'path',
'last_successful',
'running_files'
]
# Workspace()
#
......@@ -56,7 +48,7 @@ class Workspace():
def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
self.prepared = prepared
self.last_successful = last_successful
self.path = path
self._path = path
self.running_files = running_files if running_files is not None else {}
self._toplevel_project = toplevel_project
......@@ -64,14 +56,20 @@ class Workspace():
# to_dict()
#
# Convert this object to a dict for serialization purposes
# Convert the members of this workspace which get serialized into a dict
#
# Returns:
# (dict) A dict representation of the workspace
#
def to_dict(self):
return {key: val for key, val in self.__dict__.items()
if key in _WORKSPACE_MEMBERS and val is not None}
ret = {
'prepared': self.prepared,
'path': self._path,
'running_files': self.running_files
}
if self.last_successful is not None:
ret["last_successful"] = self.last_successful
return ret
# from_dict():
#
......@@ -103,15 +101,7 @@ class Workspace():
# True if the workspace differs from 'other', otherwise False
#
def differs(self, other):
for member in _WORKSPACE_MEMBERS:
member_a = getattr(self, member)
member_b = getattr(other, member)
if member_a != member_b:
return True
return False
return self.to_dict() != other.to_dict()
# invalidate_key()
#
......@@ -133,7 +123,7 @@ class Workspace():
if os.path.isdir(fullpath):
utils.copy_files(fullpath, directory)
else:
destfile = os.path.join(directory, os.path.basename(self.path))
destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
utils.safe_copy(fullpath, destfile)
# add_running_files()
......@@ -189,7 +179,7 @@ class Workspace():
filelist = utils.list_relative_paths(fullpath)
filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
else:
filelist = [(self.path, fullpath)]
filelist = [(self.get_absolute_path(), fullpath)]
self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
......@@ -200,7 +190,7 @@ class Workspace():
# Returns: The absolute path of the element's workspace.
#
def get_absolute_path(self):
return os.path.join(self._toplevel_project.directory, self.path)
return os.path.join(self._toplevel_project.directory, self._path)
# Workspaces()
......@@ -236,6 +226,9 @@ class Workspaces():
# path (str) - The path in which the workspace should be kept
#
def create_workspace(self, element_name, path):
if path.startswith(self._toplevel_project.directory):
path = os.path.relpath(path, self._toplevel_project.directory)
self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
return self._workspaces[element_name]
......
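A small sketch of the path handling introduced above, with hypothetical paths: a workspace created inside the project is stored relative to the project directory and re-expanded by get_absolute_path(), which is what enables the relative workspaces mentioned in the NEWS entry:

    import os

    project_dir = "/home/user/myproject"
    requested = "/home/user/myproject/workspaces/hello"

    stored = os.path.relpath(requested, project_dir)   # "workspaces/hello"
    absolute = os.path.join(project_dir, stored)       # full path restored
    assert absolute == requested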
......@@ -13,10 +13,6 @@ element-path: .
# Store source references in element files
ref-storage: inline
# Overlaps are just warnings
fail-on-overlap: False
# Variable Configuration
#
variables:
......
......@@ -78,7 +78,6 @@ import stat
import copy
from collections import Mapping, OrderedDict
from contextlib import contextmanager
from enum import Enum
import tempfile
import shutil
......@@ -94,44 +93,13 @@ from . import _cachekey
from . import _signals
from . import _site
from ._platform import Platform
from .plugin import CoreWarnings
from .sandbox._config import SandboxConfig
from .storage.directory import Directory
from .storage._filebaseddirectory import FileBasedDirectory, VirtualDirectoryError
# _KeyStrength():
#
# Strength of cache key
#
class _KeyStrength(Enum):
# Includes strong cache keys of all build dependencies and their
# runtime dependencies.
STRONG = 1
# Includes names of direct build dependencies but does not include
# cache keys of dependencies.
WEAK = 2
class Scope(Enum):
"""Types of scope for a given element"""
ALL = 1
"""All elements which the given element depends on, following
all elements required for building. Including the element itself.
"""
BUILD = 2
"""All elements required for building the element, including their
respective run dependencies. Not including the given element itself.
"""
RUN = 3
"""All elements required for running the element. Including the element
itself.
"""
from .storage._filebaseddirectory import FileBasedDirectory
from .storage.directory import VirtualDirectoryError
from .element_enums import _KeyStrength, Scope
class ElementError(BstError):
......@@ -746,32 +714,23 @@ class Element(Plugin):
ignored[dep.name] = result.ignored
if overlaps:
overlap_error = overlap_warning = False
error_detail = warning_detail = "Staged files overwrite existing files in staging area:\n"
overlap_warning = False
warning_detail = "Staged files overwrite existing files in staging area:\n"
for f, elements in overlaps.items():
overlap_error_elements = []
overlap_warning_elements = []
# The bottom item overlaps nothing
overlapping_elements = elements[1:]
for elm in overlapping_elements:
element = self.search(scope, elm)
element_project = element._get_project()
if not element.__file_is_whitelisted(f):
if element_project.fail_on_overlap:
overlap_error_elements.append(elm)
overlap_error = True
else:
overlap_warning_elements.append(elm)
overlap_warning = True
overlap_warning_elements.append(elm)
overlap_warning = True
warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
error_detail += _overlap_error_detail(f, overlap_error_elements, elements)
if overlap_warning:
self.warn("Non-whitelisted overlaps detected", detail=warning_detail)
if overlap_error:
raise ElementError("Non-whitelisted overlaps detected and fail-on-overlaps is set",
detail=error_detail, reason="overlap-error")
self.warn("Non-whitelisted overlaps detected", detail=warning_detail,
warning_token=CoreWarnings.OVERLAPS)
if ignored:
detail = "Not staging files which would replace non-empty directories:\n"
......@@ -1270,6 +1229,12 @@ class Element(Plugin):
# Prepend provenance to the error
raise ElementError("{}: {}".format(self, e), reason=e.reason) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError("{}: {} cannot be the first source of an element "
"as it requires access to previous sources"
.format(self, self.__sources[0]))
# Preflight the sources
for source in self.sources():
source._preflight()
......@@ -1313,9 +1278,9 @@ class Element(Plugin):
#
def _track(self):
refs = []
for source in self.__sources:
for index, source in enumerate(self.__sources):
old_ref = source.get_ref()
new_ref = source._track()
new_ref = source._track(self.__sources[0:index])
refs.append((source._get_unique_id(), new_ref))
# Complimentary warning that the new ref will be unused.
......@@ -1403,7 +1368,8 @@ class Element(Plugin):
# If mount_workspaces is set and we're doing incremental builds,
# the workspace is already mounted into the sandbox.
if not (mount_workspaces and self.__can_build_incrementally()):
with self.timed_activity("Staging local files at {}".format(workspace.path)):
with self.timed_activity("Staging local files at {}"
.format(workspace.get_absolute_path())):
workspace.stage(temp_staging_directory)
else:
# No workspace, stage directly
......@@ -1566,7 +1532,7 @@ class Element(Plugin):
path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
sandbox_vpath = sandbox_vroot.descend(path_components)
try:
sandbox_vpath.import_files(workspace.path)
sandbox_vpath.import_files(workspace.get_absolute_path())
except UtilError as e:
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
.format(e))
......@@ -1893,7 +1859,7 @@ class Element(Plugin):
source._init_workspace(temp)
# Now hardlink the files into the workspace target.
utils.link_files(temp, workspace.path)
utils.link_files(temp, workspace.get_absolute_path())
# _get_workspace():
#
......@@ -2053,9 +2019,7 @@ class Element(Plugin):
'cache': type(self.__artifacts).__name__
}
# fail-on-overlap setting cannot affect elements without dependencies
if project.fail_on_overlap and dependencies:
self.__cache_key_dict['fail-on-overlap'] = True
self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
cache_key_dict = self.__cache_key_dict.copy()
cache_key_dict['dependencies'] = dependencies
......