Compare revisions
Commits on Source (26)
Showing with 298 additions and 90 deletions
@@ -74,6 +74,7 @@ class ArtifactCache():
self._has_fetch_remotes = False
self._has_push_remotes = False
self._has_partial_push_remotes = False
os.makedirs(self.extractdir, exist_ok=True)
@@ -398,6 +399,8 @@ class ArtifactCache():
self._has_fetch_remotes = True
if remote_spec.push:
self._has_push_remotes = True
if remote_spec.partial_push:
self._has_partial_push_remotes = True
remotes[remote_spec.url] = CASRemote(remote_spec)
@@ -596,6 +599,31 @@ class ArtifactCache():
remotes_for_project = self._remotes[element._get_project()]
return any(remote.spec.push for remote in remotes_for_project)
# has_partial_push_remotes():
#
# Check whether any remote repositories are available for pushing
# non-complete artifacts
#
# Args:
# element (Element): The Element to check
#
# Returns:
# (bool): True if any remote repository is configured for optional
# partial pushes, False otherwise
#
def has_partial_push_remotes(self, *, element=None):
# If there are no partial push remotes available, we can't partial push at all
if not self._has_partial_push_remotes:
return False
elif element is None:
# At least one remote is set to allow partial pushes
return True
else:
# Check whether the specified element's project has any remotes
# configured to accept partial artifact pushes
remotes_for_project = self._remotes[element._get_project()]
return any(remote.spec.partial_push for remote in remotes_for_project)
# push():
#
# Push committed artifact to remote repository.
@@ -603,6 +631,8 @@ class ArtifactCache():
# Args:
# element (Element): The Element whose artifact is to be pushed
# keys (list): The cache keys to use
# partial (bool): Whether the artifact is cached in a partial state
# subdir (string): Optional subdir to exclude from the push
#
# Returns:
# (bool): True if any remote was updated, False if no pushes were required
@@ -610,12 +640,25 @@ class ArtifactCache():
# Raises:
# (ArtifactError): if there was an error
#
def push(self, element, keys):
def push(self, element, keys, partial=False, subdir=None):
refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
project = element._get_project()
push_remotes = [r for r in self._remotes[project] if r.spec.push]
push_remotes = []
partial_remotes = []
# Create list of remotes to push to, given current element and partial push config
if not partial:
push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
if self._has_partial_push_remotes:
# Create a specific list of the remotes expecting the artifact to be pushed in a partial
# state, that is, without the optional subdir even if it exists locally. No need to
# attempt pushing a partial artifact to a remote that is queued to also receive
# a full artifact
partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
r not in push_remotes]
pushed = False
@@ -632,6 +675,19 @@ class ArtifactCache():
remote.spec.url, element._get_brief_display_key()
))
for remote in partial_remotes:
remote.init()
display_key = element._get_brief_display_key()
element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
if self.cas.push(refs, remote, subdir=subdir):
element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
pushed = True
else:
element.info("Remote ({}) already has {} partial cached".format(
remote.spec.url, element._get_brief_display_key()
))
return pushed
# pull():
@@ -659,14 +715,23 @@ class ArtifactCache():
element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
if subdir:
# Attempt to extract subdir into artifact extract dir if it already exists
# without containing the subdir. If the respective artifact extract dir does not
# exist, a complete extraction will be performed.
self.extract(element, key, subdir)
# no need to pull from additional remotes
return True
if not self.contains_subdir_artifact(element, key, subdir):
# The pull was expecting the specific subdir to be present, attempt
# to find it in other available remotes
element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
.format(display_key, remote.spec.url, subdir))
else:
element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
# Attempt to extract subdir into artifact extract dir if it already exists
# without containing the subdir. If the respective artifact extract dir does not
# exist, a complete extraction will be performed.
self.extract(element, key, subdir)
# no need to pull from additional remotes
return True
else:
element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
return True
else:
element.info("Remote ({}) does not have {} cached".format(
remote.spec.url, element._get_brief_display_key()
@@ -45,7 +45,8 @@ from .. import _yaml
_MAX_PAYLOAD_BYTES = 1024 * 1024
class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
class CASRemoteSpec(namedtuple('CASRemoteSpec',
'url push partial_push server_cert client_key client_cert instance_name')):
# _new_from_config_node
#
@@ -53,9 +54,13 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
#
@staticmethod
def _new_from_config_node(spec_node, basedir=None):
_yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
_yaml.node_validate(spec_node,
['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
'client-cert', 'instance_name'])
url = _yaml.node_get(spec_node, str, 'url')
push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
if not url:
provenance = _yaml.node_get_provenance(spec_node, 'url')
raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -85,10 +90,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: 'client-cert' was specified without 'client-key'".format(provenance))
return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
class BlobNotFound(CASError):
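For reference, a remote opts in to receiving partial artifacts through the new `allow-partial-push` key validated above. A minimal sketch of the corresponding configuration, with an illustrative URL (not part of this change):

artifacts:
  url: https://artifacts.example.com:11002
  push: true
  allow-partial-push: true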
@@ -283,34 +288,40 @@ class CASCache():
# (bool): True if pull was successful, False if ref was not available
#
def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
try:
remote.init()
request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
request.key = ref
response = remote.ref_storage.GetReference(request)
while True:
try:
remote.init()
tree = remote_execution_pb2.Digest()
tree.hash = response.digest.hash
tree.size_bytes = response.digest.size_bytes
request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
request.key = ref
response = remote.ref_storage.GetReference(request)
# Check if the element artifact is present; if so, just fetch the subdir.
if subdir and os.path.exists(self.objpath(tree)):
self._fetch_subdir(remote, tree, subdir)
else:
# Fetch artifact, excluded_subdirs determined in pullqueue
self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
tree = remote_execution_pb2.Digest()
tree.hash = response.digest.hash
tree.size_bytes = response.digest.size_bytes
self.set_ref(ref, tree)
# Check if the element artifact is present; if so, just fetch the subdir.
if subdir and os.path.exists(self.objpath(tree)):
self._fetch_subdir(remote, tree, subdir)
else:
# Fetch artifact, excluded_subdirs determined in pullqueue
self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
return True
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
else:
return False
except BlobNotFound as e:
return False
self.set_ref(ref, tree)
return True
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
else:
return False
except BlobNotFound as e:
if not excluded_subdirs and subdir:
# Could not complete a full pull, attempt partial
excluded_subdirs, subdir = subdir, excluded_subdirs
else:
return False
# pull_tree():
#
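The `except BlobNotFound` handler above swaps `subdir` and `excluded_subdirs`, so the `while True:` loop retries exactly once as a partial pull. A standalone Python sketch of that control flow, where `fetch` is a hypothetical stand-in for the GetReference and fetch calls:

class BlobNotFound(Exception):
    pass

def pull_with_fallback(fetch, subdir):
    excluded_subdirs = None
    while True:
        try:
            # First pass: full pull. Second pass, if any: partial pull
            # that skips the subdir whose blobs were missing.
            fetch(subdir=subdir, excluded_subdirs=excluded_subdirs)
            return True
        except BlobNotFound:
            if not excluded_subdirs and subdir:
                # Could not complete a full pull, retry without the subdir
                excluded_subdirs, subdir = subdir, excluded_subdirs
            else:
                return False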
@@ -355,6 +366,7 @@ class CASCache():
# Args:
# refs (list): The refs to push
# remote (CASRemote): The remote to push to
# subdir (string): Optional specific subdir to exempt from the push
#
# Returns:
# (bool): True if any remote was updated, False if no pushes were required
@@ -362,7 +374,7 @@ class CASCache():
# Raises:
# (CASError): if there was an error
#
def push(self, refs, remote):
def push(self, refs, remote, subdir=None):
skipped_remote = True
try:
for ref in refs:
@@ -384,7 +396,7 @@ class CASCache():
# Intentionally re-raise RpcError for outer except block.
raise
self._send_directory(remote, tree)
self._send_directory(remote, tree, excluded_dir=subdir)
request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
request.keys.append(ref)
@@ -866,10 +878,17 @@ class CASCache():
a += 1
b += 1
def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
if tree.hash in reachable:
return
# If looping through subdir digests, skip processing if
# ref path does not exist, allowing for partial objects
if subdir and not os.path.exists(self.objpath(tree)):
return
# Raises a FileNotFoundError if the path does not exist,
# which should only happen for the top level digest
if update_mtime:
os.utime(self.objpath(tree))
@@ -886,9 +905,9 @@ class CASCache():
reachable.add(filenode.digest.hash)
for dirnode in directory.directories:
self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
def _required_blobs(self, directory_digest):
def _required_blobs(self, directory_digest, excluded_dir=None):
# parse directory, and recursively add blobs
d = remote_execution_pb2.Digest()
d.hash = directory_digest.hash
@@ -907,7 +926,8 @@ class CASCache():
yield d
for dirnode in directory.directories:
yield from self._required_blobs(dirnode.digest)
if dirnode.name != excluded_dir:
yield from self._required_blobs(dirnode.digest)
def _fetch_blob(self, remote, digest, stream):
resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
@@ -1029,6 +1049,7 @@ class CASCache():
objpath = self._ensure_blob(remote, dir_digest)
directory = remote_execution_pb2.Directory()
with open(objpath, 'rb') as f:
directory.ParseFromString(f.read())
@@ -1104,9 +1125,8 @@ class CASCache():
assert response.committed_size == digest.size_bytes
def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
required_blobs = self._required_blobs(digest)
def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
missing_blobs = dict()
# Limit size of FindMissingBlobs request
for required_blobs_group in _grouper(required_blobs, 512):
@@ -27,8 +27,8 @@ import uuid
import errno
import threading
import click
import grpc
import click
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
@@ -673,7 +673,6 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
@cli.group(short_help="Manipulate sources for an element")
def source():
"""Manipulate sources for an element"""
pass
##################################################################
@@ -822,7 +821,6 @@ def source_checkout(app, element, location, force, deps, fetch_, except_,
@cli.group(short_help="Manipulate developer workspaces")
def workspace():
"""Manipulate developer workspaces"""
pass
##################################################################
@@ -1000,7 +998,6 @@ def _classify_artifacts(names, cas, project_directory):
@cli.group(short_help="Manipulate cached artifacts")
def artifact():
"""Manipulate cached artifacts"""
pass
################################################################
@@ -203,7 +203,7 @@ def is_incomplete_option(all_args, cmd_param):
if start_of_option(arg_str):
last_option = arg_str
return True if last_option and last_option in cmd_param.opts else False
return bool(last_option and last_option in cmd_param.opts)
def is_incomplete_argument(current_params, cmd_param):
@@ -23,8 +23,8 @@ from contextlib import ExitStack
from mmap import mmap
import re
import textwrap
import click
from ruamel import yaml
import click
from . import Profile
from .. import Element, Consistency
@@ -99,7 +99,6 @@ class Loader():
# Returns: The toplevel LoadElement
def load(self, targets, rewritable=False, ticker=None, fetch_subprojects=False):
invalid_elements = []
for filename in targets:
if os.path.isabs(filename):
# XXX Should this just be an assertion ?
@@ -109,14 +108,8 @@
"path to the base project directory: {}"
.format(filename, self._basedir))
if not filename.endswith(".bst"):
invalid_elements.append(filename)
self._warn_invalid_elements(targets)
if invalid_elements:
self._warn("Target elements '{}' do not have expected file extension `.bst` "
"Improperly named elements will not be discoverable by commands"
.format(invalid_elements),
warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
# First pass, recursively load files and populate our table of LoadElements
#
deps = []
@@ -280,12 +273,7 @@
self._elements[filename] = element
# Load all dependency files for the new LoadElement
invalid_elements = []
for dep in element.deps:
if not dep.name.endswith(".bst"):
invalid_elements.append(dep.name)
continue
if dep.junction:
self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -300,11 +288,9 @@
"{}: Cannot depend on junction"
.format(dep.provenance))
if invalid_elements:
self._warn("The following dependencies do not have expected file extension `.bst`: {} "
"Improperly named elements will not be discoverable by commands"
.format(invalid_elements),
warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
deps_names = [dep.name for dep in element.deps]
self._warn_invalid_elements(deps_names)
return element
# _check_circular_deps():
@@ -679,3 +665,69 @@ class Loader():
message = Message(None, MessageType.WARN, brief)
self._context.message(message)
# Print warning messages if any of the specified elements have invalid names.
#
# Valid filenames should end with the ".bst" extension.
#
# Args:
# elements (list): List of element names
#
# Raises:
# (:class:`.LoadError`): When warning_token is considered fatal by the project configuration
#
def _warn_invalid_elements(self, elements):
# invalid_elements
#
# A dict that maps warning types to the matching elements.
invalid_elements = {
CoreWarnings.BAD_ELEMENT_SUFFIX: [],
CoreWarnings.BAD_CHARACTERS_IN_NAME: [],
}
for filename in elements:
if not filename.endswith(".bst"):
invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX].append(filename)
if not self._valid_chars_name(filename):
invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME].append(filename)
if invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]:
self._warn("Target elements '{}' do not have expected file extension `.bst` "
"Improperly named elements will not be discoverable by commands"
.format(invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]),
warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
if invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]:
self._warn("Target elements '{}' have invalid characerts in their name."
.format(invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]),
warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME)
# Check if the given filename contains only valid characters.
#
# Args:
# name (str): Name of the file
#
# Returns:
# (bool): True if all characters are valid, False otherwise.
#
def _valid_chars_name(self, name):
for char in name:
char_val = ord(char)
# 0-31 are control chars, 127 is DEL, and >127 means non-ASCII
if char_val <= 31 or char_val >= 127:
return False
# Disallow characters that are invalid on Windows. The list can be
# found at https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file
#
# Note that although : (colon) is not allowed, we do not raise
# warnings because of that, since we use it as a separator for
# junctioned elements.
#
# We also do not raise warnings on slashes since they are used as
# path separators.
if char in r'<>"|?*':
return False
return True
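To make the rules concrete, here is a standalone re-implementation of the check, exercised with an assumed typical valid name and with the invalid name used by the new tests below:

def valid_chars_name(name):
    for char in name:
        char_val = ord(char)
        # Control characters, DEL and non-ASCII bytes are rejected
        if char_val <= 31 or char_val >= 127:
            return False
        # Windows-reserved characters, minus ':' (junction separator)
        # and '/' (path separator) which BuildStream needs
        if char in r'<>"|?*':
            return False
    return True

assert valid_chars_name("element.bst")
assert not valid_chars_name("invalid-chars|<>-in-name.bst")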
@@ -77,7 +77,7 @@ def terminator(terminate_func):
yield
return
outermost = False if terminator_stack else True
outermost = bool(not terminator_stack)
terminator_stack.append(terminate_func)
if outermost:
@@ -137,7 +137,7 @@ def suspend_handler(sig, frame):
def suspendable(suspend_callback, resume_callback):
global suspendable_stack # pylint: disable=global-statement
outermost = False if suspendable_stack else True
outermost = bool(not suspendable_stack)
suspender = Suspender(suspend_callback, resume_callback)
suspendable_stack.append(suspender)
@@ -326,7 +326,6 @@ class Element(Plugin):
*Since: 1.2*
"""
pass
def assemble(self, sandbox):
"""Assemble the output artifact
@@ -1340,7 +1339,7 @@ class Element(Plugin):
@contextmanager
def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
# bst shell and bst checkout require a local sandbox.
bare_directory = True if directory else False
bare_directory = bool(directory)
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
bare_directory=bare_directory) as sandbox:
sandbox._usebuildtree = usebuildtree
@@ -1695,7 +1694,7 @@ class Element(Plugin):
# Store workspaced.yaml
_yaml.dump(_yaml.node_sanitize({
'workspaced': True if self._get_workspace() else False
'workspaced': bool(self._get_workspace())
}), os.path.join(metadir, 'workspaced.yaml'))
# Store workspaced-dependencies.yaml
@@ -1801,13 +1800,19 @@ class Element(Plugin):
# (bool): True if this element does not need a push job to be created
#
def _skip_push(self):
if not self.__artifacts.has_push_remotes(element=self):
# No push remotes for this element's project
return True
# Do not push elements that aren't cached, or that are cached with a dangling buildtree
# artifact unless the element type is expected to have an empty buildtree directory
if not self._cached_buildtree():
# artifact unless the element type is expected to have an empty buildtree directory. Check
# that this default behaviour is not overridden via a remote configured to allow pushing
# artifacts without their corresponding buildtree.
if not self._cached():
return True
if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
return True
# Do not push tainted artifact
@@ -1818,7 +1823,8 @@ class Element(Plugin):
# _push():
#
# Push locally cached artifact to remote artifact repository.
# Push locally cached artifact to the remote artifact repository. An attempt
# will be made to push partial artifacts given the current config
#
# Returns:
# (bool): True if the remote was updated, False if it already existed
@@ -1831,8 +1837,19 @@ class Element(Plugin):
self.warn("Not pushing tainted artifact.")
return False
# Push all keys used for local commit
pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
# Push all keys used for local commit. This could be a full or partial push,
# given the previous _skip_push() logic. If the buildtree isn't cached,
# mark the push as partial
partial = False
subdir = 'buildtree'
if not self._cached_buildtree():
partial = True
pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
# The artifact might be partially cached on the server, with only the top level ref existing.
# Check if we need to attempt a push of a locally cached buildtree given the current config
if not pushed:
return False
@@ -112,7 +112,7 @@ class ImportElement(BuildElement):
# Ensure target directory parent exists but target directory doesn't
commands.append("mkdir -p {}".format(os.path.dirname(outputdir)))
commands.append("[ ! -e {} ] || rmdir {}".format(outputdir, outputdir))
commands.append("[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir))
# Move it over
commands.append("mv {} {}".format(inputdir, outputdir))
@@ -231,7 +231,13 @@ class DownloadableFileSource(Source):
if not DownloadableFileSource.__urlopener:
try:
netrc_config = netrc.netrc()
except FileNotFoundError:
except OSError:
# If the .netrc file was not found, FileNotFoundError will be
# raised, but OSError will be raised directly by the netrc package
# in the case that $HOME is not set.
#
# This will catch both cases.
#
DownloadableFileSource.__urlopener = urllib.request.build_opener()
except netrc.NetrcParseError as e:
self.warn('{}: While reading .netrc: {}'.format(self, e))
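This works because of Python's exception hierarchy: FileNotFoundError is a subclass of OSError, so the single `except OSError` clause covers both failure modes described in the comment:

# One handler suffices: FileNotFoundError (missing ~/.netrc) is a
# subclass of OSError (raised directly by netrc when $HOME is unset)
assert issubclass(FileNotFoundError, OSError)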
@@ -499,7 +499,6 @@ class Source(Plugin):
*Since: 1.4*
"""
pass
#############################################################
# Public Methods #
@@ -53,12 +53,10 @@ class IndexEntry():
class ResolutionException(VirtualDirectoryError):
""" Superclass of all exceptions that can be raised by
CasBasedDirectory._resolve. Should not be used outside this module. """
pass
class InfiniteSymlinkException(ResolutionException):
""" Raised when an infinite symlink loop is found. """
pass
class AbsoluteSymlinkException(ResolutionException):
@@ -66,7 +64,6 @@ class AbsoluteSymlinkException(ResolutionException):
target starts with the path separator) and we have disallowed
following such symlinks.
"""
pass
class UnexpectedFileException(ResolutionException):
@@ -664,13 +661,11 @@ class CasBasedDirectory(Directory):
""" Sets a static modification time for all regular files in this directory.
Since we don't store any modification time, we don't need to do anything.
"""
pass
def set_deterministic_user(self):
""" Sets all files in this directory to the current user's euid/egid.
We also don't store user data, so this can be ignored.
"""
pass
def export_files(self, to_directory, *, can_link=False, can_destroy=False):
"""Copies everything from this into to_directory, which must be the name
@@ -105,6 +105,12 @@ class CoreWarnings():
is referenced either on the command line or by another element
"""
BAD_CHARACTERS_IN_NAME = "bad-characters-in-name"
"""
This warning will be produced when the filename of a target contains
invalid characters.
"""
# _KeyStrength():
#
@@ -526,3 +526,27 @@ read-only variables are also dynamically declared by BuildStream:
build, support for this is conditional on the element type
and the build system used (any element using 'make' can
implement this).
Naming elements
---------------
When naming the element files, use the following rules:
* The name of the file must have the ``.bst`` extension.
* All characters in the name must be printable 7-bit ASCII characters.
* The following characters are reserved and must not be part of the name:
- ``<`` (less than)
- ``>`` (greater than)
- ``:`` (colon)
- ``"`` (double quote)
- ``/`` (forward slash)
- ``\`` (backslash)
- ``|`` (vertical bar)
- ``?`` (question mark)
- ``*`` (asterisk)
BuildStream will attempt to raise warnings when any of these rules are violated
but that may not always be possible.
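Projects that want to enforce these rules can promote the corresponding warnings to errors under ``fatal-warnings`` in ``project.conf``, exactly as the test project at the end of this diff does:

fatal-warnings:
- bad-element-suffix
- bad-characters-in-name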
@@ -85,6 +85,20 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
@pytest.mark.datafiles(DATA_DIR)
def test_build_invalid_filename_chars(datafiles, cli):
project = os.path.join(datafiles.dirname, datafiles.basename)
result = cli.run(project=project, args=strict_args(['build', 'invalid-chars|<>-in-name.bst'], 'non-strict'))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@pytest.mark.datafiles(DATA_DIR)
def test_build_invalid_filename_chars_dep(datafiles, cli):
project = os.path.join(datafiles.dirname, datafiles.basename)
result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("deps", [("run"), ("none"), ("build")])
def test_build_checkout_deps(datafiles, cli, deps):
kind: stack
description: |
This element itself has a valid name, but depends on elements that have
invalid names. This should also result in a warning.
depends:
- invalid-chars|<>-in-name.bst
kind: stack
description: |
The name of this file contains characters that are not allowed by
BuildStream, using it should raise a warning.
@@ -5,3 +5,4 @@ element-path: elements
fatal-warnings:
- bad-element-suffix
- bad-characters-in-name