Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (26)
......@@ -19,7 +19,6 @@
import multiprocessing
import os
import string
from collections.abc import Mapping
from .types import _KeyStrength
......@@ -29,6 +28,7 @@ from . import utils
from . import _yaml
from ._cas import CASRemote, CASRemoteSpec
from .storage._casbaseddirectory import CasBasedDirectory
CACHE_SIZE_FILE = "cache_size"
......@@ -112,37 +112,6 @@ class ArtifactCache():
self._calculate_cache_quota()
# get_artifact_fullname()
#
# Generate a full name for an artifact, including the
# project namespace, element name and cache key.
#
# This can also be used as a relative path safely, and
# will normalize parts of the element name such that only
# digits, letters and some select characters are allowed.
#
# Args:
# element (Element): The Element object
# key (str): The element's cache key
#
# Returns:
# (str): The relative path for the artifact
#
def get_artifact_fullname(self, element, key):
project = element._get_project()
# Normalize ostree ref unsupported chars
valid_chars = string.digits + string.ascii_letters + '-._'
element_name = ''.join([
x if x in valid_chars else '_'
for x in element.normal_name
])
assert key is not None
# assume project and element names are not allowed to contain slashes
return '{0}/{1}/{2}'.format(project.name, element_name, key)
# setup_remotes():
#
# Sets up which remotes to use
......@@ -241,7 +210,7 @@ class ArtifactCache():
for key in (strong_key, weak_key):
if key:
try:
ref = self.get_artifact_fullname(element, key)
ref = element.get_artifact_name(key)
self.cas.update_mtime(ref)
except CASError:
......@@ -521,7 +490,7 @@ class ArtifactCache():
# Returns: True if the artifact is in the cache, False otherwise
#
def contains(self, element, key):
ref = self.get_artifact_fullname(element, key)
ref = element.get_artifact_name(key)
return self.cas.contains(ref)
......@@ -538,19 +507,21 @@ class ArtifactCache():
# Returns: True if the subdir exists & is populated in the cache, False otherwise
#
def contains_subdir_artifact(self, element, key, subdir):
ref = self.get_artifact_fullname(element, key)
ref = element.get_artifact_name(key)
return self.cas.contains_subdir_artifact(ref, subdir)
# list_artifacts():
#
# List artifacts in this cache in LRU order.
#
# Args:
# glob (str): An optional glob expression to be used to list artifacts satisfying the glob
#
# Returns:
# ([str]) - A list of artifact names as generated by
# `ArtifactCache.get_artifact_fullname` in LRU order
# ([str]) - A list of artifact names as generated in LRU order
#
def list_artifacts(self):
return self.cas.list_refs()
def list_artifacts(self, *, glob=None):
return self.cas.list_refs(glob=glob)
# remove():
#
......@@ -559,8 +530,7 @@ class ArtifactCache():
#
# Args:
# ref (artifact_name): The name of the artifact to remove (as
# generated by
# `ArtifactCache.get_artifact_fullname`)
# generated by `Element.get_artifact_name`)
#
# Returns:
# (int): The amount of space recovered in the cache, in bytes
......@@ -606,7 +576,7 @@ class ArtifactCache():
# Returns: path to extracted artifact
#
def extract(self, element, key, subdir=None):
ref = self.get_artifact_fullname(element, key)
ref = element.get_artifact_name(key)
path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
......@@ -622,7 +592,7 @@ class ArtifactCache():
# keys (list): The cache keys to use
#
def commit(self, element, content, keys):
refs = [self.get_artifact_fullname(element, key) for key in keys]
refs = [element.get_artifact_name(key) for key in keys]
self.cas.commit(refs, content)
......@@ -638,8 +608,8 @@ class ArtifactCache():
# subdir (str): A subdirectory to limit the comparison to
#
def diff(self, element, key_a, key_b, *, subdir=None):
ref_a = self.get_artifact_fullname(element, key_a)
ref_b = self.get_artifact_fullname(element, key_b)
ref_a = element.get_artifact_name(key_a)
ref_b = element.get_artifact_name(key_b)
return self.cas.diff(ref_a, ref_b, subdir=subdir)
......@@ -700,7 +670,7 @@ class ArtifactCache():
# (ArtifactError): if there was an error
#
def push(self, element, keys):
refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
refs = [element.get_artifact_name(key) for key in list(keys)]
project = element._get_project()
......@@ -738,7 +708,7 @@ class ArtifactCache():
# (bool): True if pull was successful, False if artifact was not available
#
def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
ref = self.get_artifact_fullname(element, key)
ref = element.get_artifact_name(key)
project = element._get_project()
......@@ -850,11 +820,27 @@ class ArtifactCache():
# newkey (str): A new cache key for the artifact
#
def link_key(self, element, oldkey, newkey):
oldref = self.get_artifact_fullname(element, oldkey)
newref = self.get_artifact_fullname(element, newkey)
oldref = element.get_artifact_name(oldkey)
newref = element.get_artifact_name(newkey)
self.cas.link_ref(oldref, newref)
# get_artifact_logs():
#
# Get the logs of an existing artifact
#
# Args:
# ref (str): The ref of the artifact
#
# Returns:
# logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
#
def get_artifact_logs(self, ref):
descend = ["logs"]
cache_id = self.cas.resolve_ref(ref, update_mtime=True)
vdir = CasBasedDirectory(self.cas, cache_id).descend(descend)
return vdir
################################################
# Local Private Methods #
################################################
......
#
# Copyright (C) 2019 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# James Ennis <james.ennis@codethink.co.uk>
from . import Element
from . import _cachekey
from ._exceptions import ArtifactElementError
from ._loader.metaelement import MetaElement
# ArtifactElement()
#
# Object to be used for directly processing an artifact
#
# Args:
# context (Context): The Context object
# ref (str): The artifact ref
#
class ArtifactElement(Element):
def __init__(self, context, ref):
_, element, key = verify_artifact_ref(ref)
self._ref = ref
self._key = key
project = context.get_toplevel_project()
meta = MetaElement(project, element) # NOTE element has no .bst suffix
plugin_conf = None
super().__init__(context, project, meta, plugin_conf)
# Override Element.get_artifact_name()
def get_artifact_name(self, key=None):
return self._ref
# Dummy configure method
def configure(self, node):
pass
# Dummy preflight method
def preflight(self):
pass
# Override Element._calculate_cache_key
def _calculate_cache_key(self, dependencies=None):
return self._key
# verify_artifact_ref()
#
# Verify that a ref string matches the format of an artifact
#
# Args:
# ref (str): The artifact ref
#
# Returns:
# project (str): The project's name
# element (str): The element's name
# key (str): The cache key
#
# Raises:
# ArtifactElementError if the ref string does not match
# the expected format
#
def verify_artifact_ref(ref):
try:
project, element, key = ref.split('/', 2)  # This will raise a ValueError if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if len(key) != len(_cachekey.generate_key({})):
raise ValueError
except ValueError:
raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
return project, element, key
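As an illustrative aside (not part of the change itself): a well-formed ref is "<project>/<element>/<key>", where the key is assumed to be the 64-character sha256 hex digest that _cachekey.generate_key() produces. A minimal sketch of the expected behaviour, with hypothetical names:

good_ref = 'myproject/hello/' + '0' * 64
assert verify_artifact_ref(good_ref) == ('myproject', 'hello', '0' * 64)
# A ref with too few '/' separators, or with a key of the wrong length,
# raises ArtifactElementError instead of returning.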
......@@ -24,6 +24,7 @@ import stat
import errno
import uuid
import contextlib
from fnmatch import fnmatch
import grpc
......@@ -376,9 +377,7 @@ class CASCache():
for chunk in iter(lambda: tmp.read(_BUFFER_SIZE), b""):
h.update(chunk)
else:
tmp = stack.enter_context(utils._tempnamedfile(dir=self.tmpdir))
# Set mode bits to 0644
os.chmod(tmp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
tmp = stack.enter_context(self._temporary_object())
if path:
with open(path, 'rb') as f:
......@@ -474,22 +473,35 @@ class CASCache():
#
# List refs in Least Recently Modified (LRM) order.
#
# Args:
# glob (str) - An optional glob expression to be used to list refs satisfying the glob
#
# Returns:
# (list) - A list of refs in LRM order
#
def list_refs(self):
def list_refs(self, *, glob=None):
# string of: /path/to/repo/refs/heads
ref_heads = os.path.join(self.casdir, 'refs', 'heads')
path = ref_heads
if glob is not None:
globdir = os.path.dirname(glob)
if not any(c in "*?[" for c in globdir):
# The glob's directory prefix contains no globbing characters, so
# scope the os.walk() to that directory as an optimisation
path = os.path.join(ref_heads, globdir)
refs = []
mtimes = []
for root, _, files in os.walk(ref_heads):
for root, _, files in os.walk(path):
for filename in files:
ref_path = os.path.join(root, filename)
refs.append(os.path.relpath(ref_path, ref_heads))
# Obtain the mtime (the time a file was last modified)
mtimes.append(os.path.getmtime(ref_path))
relative_path = os.path.relpath(ref_path, ref_heads) # Relative to refs head
if not glob or fnmatch(relative_path, glob):
refs.append(relative_path)
# Obtain the mtime (the time a file was last modified)
mtimes.append(os.path.getmtime(ref_path))
# NOTE: Sorted will sort from earliest to latest, thus the
# first ref of this list will be the file modified earliest.
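A hedged aside on why the prefix optimisation above is safe, using a hypothetical glob: fnmatch's '*' matches any characters including '/', so every relative ref path that can match must start with the glob's literal directory prefix, and the final fnmatch() check still does the filtering:

import os
from fnmatch import fnmatch

glob = 'myproject/base-*'            # hypothetical ref glob
globdir = os.path.dirname(glob)      # 'myproject' -- contains no '*', '?' or '['
assert not any(c in "*?[" for c in globdir)

# Walking only refs/heads/myproject cannot miss a match, because any matching
# relative path necessarily begins with the literal 'myproject/' prefix.
assert fnmatch('myproject/base-alpine/' + '0' * 64, glob)
assert not fnmatch('otherproject/base-alpine/' + '0' * 64, glob)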
......@@ -827,6 +839,19 @@ class CASCache():
for dirnode in directory.directories:
yield from self._required_blobs(dirnode.digest)
# _temporary_object():
#
# Create a named temporary file with 0o0644 access rights.
#
# Returns:
# (file): A file object to a named temporary file.
@contextlib.contextmanager
def _temporary_object(self):
with utils._tempnamedfile(dir=self.tmpdir) as f:
os.chmod(f.name,
stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
yield f
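As background not stated in the diff: tempfile creates its files with mode 0o600 regardless of the process umask, so without the explicit chmod, objects committed through these temporary files would not be readable by group or others. A small sketch of that behaviour:

import os
import stat
import tempfile

with tempfile.NamedTemporaryFile() as f:
    # mkstemp/NamedTemporaryFile create files readable and writable
    # only by the owner (0o600), independent of the umask
    assert stat.S_IMODE(os.lstat(f.name).st_mode) == 0o600
    os.chmod(f.name, 0o644)
    assert stat.S_IMODE(os.lstat(f.name).st_mode) == 0o644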
# _ensure_blob():
#
# Fetch and add blob if it's not already local.
......@@ -844,7 +869,7 @@ class CASCache():
# already in local repository
return objpath
with utils._tempnamedfile(dir=self.tmpdir) as f:
with self._temporary_object() as f:
remote._fetch_blob(digest, f)
added_digest = self.add_object(path=f.name, link_directly=True)
......@@ -854,7 +879,7 @@ class CASCache():
def _batch_download_complete(self, batch):
for digest, data in batch.send():
with utils._tempnamedfile(dir=self.tmpdir) as f:
with self._temporary_object() as f:
f.write(data)
f.flush()
......
......@@ -361,14 +361,17 @@ class Context():
# (bool): Whether or not to use strict build plan
#
def get_strict(self):
if self._strict_build_plan is None:
# Either we're not overridden or we've never worked it out before
# so work out if we should be strict, and then cache the result
toplevel = self.get_toplevel_project()
overrides = self.get_overrides(toplevel.name)
self._strict_build_plan = _yaml.node_get(overrides, bool, 'strict', default_value=True)
# If it was set by the CLI, it overrides any config
if self._strict_build_plan is not None:
return self._strict_build_plan
toplevel = self.get_toplevel_project()
overrides = self.get_overrides(toplevel.name)
return _yaml.node_get(overrides, bool, 'strict', default_value=True)
# Ditto if we've already computed this, then we return the computed
# value which we cache here too.
return self._strict_build_plan
# get_cache_key():
#
......
......@@ -344,3 +344,12 @@ class AppError(BstError):
#
class SkipJob(Exception):
pass
# ArtifactElementError
#
# Raised when errors are encountered by artifact elements
#
class ArtifactElementError(BstError):
def __init__(self, message, *, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
import os
import sys
from contextlib import ExitStack
from fnmatch import fnmatch
from functools import partial
from tempfile import TemporaryDirectory
......@@ -901,38 +900,6 @@ def workspace_list(app):
#############################################################
# Artifact Commands #
#############################################################
def _classify_artifacts(names, cas, project_directory):
element_targets = []
artifact_refs = []
element_globs = []
artifact_globs = []
for name in names:
if name.endswith('.bst'):
if any(c in "*?[" for c in name):
element_globs.append(name)
else:
element_targets.append(name)
else:
if any(c in "*?[" for c in name):
artifact_globs.append(name)
else:
artifact_refs.append(name)
if element_globs:
for dirpath, _, filenames in os.walk(project_directory):
for filename in filenames:
element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
if any(fnmatch(element_path, glob) for glob in element_globs):
element_targets.append(element_path)
if artifact_globs:
artifact_refs.extend(ref for ref in cas.list_refs()
if any(fnmatch(ref, glob) for glob in artifact_globs))
return element_targets, artifact_refs
@cli.group(short_help="Manipulate cached artifacts")
def artifact():
"""Manipulate cached artifacts"""
......@@ -1111,53 +1078,24 @@ def artifact_push(app, elements, deps, remote):
@click.pass_obj
def artifact_log(app, artifacts):
"""Show logs of all artifacts"""
from .._exceptions import CASError
from .._message import MessageType
from .._pipeline import PipelineSelection
from ..storage._casbaseddirectory import CasBasedDirectory
with ExitStack() as stack:
stack.enter_context(app.initialized())
cache = app.context.artifactcache
elements, artifacts = _classify_artifacts(artifacts, cache.cas,
app.project.directory)
vdirs = []
extractdirs = []
if artifacts:
for ref in artifacts:
try:
cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
vdir = CasBasedDirectory(cache.cas, cache_id)
vdirs.append(vdir)
except CASError as e:
app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
continue
if elements:
elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
for element in elements:
if not element._cached():
app._message(MessageType.WARN, "Element {} is not cached".format(element))
continue
ref = cache.get_artifact_fullname(element, element._get_cache_key())
cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
vdir = CasBasedDirectory(cache.cas, cache_id)
vdirs.append(vdir)
for vdir in vdirs:
# NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
logsdir = vdir.descend(["logs"])
td = stack.enter_context(TemporaryDirectory())
logsdir.export_files(td, can_link=True)
extractdirs.append(td)
for extractdir in extractdirs:
for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
# NOTE: Should click gain the ability to pass files to the pager this can be optimised.
with open(log) as f:
data = f.read()
click.echo_via_pager(data)
with app.initialized():
logsdirs = app.stream.artifact_log(artifacts)
with ExitStack() as stack:
extractdirs = []
for logsdir in logsdirs:
# NOTE: If reading the logs feels unresponsive, here would be a good place
# to provide progress information.
td = stack.enter_context(TemporaryDirectory())
logsdir.export_files(td, can_link=True)
extractdirs.append(td)
for extractdir in extractdirs:
for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
# NOTE: Should click gain the ability to pass files to the pager this can be optimised.
with open(log) as f:
data = f.read()
click.echo_via_pager(data)
##################################################################
......
......@@ -20,8 +20,6 @@
import os
from functools import cmp_to_key
from collections.abc import Mapping
import tempfile
import shutil
from .._exceptions import LoadError, LoadErrorReason
from .. import Consistency
......@@ -49,12 +47,10 @@ from .._message import Message, MessageType
# context (Context): The Context object
# project (Project): The toplevel Project object
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
# tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
# loader in the case that this loader is a subproject loader.
#
class Loader():
def __init__(self, context, project, *, parent=None, tempdir=None):
def __init__(self, context, project, *, parent=None):
# Ensure we have an absolute path for the base directory
basedir = project.element_path
......@@ -73,7 +69,6 @@ class Loader():
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
self._tempdir = tempdir # A directory to cleanup
self._parent = parent # The parent loader
self._meta_elements = {} # Dict of resolved meta elements by name
......@@ -159,30 +154,6 @@ class Loader():
return ret
# cleanup():
#
# Remove temporary checkout directories of subprojects
#
def cleanup(self):
if self._parent and not self._tempdir:
# already done
return
# recurse
for loader in self._loaders.values():
# value may be None with nested junctions without overrides
if loader is not None:
loader.cleanup()
if not self._parent:
# basedir of top-level loader is never a temporary directory
return
# safe guard to not accidentally delete directories outside builddir
if self._tempdir.startswith(self._context.builddir + os.sep):
if os.path.exists(self._tempdir):
shutil.rmtree(self._tempdir)
###########################################
# Private Methods #
###########################################
......@@ -540,23 +511,28 @@ class Loader():
"Subproject has no ref for junction: {}".format(filename),
detail=detail)
if len(sources) == 1 and sources[0]._get_local_path():
workspace = element._get_workspace()
if workspace:
# If a workspace is open, load it from there instead
basedir = workspace.get_absolute_path()
elif len(sources) == 1 and sources[0]._get_local_path():
# Optimization for junctions with a single local source
basedir = sources[0]._get_local_path()
tempdir = None
else:
# Stage sources
os.makedirs(self._context.builddir, exist_ok=True)
basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
element._stage_sources_at(basedir, mount_workspaces=False)
tempdir = basedir
element._update_state()
basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
filename, element._get_cache_key())
if not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)
element._stage_sources_at(basedir, mount_workspaces=False)
# Load the project
project_dir = os.path.join(basedir, element.path)
try:
from .._project import Project
project = Project(project_dir, self._context, junction=element,
parent_loader=self, tempdir=tempdir)
parent_loader=self)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
......
......@@ -38,20 +38,20 @@ class MetaElement():
# sandbox: Configuration specific to the sandbox environment
# first_pass: The element is to be loaded with first pass configuration (junction)
#
def __init__(self, project, name, kind, provenance, sources, config,
variables, environment, env_nocache, public, sandbox,
first_pass):
def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
variables=None, environment=None, env_nocache=None, public=None,
sandbox=None, first_pass=False):
self.project = project
self.name = name
self.kind = kind
self.provenance = provenance
self.sources = sources
self.config = config
self.variables = variables
self.environment = environment
self.env_nocache = env_nocache
self.public = public
self.sandbox = sandbox
self.config = config or {}
self.variables = variables or {}
self.environment = environment or {}
self.env_nocache = env_nocache or []
self.public = public or {}
self.sandbox = sandbox or {}
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
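A quick illustrative check of the new defaults (the stand-in project object is hypothetical; MetaElement only stores the reference): only project and name are now required, which is what lets ArtifactElement build a bare MetaElement from just a ref:

project = object()   # stand-in for a real Project, for illustration only
meta = MetaElement(project, 'hello')
assert meta.kind is None and meta.provenance is None and meta.sources is None
assert meta.config == {} and meta.variables == {} and meta.environment == {}
assert meta.env_nocache == [] and meta.public == {} and meta.sandbox == {}
assert meta.first_pass is False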
......@@ -26,6 +26,7 @@ from . import utils
from . import _cachekey
from . import _site
from . import _yaml
from ._artifactelement import ArtifactElement
from ._profile import Topics, profile_start, profile_end
from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
......@@ -91,7 +92,7 @@ class ProjectConfig:
class Project():
def __init__(self, directory, context, *, junction=None, cli_options=None,
default_mirror=None, parent_loader=None, tempdir=None):
default_mirror=None, parent_loader=None):
# The project name
self.name = None
......@@ -147,7 +148,7 @@ class Project():
self._project_includes = None
profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
self._load(parent_loader=parent_loader, tempdir=tempdir)
self._load(parent_loader=parent_loader)
profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
self._partially_loaded = True
......@@ -255,6 +256,19 @@ class Project():
else:
return self.config.element_factory.create(self._context, self, meta)
# create_artifact_element()
#
# Instantiate and return an ArtifactElement
#
# Args:
# ref (str): A string of the artifact ref
#
# Returns:
# (ArtifactElement): A newly created ArtifactElement object of the appropriate kind
#
def create_artifact_element(self, ref):
return ArtifactElement(self._context, ref)
# create_source()
#
# Instantiate and return a Source
......@@ -389,8 +403,6 @@ class Project():
# Cleans up resources used loading elements
#
def cleanup(self):
self.loader.cleanup()
# Reset the element loader state
Element._reset_load_state()
......@@ -439,7 +451,7 @@ class Project():
#
# Raises: LoadError if there was a problem with the project.conf
#
def _load(self, parent_loader=None, tempdir=None):
def _load(self, parent_loader=None):
# Load builtin default
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
......@@ -505,8 +517,7 @@ class Project():
self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
self.loader = Loader(self._context, self,
parent=parent_loader,
tempdir=tempdir)
parent=parent_loader)
self._project_includes = Includes(self.loader, copy_tree=False)
......
......@@ -27,8 +27,10 @@ import shutil
import tarfile
import tempfile
from contextlib import contextmanager, suppress
from fnmatch import fnmatch
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
from ._artifactelement import verify_artifact_ref
from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
from ._message import Message, MessageType
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
from ._pipeline import Pipeline, PipelineSelection
......@@ -108,19 +110,21 @@ class Stream():
def load_selection(self, targets, *,
selection=PipelineSelection.NONE,
except_targets=(),
use_artifact_config=False):
use_artifact_config=False,
load_refs=False):
profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
elements, _ = self._load(targets, (),
selection=selection,
except_targets=except_targets,
fetch_subprojects=False,
use_artifact_config=use_artifact_config)
target_objects, _ = self._load(targets, (),
selection=selection,
except_targets=except_targets,
fetch_subprojects=False,
use_artifact_config=use_artifact_config,
load_refs=load_refs)
profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
return elements
return target_objects
# shell()
#
......@@ -491,6 +495,31 @@ class Stream():
raise StreamError("Error while staging dependencies into a sandbox"
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
# artifact_log()
#
# Show the full log of an artifact
#
# Args:
# targets (str): Targets to view the logs of
#
# Returns:
# logsdir (list): A list of CasBasedDirectory objects containing artifact logs
#
def artifact_log(self, targets):
# Return list of Element and/or ArtifactElement objects
target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
logsdirs = []
for obj in target_objects:
ref = obj.get_artifact_name()
if not obj._cached():
self._message(MessageType.WARN, "{} is not cached".format(ref))
continue
logsdirs.append(self._artifacts.get_artifact_logs(ref))
return logsdirs
# source_checkout()
#
# Checkout sources of the target element to the specified location
......@@ -922,25 +951,36 @@ class Stream():
use_artifact_config=False,
artifact_remote_url=None,
fetch_subprojects=False,
dynamic_plan=False):
dynamic_plan=False,
load_refs=False):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
if target_artifacts and not load_refs:
detail = '\n'.join(target_artifacts)
raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
# Load rewritable if we have any tracking selection to make
rewritable = False
if track_targets:
rewritable = True
# Load all targets
# Load all target elements
elements, except_elements, track_elements, track_except_elements = \
self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
rewritable=rewritable,
fetch_subprojects=fetch_subprojects)
# Obtain the ArtifactElement objects
artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
# Optionally filter out junction elements
if ignore_junction_targets:
elements = [e for e in elements if e.get_kind() != 'junction']
# Hold on to the targets
self.targets = elements
self.targets = elements + artifacts
# Here we should raise an error if the track_elements targets
# are not dependencies of the primary targets, this is not
......@@ -997,9 +1037,9 @@ class Stream():
# Now move on to loading primary selection.
#
self._pipeline.resolve_elements(elements)
selected = self._pipeline.get_selection(elements, selection, silent=False)
selected = self._pipeline.except_elements(elements,
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
selected = self._pipeline.except_elements(self.targets,
selected,
except_elements)
......@@ -1331,3 +1371,55 @@ class Stream():
required_list.append(element)
return required_list
# _classify_artifacts()
#
# Split up a list of targets into element names and artifact refs
#
# Args:
# targets (list): A list of targets
#
# Returns:
# (list): element names present in the targets
# (list): artifact refs present in the targets
#
def _classify_artifacts(self, targets):
element_targets = []
artifact_refs = []
element_globs = []
artifact_globs = []
for target in targets:
if target.endswith('.bst'):
if any(c in "*?[" for c in target):
element_globs.append(target)
else:
element_targets.append(target)
else:
if any(c in "*?[" for c in target):
artifact_globs.append(target)
else:
try:
verify_artifact_ref(target)
except ArtifactElementError:
element_targets.append(target)
continue
artifact_refs.append(target)
if element_globs:
for dirpath, _, filenames in os.walk(self._project.element_path):
for filename in filenames:
element_path = os.path.join(dirpath, filename)
length = len(self._project.element_path) + 1
element_path = element_path[length:] # Strip out the element_path
if any(fnmatch(element_path, glob) for glob in element_globs):
element_targets.append(element_path)
if artifact_globs:
for glob in artifact_globs:
artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
if not artifact_refs:
self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
return element_targets, artifact_refs
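To make the classification rule concrete, a simplified self-contained restatement (illustrative only; it omits the verify_artifact_ref() fallback and the glob expansion performed above):

def classify(targets):
    # '.bst' names are elements, everything else is treated as an artifact ref;
    # names containing glob characters go into the corresponding glob list.
    elements, element_globs, refs, ref_globs = [], [], [], []
    for target in targets:
        has_glob = any(c in "*?[" for c in target)
        if target.endswith('.bst'):
            (element_globs if has_glob else elements).append(target)
        else:
            (ref_globs if has_glob else refs).append(target)
    return elements, element_globs, refs, ref_globs

key = '0' * 64   # hypothetical cache key
assert classify(['hello.bst', 'core/*.bst', 'myproject/hello/' + key, 'myproject/hello/*']) == \
    (['hello.bst'], ['core/*.bst'], ['myproject/hello/' + key], ['myproject/hello/*'])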
......@@ -365,8 +365,8 @@ _sentinel = object()
#
def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel, allow_none=False):
value = node.get(key, default_value)
provenance = node_get_provenance(node)
if value is _sentinel:
provenance = node_get_provenance(node)
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dictionary did not contain expected key '{}'".format(provenance, key))
......@@ -914,6 +914,10 @@ RoundTripRepresenter.add_representer(SanitizedDict,
SafeRepresenter.represent_dict)
# Types we can short-circuit in node_sanitize for speed.
__SANITIZE_SHORT_CIRCUIT_TYPES = (int, float, str, bool, tuple)
# node_sanitize()
#
# Returns an alphabetically ordered recursive copy
......@@ -922,9 +926,21 @@ RoundTripRepresenter.add_representer(SanitizedDict,
# Only dicts are ordered, list elements are left in order.
#
def node_sanitize(node):
# Short-circuit None which occurs ca. twice per element
if node is None:
return node
node_type = type(node)
# Next short-circuit integers, floats, strings, booleans, and tuples
if node_type in __SANITIZE_SHORT_CIRCUIT_TYPES:
return node
# Now short-circuit lists. Note this is only for the raw list
# type, CommentedSeq and others get caught later.
elif node_type is list:
return [node_sanitize(elt) for elt in node]
if isinstance(node, collections.abc.Mapping):
# Finally, ChainMap, dict and other Mappings need special handling
if node_type in (dict, ChainMap) or isinstance(node, collections.abc.Mapping):
result = SanitizedDict()
key_list = [key for key, _ in node_items(node)]
......@@ -932,10 +948,12 @@ def node_sanitize(node):
result[key] = node_sanitize(node[key])
return result
# Catch the case of CommentedSeq and friends. This is more rare and so
# we keep complexity down by still using isinstance here.
elif isinstance(node, list):
return [node_sanitize(elt) for elt in node]
# Everything else (such as commented scalars) just gets returned as-is.
return node
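A hedged micro-benchmark of the idea behind these short-circuits (absolute numbers vary; the ordering is the point): a plain type() membership test against a small tuple avoids the comparatively slow ABC isinstance() machinery that collections.abc.Mapping triggers:

import collections.abc
import timeit

node = {'a': 1}
fast = timeit.timeit(lambda: type(node) in (int, float, str, bool, tuple), number=100000)
slow = timeit.timeit(lambda: isinstance(node, collections.abc.Mapping), number=100000)
print(fast, slow)   # on CPython the tuple-membership check is typically the cheaper one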
......@@ -1064,15 +1082,52 @@ class ChainMap(collections.ChainMap):
return default
# Node copying
#
# Unfortunately we copy nodes a *lot* and `isinstance()` is super-slow when
# things from collections.abc get involved. The result is the following
# intricate but substantially faster group of tuples and the use of `in`.
#
# If any of the {node,list}_{chain_,}copy routines raise a ValueError
# then it's likely additional types need adding to these tuples.
# When chaining a copy, these types are skipped since the ChainMap will
# retrieve them from the source node when needed. Other copiers might copy
# them, so we call them __QUICK_TYPES.
__QUICK_TYPES = (str, bool,
yaml.scalarstring.PreservedScalarString,
yaml.scalarstring.SingleQuotedScalarString,
yaml.scalarstring.DoubleQuotedScalarString)
# These types have to be iterated like a dictionary
__DICT_TYPES = (dict, ChainMap, yaml.comments.CommentedMap)
# These types have to be iterated like a list
__LIST_TYPES = (list, yaml.comments.CommentedSeq)
# These are the provenance types, which have to be cloned rather than any other
# copying tactic.
__PROVENANCE_TYPES = (Provenance, DictProvenance, MemberProvenance, ElementProvenance)
# These are the directives used to compose lists; we need this because it's
# slightly faster during the node_final_assertions checks
__NODE_ASSERT_COMPOSITION_DIRECTIVES = ('(>)', '(<)', '(=)')
def node_chain_copy(source):
copy = ChainMap({}, source)
for key, value in source.items():
if isinstance(value, collections.abc.Mapping):
value_type = type(value)
if value_type in __DICT_TYPES:
copy[key] = node_chain_copy(value)
elif isinstance(value, list):
elif value_type in __LIST_TYPES:
copy[key] = list_chain_copy(value)
elif isinstance(value, Provenance):
elif value_type in __PROVENANCE_TYPES:
copy[key] = value.clone()
elif value_type in __QUICK_TYPES:
pass # No need to copy these, the chainmap deals with it
else:
raise ValueError("Unable to be quick about node_chain_copy of {}".format(value_type))
return copy
......@@ -1080,14 +1135,17 @@ def node_chain_copy(source):
def list_chain_copy(source):
copy = []
for item in source:
if isinstance(item, collections.abc.Mapping):
item_type = type(item)
if item_type in __DICT_TYPES:
copy.append(node_chain_copy(item))
elif isinstance(item, list):
elif item_type in __LIST_TYPES:
copy.append(list_chain_copy(item))
elif isinstance(item, Provenance):
elif item_type in __PROVENANCE_TYPES:
copy.append(item.clone())
else:
elif item_type in __QUICK_TYPES:
copy.append(item)
else: # Fallback
raise ValueError("Unable to be quick about list_chain_copy of {}".format(item_type))
return copy
......@@ -1095,14 +1153,17 @@ def list_chain_copy(source):
def node_copy(source):
copy = {}
for key, value in source.items():
if isinstance(value, collections.abc.Mapping):
value_type = type(value)
if value_type in __DICT_TYPES:
copy[key] = node_copy(value)
elif isinstance(value, list):
elif value_type in __LIST_TYPES:
copy[key] = list_copy(value)
elif isinstance(value, Provenance):
elif value_type in __PROVENANCE_TYPES:
copy[key] = value.clone()
else:
elif value_type in __QUICK_TYPES:
copy[key] = value
else:
raise ValueError("Unable to be quick about node_copy of {}".format(value_type))
ensure_provenance(copy)
......@@ -1112,14 +1173,17 @@ def node_copy(source):
def list_copy(source):
copy = []
for item in source:
if isinstance(item, collections.abc.Mapping):
item_type = type(item)
if item_type in __DICT_TYPES:
copy.append(node_copy(item))
elif isinstance(item, list):
elif item_type in __LIST_TYPES:
copy.append(list_copy(item))
elif isinstance(item, Provenance):
elif item_type in __PROVENANCE_TYPES:
copy.append(item.clone())
else:
elif item_type in __QUICK_TYPES:
copy.append(item)
else:
raise ValueError("Unable to be quick about list_copy of {}".format(item_type))
return copy
......@@ -1142,22 +1206,26 @@ def node_final_assertions(node):
# indicates that the user intended to override a list which
# never existed in the underlying data
#
if key in ['(>)', '(<)', '(=)']:
if key in __NODE_ASSERT_COMPOSITION_DIRECTIVES:
provenance = node_get_provenance(node, key)
raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
"{}: Attempt to override non-existing list".format(provenance))
if isinstance(value, collections.abc.Mapping):
value_type = type(value)
if value_type in __DICT_TYPES:
node_final_assertions(value)
elif isinstance(value, list):
elif value_type in __LIST_TYPES:
list_final_assertions(value)
def list_final_assertions(values):
for value in values:
if isinstance(value, collections.abc.Mapping):
value_type = type(value)
if value_type in __DICT_TYPES:
node_final_assertions(value)
elif isinstance(value, list):
elif value_type in __LIST_TYPES:
list_final_assertions(value)
......
......@@ -82,6 +82,7 @@ import contextlib
from contextlib import contextmanager
import tempfile
import shutil
import string
from . import _yaml
from ._variables import Variables
......@@ -577,6 +578,38 @@ class Element(Plugin):
self.__assert_cached()
return self.__compute_splits(include, exclude, orphans)
def get_artifact_name(self, key=None):
"""Compute and return this element's full artifact name
Generate a full name for an artifact, including the project
namespace, element name and cache key.
This can also be used as a relative path safely, and
will normalize parts of the element name such that only
digits, letters and some select characters are allowed.
Args:
key (str): The element's cache key. Defaults to None
Returns:
(str): The relative path for the artifact
"""
project = self._get_project()
if key is None:
key = self._get_cache_key()
assert key is not None
valid_chars = string.digits + string.ascii_letters + '-._'
element_name = ''.join([
x if x in valid_chars else '_'
for x in self.normal_name
])
# Note that project names are not allowed to contain slashes. Element names containing
# a '/' will have this replaced with a '-' upon Element object instantiation.
return '{0}/{1}/{2}'.format(project.name, element_name, key)
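A small illustration of the normalisation above, using hypothetical names: characters outside the allowed set collapse to underscores, so the result is always safe to use as a relative ref path:

import string

valid_chars = string.digits + string.ascii_letters + '-._'

def normalise(name):
    # The same substitution rule as above, shown standalone for illustration
    return ''.join(x if x in valid_chars else '_' for x in name)

assert normalise('base-alpine') == 'base-alpine'
assert normalise('hello world@v2') == 'hello_world_v2'
# e.g. project 'myproject', element 'base-alpine' and a 64-character key give
# 'myproject/base-alpine/<key>'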
def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
"""Stage this element's output artifact in the sandbox
......@@ -1118,7 +1151,7 @@ class Element(Plugin):
e.name for e in self.dependencies(Scope.BUILD, recurse=False)
]
self.__weak_cache_key = self.__calculate_cache_key(dependencies)
self.__weak_cache_key = self._calculate_cache_key(dependencies)
if self.__weak_cache_key is None:
# Weak cache key could not be calculated yet
......@@ -1147,8 +1180,7 @@ class Element(Plugin):
dependencies = [
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self.__calculate_cache_key(dependencies)
self.__strict_cache_key = self._calculate_cache_key(dependencies)
if self.__strict_cache_key is None:
# Strict cache key could not be calculated yet
return
......@@ -1190,7 +1222,7 @@ class Element(Plugin):
dependencies = [
e._get_cache_key() for e in self.dependencies(Scope.BUILD)
]
self.__cache_key = self.__calculate_cache_key(dependencies)
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
# Strong cache key could not be calculated yet
......@@ -2032,41 +2064,7 @@ class Element(Plugin):
source._fetch(previous_sources)
previous_sources.append(source)
#############################################################
# Private Local Methods #
#############################################################
# __update_source_state()
#
# Updates source consistency state
#
def __update_source_state(self):
# Cannot resolve source state until tracked
if self.__tracking_scheduled:
return
self.__consistency = Consistency.CACHED
workspace = self._get_workspace()
# Special case for workspaces
if workspace:
# A workspace is considered inconsistent in the case
# that its directory went missing
#
fullpath = workspace.get_absolute_path()
if not os.path.exists(fullpath):
self.__consistency = Consistency.INCONSISTENT
else:
# Determine overall consistency of the element
for source in self.__sources:
source._update_state()
source_consistency = source._get_consistency()
self.__consistency = min(self.__consistency, source_consistency)
# __calculate_cache_key():
# _calculate_cache_key():
#
# Calculates the cache key
#
......@@ -2075,7 +2073,7 @@ class Element(Plugin):
#
# None is returned if information for the cache key is missing.
#
def __calculate_cache_key(self, dependencies):
def _calculate_cache_key(self, dependencies):
# No cache keys for dependencies which have no cache keys
if None in dependencies:
return None
......@@ -2114,6 +2112,40 @@ class Element(Plugin):
return _cachekey.generate_key(cache_key_dict)
#############################################################
# Private Local Methods #
#############################################################
# __update_source_state()
#
# Updates source consistency state
#
def __update_source_state(self):
# Cannot resolve source state until tracked
if self.__tracking_scheduled:
return
self.__consistency = Consistency.CACHED
workspace = self._get_workspace()
# Special case for workspaces
if workspace:
# A workspace is considered inconsistent in the case
# that its directory went missing
#
fullpath = workspace.get_absolute_path()
if not os.path.exists(fullpath):
self.__consistency = Consistency.INCONSISTENT
else:
# Determine overall consistency of the element
for source in self.__sources:
source._update_state()
source_consistency = source._get_consistency()
self.__consistency = min(self.__consistency, source_consistency)
# __can_build_incrementally()
#
# Check if the element can be built incrementally, this
......@@ -2297,6 +2329,8 @@ class Element(Plugin):
defaults['public'] = element_public
def __init_defaults(self, plugin_conf):
if plugin_conf is None:
return
# Defaults are loaded once per class and then reused
#
......
......@@ -211,7 +211,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert artifactcache.contains(element, element_key)
# Retrieve the Directory object from the cached artifact
artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
artifact_ref = element.get_artifact_name(element_key)
artifact_digest = cas.resolve_ref(artifact_ref)
queue = multiprocessing.Queue()
......
......@@ -190,7 +190,7 @@ def test_push_directory(cli, tmpdir, datafiles):
assert artifactcache.has_push_remotes(element=element)
# Recreate the CasBasedDirectory object from the cached artifact
artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
artifact_ref = element.get_artifact_name(element_key)
artifact_digest = cas.resolve_ref(artifact_ref)
queue = multiprocessing.Queue()
......
import os
import shutil
import stat
import pytest
from buildstream.plugintestutils import cli
from tests.testutils import create_artifact_share, generate_junction
......@@ -462,3 +463,74 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
assert shareproject.repo not in result.stderr
assert shareuser.repo not in result.stderr
assert sharecli.repo in result.stderr
@pytest.mark.datafiles(DATA_DIR)
def test_pull_access_rights(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
checkout = os.path.join(str(tmpdir), 'checkout')
# Work-around datafiles not preserving mode
os.chmod(os.path.join(project, 'files/bin-files/usr/bin/hello'), 0o0755)
# We need a big file that does not go into a batch to test a different
# code path
os.makedirs(os.path.join(project, 'files/dev-files/usr/share'), exist_ok=True)
with open(os.path.join(project, 'files/dev-files/usr/share/big-file'), 'w') as f:
buf = ' ' * 4096
for _ in range(1024):
f.write(buf)
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
cli.configure({
'artifacts': {'url': share.repo, 'push': True}
})
result = cli.run(project=project, args=['build', 'compose-all.bst'])
result.assert_success()
result = cli.run(project=project,
args=['artifact', 'checkout',
'--hardlinks', '--no-integrate',
'compose-all.bst',
'--directory', checkout])
result.assert_success()
st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0755
st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
shutil.rmtree(checkout)
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
result.assert_success()
result = cli.run(project=project,
args=['artifact', 'checkout',
'--hardlinks', '--no-integrate',
'compose-all.bst',
'--directory', checkout])
result.assert_success()
st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0755
st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644