Skip to content
Snippets Groups Projects
Commit a3caab8c authored by James Ennis's avatar James Ennis
Browse files

WIP

parent cc32fcaa
No related branches found
No related tags found
No related merge requests found
Pipeline #45648050 failed
......@@ -16,7 +16,9 @@
#
# Authors:
# James Ennis <james.ennis@codethink.co.uk>
from . import Element
from ._exceptions import ArtifactElementError
from ._loader.metaelement import MetaElement
# ArtifactElement()
......@@ -27,20 +29,34 @@ from ._exceptions import ArtifactElementError
# context (Context): The Context object
# ref (str): The artifact ref
#
class ArtifactElement():
class ArtifactElement(Element):
def __init__(self, context, ref):
self._ref = ref
try:
project_name, element, key = ref.split('/', 2)
except ValueError:
raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
self._project_name = project_name
self._element = element
self._key = key
self._context = context
def get_artifact_name(self):
    # Compose the canonical artifact name: <project>/<element>/<cache-key>
    parts = (self._project_name, self._element, self._key)
    return '/'.join(parts)
project = context.get_toplevel_project()
meta = MetaElement(project, element) # NOTE element has no .bst suffix
plugin_conf = None
def _cached(self):
return self._context.artifactcache.contains_ref(self.get_artifact_name())
super().__init__(context, project, meta, plugin_conf)
# override Element.get_artifact_name()
def get_artifact_name(self, key=None):
    # The artifact name of an ArtifactElement is exactly the ref it was
    # constructed from; `key` is accepted only for signature compatibility
    # with Element.get_artifact_name() and is deliberately ignored.
    ref = self._ref
    return ref
# Dummy configure method
def configure(self, node):
    """No-op: an ArtifactElement carries no user configuration to load."""
# Dummy preflight method
def preflight(self):
    """No-op: an ArtifactElement has no sources to preflight-check."""
# Override Element._calculate_cache_key
def _calculate_cache_key(self, dependencies=None):
return self._key
......@@ -38,20 +38,20 @@ class MetaElement():
# sandbox: Configuration specific to the sandbox environment
# first_pass: The element is to be loaded with first pass configuration (junction)
#
def __init__(self, project, name, kind, provenance, sources, config,
variables, environment, env_nocache, public, sandbox,
first_pass):
def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
variables=None, environment=None, env_nocache=None, public=None,
sandbox=None, first_pass=False):
self.project = project
self.name = name
self.kind = kind
self.provenance = provenance
self.sources = sources
self.config = config
self.variables = variables
self.environment = environment
self.env_nocache = env_nocache
self.public = public
self.sandbox = sandbox
self.config = config or {}
self.variables = variables or {}
self.environment = environment or {}
self.env_nocache = env_nocache or []
self.public = public or {}
self.sandbox = sandbox or {}
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
......@@ -29,7 +29,7 @@ import tempfile
from contextlib import contextmanager, suppress
from fnmatch import fnmatch
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
from ._exceptions import StreamError, ImplError, BstError, ArtifactError, set_last_task_error
from ._message import Message, MessageType
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
from ._pipeline import Pipeline, PipelineSelection
......@@ -109,19 +109,21 @@ class Stream():
def load_selection(self, targets, *,
selection=PipelineSelection.NONE,
except_targets=(),
use_artifact_config=False):
use_artifact_config=False,
load_refs=False):
profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
elements, _ = self._load(targets, (),
selection=selection,
except_targets=except_targets,
fetch_subprojects=False,
use_artifact_config=use_artifact_config)
target_objects, _ = self._load(targets, (),
selection=selection,
except_targets=except_targets,
fetch_subprojects=False,
use_artifact_config=use_artifact_config,
load_refs=load_refs)
profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
return elements
return target_objects
# shell()
#
......@@ -493,25 +495,11 @@ class Stream():
# logsdir (list): A list of CasBasedDirectory objects containing artifact logs
#
def artifact_log(self, targets):
# Distinguish the artifacts from the elements
elements, artifacts = self._classify_artifacts(targets)
# Obtain Element objects
if elements:
elements = self.load_selection(elements, selection=PipelineSelection.NONE)
# Obtain ArtifactElement objects
artifact_elements = []
if artifacts:
for ref in artifacts:
artifact_element = self._project.create_artifact_element(ref)
artifact_elements.append(artifact_element)
# Concatenate the lists
objects = elements + artifact_elements
# Returns list of Element and/or ArtifactElement objects
target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
logsdirs = []
for obj in objects:
for obj in target_objects:
ref = obj.get_artifact_name()
if not obj._cached():
self._message(MessageType.WARN, "{} is not cached".format(ref))
......@@ -952,25 +940,35 @@ class Stream():
use_artifact_config=False,
artifact_remote_url=None,
fetch_subprojects=False,
dynamic_plan=False):
dynamic_plan=False,
load_refs=False):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
if target_artifacts and not load_refs:
detail = ''.join(target_artifacts)
raise ArtifactError("Cannot perform this operation with artifact refs:", detail=detail)
# Load rewritable if we have any tracking selection to make
rewritable = False
if track_targets:
rewritable = True
# Load all targets
# Load all target elements
elements, except_elements, track_elements, track_except_elements = \
self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
rewritable=rewritable,
fetch_subprojects=fetch_subprojects)
artifacts = self._load_refs(target_artifacts)
# Optionally filter out junction elements
if ignore_junction_targets:
elements = [e for e in elements if e.get_kind() != 'junction']
# Hold on to the targets
self.targets = elements
self.targets = elements + artifacts
# Here we should raise an error if the track_elements targets
# are not dependencies of the primary targets, this is not
......@@ -1027,9 +1025,9 @@ class Stream():
# Now move on to loading primary selection.
#
self._pipeline.resolve_elements(elements)
selected = self._pipeline.get_selection(elements, selection, silent=False)
selected = self._pipeline.except_elements(elements,
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
selected = self._pipeline.except_elements(self.targets,
selected,
except_elements)
......@@ -1053,6 +1051,20 @@ class Stream():
return selected, track_selected
# _load_refs()
#
# Create and resolve ArtifactElement objects
#
def _load_refs(self, refs):
artifact_elements = []
for ref in refs:
artifact_element = self._project.create_artifact_element(ref)
artifact_elements.append(artifact_element)
self._pipeline.resolve_elements(artifact_elements)
return artifact_elements
# _message()
#
# Local message propagator
......@@ -1389,6 +1401,15 @@ class Stream():
if any(c in "*?[" for c in target):
artifact_globs.append(target)
else:
try:
ref = target.split('/', 2)
key = ref[2]
except IndexError:
element_targets.append(target)
continue
if not len(key) == 64:
element_targets.append(target)
continue
artifact_refs.append(target)
if element_globs:
......
......@@ -1150,7 +1150,7 @@ class Element(Plugin):
e.name for e in self.dependencies(Scope.BUILD, recurse=False)
]
self.__weak_cache_key = self.__calculate_cache_key(dependencies)
self.__weak_cache_key = self._calculate_cache_key(dependencies)
if self.__weak_cache_key is None:
# Weak cache key could not be calculated yet
......@@ -1179,8 +1179,7 @@ class Element(Plugin):
dependencies = [
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self.__calculate_cache_key(dependencies)
self.__strict_cache_key = self._calculate_cache_key(dependencies)
if self.__strict_cache_key is None:
# Strict cache key could not be calculated yet
return
......@@ -1222,7 +1221,7 @@ class Element(Plugin):
dependencies = [
e._get_cache_key() for e in self.dependencies(Scope.BUILD)
]
self.__cache_key = self.__calculate_cache_key(dependencies)
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
# Strong cache key could not be calculated yet
......@@ -2098,7 +2097,7 @@ class Element(Plugin):
source_consistency = source._get_consistency()
self.__consistency = min(self.__consistency, source_consistency)
# __calculate_cache_key():
# _calculate_cache_key():
#
# Calculates the cache key
#
......@@ -2107,7 +2106,7 @@ class Element(Plugin):
#
# None is returned if information for the cache key is missing.
#
def __calculate_cache_key(self, dependencies):
def _calculate_cache_key(self, dependencies):
# No cache keys for dependencies which have no cache keys
if None in dependencies:
return None
......@@ -2329,6 +2328,8 @@ class Element(Plugin):
defaults['public'] = element_public
def __init_defaults(self, plugin_conf):
if plugin_conf is None:
return
# Defaults are loaded once per class and then reused
#
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment