Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (7)
...@@ -29,6 +29,7 @@ from .. import _yaml ...@@ -29,6 +29,7 @@ from .. import _yaml
from ..element import Element from ..element import Element
from .._profile import Topics, profile_start, profile_end from .._profile import Topics, profile_start, profile_end
from .._includes import Includes from .._includes import Includes
from .._yamlcache import YamlCache
from .types import Symbol, Dependency from .types import Symbol, Dependency
from .loadelement import LoadElement from .loadelement import LoadElement
...@@ -108,13 +109,19 @@ class Loader(): ...@@ -108,13 +109,19 @@ class Loader():
# #
deps = [] deps = []
for target in targets: # XXX This will need to be changed to the context's top-level project if this method
profile_start(Topics.LOAD_PROJECT, target) # is ever used for subprojects
junction, name, loader = self._parse_name(target, rewritable, ticker, top_dir = self.project.directory
fetch_subprojects=fetch_subprojects)
loader._load_file(name, rewritable, ticker, fetch_subprojects) cache_file = YamlCache.get_cache_file(top_dir)
deps.append(Dependency(name, junction=junction)) with YamlCache.open(self._context, cache_file) as yaml_cache:
profile_end(Topics.LOAD_PROJECT, target) for target in targets:
profile_start(Topics.LOAD_PROJECT, target)
junction, name, loader = self._parse_name(target, rewritable, ticker,
fetch_subprojects=fetch_subprojects)
loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
deps.append(Dependency(name, junction=junction))
profile_end(Topics.LOAD_PROJECT, target)
# #
# Now that we've resolve the dependencies, scan them for circular dependencies # Now that we've resolve the dependencies, scan them for circular dependencies
...@@ -201,11 +208,12 @@ class Loader(): ...@@ -201,11 +208,12 @@ class Loader():
# rewritable (bool): Whether we should load in round trippable mode # rewritable (bool): Whether we should load in round trippable mode
# ticker (callable): A callback to report loaded filenames to the frontend # ticker (callable): A callback to report loaded filenames to the frontend
# fetch_subprojects (bool): Whether to fetch subprojects while loading # fetch_subprojects (bool): Whether to fetch subprojects while loading
# yaml_cache (YamlCache): A yaml cache
# #
# Returns: # Returns:
# (LoadElement): A loaded LoadElement # (LoadElement): A loaded LoadElement
# #
def _load_file(self, filename, rewritable, ticker, fetch_subprojects): def _load_file(self, filename, rewritable, ticker, fetch_subprojects, yaml_cache=None):
# Silently ignore already loaded files # Silently ignore already loaded files
if filename in self._elements: if filename in self._elements:
...@@ -218,7 +226,8 @@ class Loader(): ...@@ -218,7 +226,8 @@ class Loader():
# Load the data and process any conditional statements therein # Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename) fullpath = os.path.join(self._basedir, filename)
try: try:
node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project) node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable,
project=self.project, yaml_cache=yaml_cache)
except LoadError as e: except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE: if e.reason == LoadErrorReason.MISSING_FILE:
# If we can't find the file, try to suggest plausible # If we can't find the file, try to suggest plausible
...@@ -261,13 +270,13 @@ class Loader(): ...@@ -261,13 +270,13 @@ class Loader():
# Load all dependency files for the new LoadElement # Load all dependency files for the new LoadElement
for dep in element.deps: for dep in element.deps:
if dep.junction: if dep.junction:
self._load_file(dep.junction, rewritable, ticker, fetch_subprojects) self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker, loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
fetch_subprojects=fetch_subprojects) fetch_subprojects=fetch_subprojects)
else: else:
loader = self loader = self
dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects) dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects, yaml_cache)
if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction': if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
raise LoadError(LoadErrorReason.INVALID_DATA, raise LoadError(LoadErrorReason.INVALID_DATA,
......
...@@ -183,20 +183,32 @@ class CompositeTypeError(CompositeError): ...@@ -183,20 +183,32 @@ class CompositeTypeError(CompositeError):
# shortname (str): The filename in shorthand for error reporting (or None) # shortname (str): The filename in shorthand for error reporting (or None)
# copy_tree (bool): Whether to make a copy, preserving the original toplevels # copy_tree (bool): Whether to make a copy, preserving the original toplevels
# for later serialization # for later serialization
# yaml_cache (YamlCache): A yaml cache to consult rather than parsing
# #
# Returns (dict): A loaded copy of the YAML file with provenance information # Returns (dict): A loaded copy of the YAML file with provenance information
# #
# Raises: LoadError # Raises: LoadError
# #
def load(filename, shortname=None, copy_tree=False, *, project=None): def load(filename, shortname=None, copy_tree=False, *, project=None, yaml_cache=None):
if not shortname: if not shortname:
shortname = filename shortname = filename
file = ProvenanceFile(filename, shortname, project) file = ProvenanceFile(filename, shortname, project)
try: try:
data = None
with open(filename) as f: with open(filename) as f:
return load_data(f, file, copy_tree=copy_tree) contents = f.read()
if yaml_cache:
data, key = yaml_cache.get(project, filename, contents, copy_tree)
if not data:
data = load_data(contents, file, copy_tree=copy_tree)
if yaml_cache:
yaml_cache.put_from_key(project, filename, key, data)
return data
except FileNotFoundError as e: except FileNotFoundError as e:
raise LoadError(LoadErrorReason.MISSING_FILE, raise LoadError(LoadErrorReason.MISSING_FILE,
"Could not find file at {}".format(filename)) from e "Could not find file at {}".format(filename)) from e
......
#
# Copyright 2018 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Jonathan Maw <jonathan.maw@codethink.co.uk>
import os
import pickle
import hashlib
import io
import sys
from contextlib import contextmanager
from collections import namedtuple
from ._cachekey import generate_key
from ._context import Context
from . import utils, _yaml
# Name of the cache file, stored under the project's .bst directory
YAML_CACHE_FILENAME = "yaml_cache.pickle"
# YamlCache()
#
# A cache that wraps around the loading of yaml in projects.
#
# The recommended way to use a YamlCache is:
# with YamlCache.open(context) as yamlcache:
# # Load all the yaml
# ...
#
# Args:
# context (Context): The invocation Context
#
class YamlCache():

    def __init__(self, context):
        self._project_caches = {}   # Maps project name -> CachedProject
        self._context = context

    ##################
    # Public Methods #
    ##################

    # is_cached():
    #
    # Checks whether a file is cached.
    #
    # Args:
    #    project (Project): The project this file is in.
    #    filepath (str): The path to the file, *relative to the project's directory*.
    #
    # Returns:
    #    (bool): Whether the file is cached.
    def is_cached(self, project, filepath):
        cache_path = self._get_filepath(project, filepath)
        project_name = project.name if project else ""
        try:
            project_cache = self._project_caches[project_name]
            if cache_path in project_cache.elements:
                return True
        except KeyError:
            pass
        return False

    # open():
    #
    # Return an instance of the YamlCache which writes to disk when it leaves scope.
    #
    # Args:
    #    context (Context): The context.
    #    cachefile (str): The path to the cache file.
    #
    # Returns:
    #    (YamlCache): A YamlCache.
    @staticmethod
    @contextmanager
    def open(context, cachefile):
        # Try to load from disk first
        cache = None
        if os.path.exists(cachefile):
            try:
                with open(cachefile, "rb") as f:
                    cache = BstUnpickler(f, context).load()
            except EOFError:
                # The file was empty
                pass
            except pickle.UnpicklingError as e:
                # A corrupted cache is not fatal; warn and start fresh
                sys.stderr.write("Failed to load YamlCache, {}\n".format(e))

        # Failed to load from disk, create a new one
        if not cache:
            cache = YamlCache(context)

        yield cache

        # Persist the cache to disk when the 'with' block exits cleanly
        cache._write(cachefile)

    # get_cache_file():
    #
    # Retrieves a path to the yaml cache file.
    #
    # Args:
    #    top_dir (str): The top-level directory of the project
    #
    # Returns:
    #    (str): The path to the cache file
    @staticmethod
    def get_cache_file(top_dir):
        return os.path.join(top_dir, ".bst", YAML_CACHE_FILENAME)

    # get():
    #
    # Gets a parsed file from the cache.
    #
    # Args:
    #    project (Project) or None: The project this file is in, if it exists.
    #    filepath (str): The absolute path to the file.
    #    contents (str): The contents of the file to be cached
    #    copy_tree (bool): Whether the data should make a copy when it's being generated
    #                      (i.e. exactly as when called in yaml)
    #
    # Returns:
    #    (decorated dict): The parsed yaml from the cache, or None if the file isn't in the cache.
    #    (str): The key used to look up the parsed yaml in the cache
    def get(self, project, filepath, contents, copy_tree):
        key = self._calculate_key(contents, copy_tree)
        data = self._get(project, filepath, key)
        return data, key

    # put():
    #
    # Puts a parsed file into the cache.
    #
    # Args:
    #    project (Project): The project this file is in.
    #    filepath (str): The path to the file.
    #    contents (str): The contents of the file that has been cached
    #    copy_tree (bool): Whether the data should make a copy when it's being generated
    #                      (i.e. exactly as when called in yaml)
    #    value (decorated dict): The data to put into the cache.
    def put(self, project, filepath, contents, copy_tree, value):
        key = self._calculate_key(contents, copy_tree)
        self.put_from_key(project, filepath, key, value)

    # put_from_key():
    #
    # Put a parsed file into the cache when given a key.
    #
    # Args:
    #    project (Project): The project this file is in.
    #    filepath (str): The path to the file.
    #    key (str): The key to the file within the cache. Typically, this is the
    #               value of `calculate_key()` with the file's unparsed contents
    #               and any relevant metadata passed in.
    #    value (decorated dict): The data to put into the cache.
    def put_from_key(self, project, filepath, key, value):
        cache_path = self._get_filepath(project, filepath)
        project_name = project.name if project else ""
        try:
            project_cache = self._project_caches[project_name]
        except KeyError:
            project_cache = self._project_caches[project_name] = CachedProject({})

        project_cache.elements[cache_path] = CachedYaml(key, value)

    ###################
    # Private Methods #
    ###################

    # _write():
    #
    # Writes the yaml cache to the specified path.
    #
    # Args:
    #    path (str): The path to the cache file.
    def _write(self, path):
        parent_dir = os.path.dirname(path)
        os.makedirs(parent_dir, exist_ok=True)
        with open(path, "wb") as f:
            BstPickler(f).dump(self)

    # _get_filepath():
    #
    # Returns a file path relative to a project if passed, or the original path if
    # the project is None
    #
    # Args:
    #    project (Project) or None: The project the filepath exists within
    #    full_path (str): The path that the returned path is based on
    #
    # Returns:
    #    (str): The path to the file, relative to a project if it exists
    def _get_filepath(self, project, full_path):
        if project:
            assert full_path.startswith(project.directory)
            filepath = os.path.relpath(full_path, project.directory)
        else:
            filepath = full_path
        # Fix: this previously returned 'full_path' unconditionally, so cache
        # entries were keyed on absolute paths and would never match again if
        # the project directory moved (which the relative-path design,
        # mirrored in BstPickler.persistent_id, is meant to support).
        return filepath

    # _calculate_key():
    #
    # Calculates a key for putting into the cache.
    #
    # Args:
    #    (basic object)... : Any number of strictly-ordered basic objects
    #
    # Returns:
    #    (str): A key made out of every arg passed in
    @staticmethod
    def _calculate_key(*args):
        string = pickle.dumps(args)
        return hashlib.sha1(string).hexdigest()

    # _get():
    #
    # Gets a parsed file from the cache when given a key.
    #
    # Args:
    #    project (Project): The project this file is in.
    #    filepath (str): The path to the file.
    #    key (str): The key to the file within the cache. Typically, this is the
    #               value of `calculate_key()` with the file's unparsed contents
    #               and any relevant metadata passed in.
    #
    # Returns:
    #    (decorated dict): The parsed yaml from the cache, or None if the file isn't in the cache.
    def _get(self, project, filepath, key):
        cache_path = self._get_filepath(project, filepath)
        project_name = project.name if project else ""
        try:
            project_cache = self._project_caches[project_name]
            try:
                cachedyaml = project_cache.elements[cache_path]
                if cachedyaml._key == key:
                    # We've unpickled the YamlCache, but not the specific file
                    if cachedyaml._contents is None:
                        cachedyaml._contents = BstUnpickler.loads(cachedyaml._pickled_contents, self._context)
                    return cachedyaml._contents
            except KeyError:
                pass
        except KeyError:
            pass
        return None
# Per-project cache entry; 'elements' maps a cache path (project-relative
# where a project is known) to its CachedYaml.
CachedProject = namedtuple('CachedProject', ['elements'])
# CachedYaml()
#
# Holds one cached yaml document together with the key it was cached under.
# The parsed contents are kept both live and in pickled form; only the
# pickled form survives serialisation of the enclosing YamlCache.
#
# Args:
#    key (str): The cache key for this document
#    contents (provenanced dict): The parsed document
#
class CachedYaml():
    def __init__(self, key, contents):
        self._key = key
        self.set_contents(contents)

    # set_contents()
    #
    # Stores the parsed document, refreshing the pickled copy.
    #
    # Args:
    #    contents (provenanced dict): The contents to put in the cache.
    #
    def set_contents(self, contents):
        self._contents = contents
        self._pickled_contents = BstPickler.dumps(contents)

    # Pickling helper: drop the live contents so only the pickled form is
    # serialised; YamlCache._get() lazily restores it on first access.
    def __getstate__(self):
        state = dict(self.__dict__)
        state['_contents'] = None
        return state
# In _yaml.load, we have a ProvenanceFile that stores the project the file
# came from. Projects can't be pickled, but it's always going to be the same
# project between invocations (unless the entire project is moved but the
# file stayed in the same place)
# BstPickler()
#
# Pickler which stores ProvenanceFile and Context objects as persistent
# ids instead of serialising them directly; BstUnpickler performs the
# inverse mapping.
#
class BstPickler(pickle.Pickler):
    def persistent_id(self, obj):
        # Context objects are per-invocation; store only a tag.
        if isinstance(obj, Context):
            return ("Context",)

        if not isinstance(obj, _yaml.ProvenanceFile):
            # Anything else is pickled normally
            return None

        if obj.project:
            # ProvenanceFile's project object cannot be stored as it is;
            # tag it by name.  The filename must be stored relative to the
            # project, as the project dir may move.
            tag = obj.project.name
            stored_name = os.path.relpath(obj.name, obj.project.directory)
        else:
            tag = None
            stored_name = obj.name
        return ("ProvenanceFile", stored_name, obj.shortname, tag)

    # dumps()
    #
    # Convenience wrapper: pickle an object to bytes in memory.
    @staticmethod
    def dumps(obj):
        buffer = io.BytesIO()
        BstPickler(buffer).dump(obj)
        return buffer.getvalue()
# BstUnpickler()
#
# Unpickler that resolves the persistent ids written by BstPickler,
# reconstructing ProvenanceFile and Context references against the
# invocation context.
#
# Args:
#    file (file object): The binary stream to unpickle from
#    context (Context): The invocation context used to resolve projects
#
class BstUnpickler(pickle.Unpickler):
    def __init__(self, file, context):
        super().__init__(file)
        self._context = context

    # persistent_load()
    #
    # Inverse of BstPickler.persistent_id; invoked by pickle for each
    # persistent id found in the stream.
    #
    # Raises:
    #    pickle.UnpicklingError: If a tagged project is not loaded in the
    #                            context, or the id is unrecognised.
    def persistent_load(self, pid):
        if pid[0] == "ProvenanceFile":
            _, tagged_name, shortname, project_tag = pid

            if project_tag is not None:
                # Fix: 'project' was previously unbound when no project
                # matched, and the error message referenced an undefined
                # name ('key_id').
                project = None
                for p in self._context.get_projects():
                    if project_tag == p.name:
                        project = p
                        break

                if not project:
                    projects = [p.name for p in self._context.get_projects()]
                    raise pickle.UnpicklingError("No project with name {} found in {}"
                                                 .format(project_tag, projects))

                # Stored names are project-relative; rebuild the absolute path
                name = os.path.join(project.directory, tagged_name)
            else:
                project = None
                name = tagged_name

            return _yaml.ProvenanceFile(name, shortname, project)
        elif pid[0] == "Context":
            return self._context
        else:
            raise pickle.UnpicklingError("Unsupported persistent object, {}".format(pid))

    # loads()
    #
    # Convenience wrapper: unpickle an object from bytes in memory.
    @staticmethod
    def loads(text, context):
        stream = io.BytesIO()
        stream.write(bytes(text))
        stream.seek(0)
        return BstUnpickler(stream, context).load()
import os
import pytest
import hashlib
import tempfile
from ruamel import yaml
from tests.testutils import cli, generate_junction, create_element_size, create_repo
from buildstream import _yaml
from buildstream._yamlcache import YamlCache
from buildstream._project import Project
from buildstream._context import Context
from contextlib import contextmanager
# generate_project()
#
# Create a test project on disk, optionally depending on a junctioned
# subproject (which is generated recursively).
#
# Args:
#    tmpdir (str): Directory to create the project under
#    ref_storage (str): 'inline' or 'project.refs'
#    with_junction (str): 'junction' to add a junction dependency, else 'no-junction'
#    name (str): The project name (also used as the directory name)
#
# Returns:
#    (str): The path to the generated project directory
def generate_project(tmpdir, ref_storage, with_junction, name="test"):
    use_junction = with_junction == 'junction'

    subproject_dir = None
    if use_junction:
        subproject_dir = generate_project(
            tmpdir, ref_storage,
            'no-junction', name='test-subproject'
        )

    project_dir = os.path.join(tmpdir, name)
    os.makedirs(project_dir)

    # Write out project.conf
    elements_path = 'elements'
    _yaml.dump({
        'name': name,
        'element-path': elements_path,
        'ref-storage': ref_storage,
    }, os.path.join(project_dir, 'project.conf'))

    # Generate the elements, junctioned to the subproject if requested
    if use_junction:
        junction_name = 'junction.bst'
        junction_dir = os.path.join(project_dir, elements_path)
        junction_path = os.path.join(project_dir, elements_path, junction_name)
        os.makedirs(junction_dir)
        generate_junction(tmpdir, subproject_dir, junction_path)
        element_depends = [{'junction': junction_name, 'filename': 'test.bst'}]
    else:
        element_depends = []

    create_element_size('test.bst', project_dir, elements_path, element_depends, 1)

    return project_dir
# with_yamlcache()
#
# Context manager yielding an open YamlCache and its Project for the
# given project directory.
@contextmanager
def with_yamlcache(project_dir):
    ctx = Context()
    proj = Project(project_dir, ctx)
    with YamlCache.open(ctx, YamlCache.get_cache_file(project_dir)) as cache:
        yield cache, proj
# yamlcache_key()
#
# Compute the cache key the given YamlCache would use for a file's
# current on-disk contents.
def yamlcache_key(yamlcache, in_file, copy_tree=False):
    with open(in_file) as f:
        contents = f.read()
    return yamlcache._calculate_key(contents, copy_tree)
# modified_file()
#
# Copy input_file into tmpdir with a 'variables' section appended,
# returning the path of the modified copy.
#
# Args:
#    input_file (str): Path of the file to copy and modify
#    tmpdir (str): Directory in which to create the temporary copy
#
# Returns:
#    (str): The path to the modified temporary file
def modified_file(input_file, tmpdir):
    with open(input_file) as f:
        data = f.read()
    assert 'variables' not in data
    data += '\nvariables: {modified: True}\n'

    # Fix: the original discarded the mkstemp file descriptor without
    # closing it, leaking one fd per call; write through it instead.
    fd, temppath = tempfile.mkstemp(dir=tmpdir, text=True)
    with os.fdopen(fd, 'w') as f:
        f.write(data)

    return temppath
# Test that the yamlcache is actually consulted when showing an element.
#
# The cache entry for test.bst is deliberately replaced with a modified
# copy of the file, so the modification only shows up in 'bst show'
# output if the cache is being used.
@pytest.mark.parametrize('ref_storage', ['inline', 'project.refs'])
@pytest.mark.parametrize('with_junction', ['no-junction', 'junction'])
@pytest.mark.parametrize('move_project', ['move', 'no-move'])
def test_yamlcache_used(cli, tmpdir, ref_storage, with_junction, move_project):
    # Generate the project
    project = generate_project(str(tmpdir), ref_storage, with_junction)
    if with_junction == 'junction':
        result = cli.run(project=project, args=['fetch', '--track', 'junction.bst'])
        result.assert_success()

    # Run bst show so the element ends up in the cache
    result = cli.run(project=project, args=['show', 'test.bst'])
    result.assert_success()

    element_path = os.path.join(project, 'elements', 'test.bst')
    with with_yamlcache(project) as (cache, proj):
        # The element must already be cached
        assert cache.is_cached(proj, element_path)

        # *Absolutely* horrible cache corruption to check it's being used
        # Modifying the data from the cache is fraught with danger,
        # so instead I'll load a modified version of the original file
        temppath = modified_file(element_path, str(tmpdir))
        contents = _yaml.load(temppath, copy_tree=False, project=proj)
        key = yamlcache_key(cache, element_path)
        cache.put_from_key(proj, element_path, key, contents)

    # The injected variable should now leak out of 'bst show'
    result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
    result.assert_success()
    data = yaml.safe_load(result.output)
    assert 'modified' in data
    assert data['modified'] == 'True'
# Test that a file changed on disk invalidates its yamlcache entry.
@pytest.mark.parametrize('ref_storage', ['inline', 'project.refs'])
@pytest.mark.parametrize('with_junction', ['junction', 'no-junction'])
def test_yamlcache_changed_file(cli, tmpdir, ref_storage, with_junction):
    # i.e. a file is cached, the file is changed, loading the file (with cache) returns new data
    # inline and junction can only be changed by opening a workspace
    # Generate the project
    project = generate_project(str(tmpdir), ref_storage, with_junction)
    if with_junction == 'junction':
        result = cli.run(project=project, args=['fetch', '--track', 'junction.bst'])
        result.assert_success()

    # Put the element into the cache via bst show
    result = cli.run(project=project, args=['show', 'test.bst'])
    result.assert_success()

    element_path = os.path.join(project, 'elements', 'test.bst')
    with with_yamlcache(project) as (cache, proj):
        # Confirm it is cached, then modify the file on disk
        assert cache.is_cached(proj, element_path)
        with open(element_path, "a") as f:
            f.write('\nvariables: {modified: True}\n')

        # Reload through the cache so it picks up the new contents
        _yaml.load(element_path, copy_tree=False, project=proj, yaml_cache=cache)

    # The new variable should be visible in bst show
    result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
    result.assert_success()
    data = yaml.safe_load(result.output)
    assert 'modified' in data
    assert data['modified'] == 'True'
import os import os
import pytest import pytest
import tempfile
from collections import Mapping from collections import Mapping
from buildstream import _yaml from buildstream import _yaml
from buildstream._exceptions import LoadError, LoadErrorReason from buildstream._exceptions import LoadError, LoadErrorReason
from buildstream._context import Context
from buildstream._yamlcache import YamlCache
DATA_DIR = os.path.join( DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), os.path.dirname(os.path.realpath(__file__)),
...@@ -150,6 +153,21 @@ def test_composite_preserve_originals(datafiles): ...@@ -150,6 +153,21 @@ def test_composite_preserve_originals(datafiles):
assert(_yaml.node_get(orig_extra, str, 'old') == 'new') assert(_yaml.node_get(orig_extra, str, 'old') == 'new')
def load_yaml_file(filename, *, cache_path, shortname=None, from_cache='raw'):
_, temppath = tempfile.mkstemp(dir=os.path.join(cache_path.dirname, cache_path.basename), text=True)
context = Context()
with YamlCache.open(context, temppath) as yc:
if from_cache == 'raw':
return _yaml.load(filename, shortname)
elif from_cache == 'cached':
_yaml.load(filename, shortname, yaml_cache=yc)
return _yaml.load(filename, shortname, yaml_cache=yc)
else:
assert False
# Tests for list composition # Tests for list composition
# #
# Each test composits a filename on top of basics.yaml, and tests # Each test composits a filename on top of basics.yaml, and tests
...@@ -165,6 +183,7 @@ def test_composite_preserve_originals(datafiles): ...@@ -165,6 +183,7 @@ def test_composite_preserve_originals(datafiles):
# prov_col: The expected provenance column of "mood" # prov_col: The expected provenance column of "mood"
# #
@pytest.mark.datafiles(os.path.join(DATA_DIR)) @pytest.mark.datafiles(os.path.join(DATA_DIR))
@pytest.mark.parametrize('caching', [('raw'), ('cached')])
@pytest.mark.parametrize("filename,index,length,mood,prov_file,prov_line,prov_col", [ @pytest.mark.parametrize("filename,index,length,mood,prov_file,prov_line,prov_col", [
# Test results of compositing with the (<) prepend directive # Test results of compositing with the (<) prepend directive
...@@ -195,14 +214,15 @@ def test_composite_preserve_originals(datafiles): ...@@ -195,14 +214,15 @@ def test_composite_preserve_originals(datafiles):
('implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8), ('implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8),
('implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8), ('implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8),
]) ])
def test_list_composition(datafiles, filename, def test_list_composition(datafiles, filename, tmpdir,
index, length, mood, index, length, mood,
prov_file, prov_line, prov_col): prov_file, prov_line, prov_col, caching):
base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml') base_file = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
overlay = os.path.join(datafiles.dirname, datafiles.basename, filename) overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
base = load_yaml_file(base_file, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
overlay = load_yaml_file(overlay_file, cache_path=tmpdir, shortname=filename, from_cache=caching)
base = _yaml.load(base, shortname='basics.yaml')
overlay = _yaml.load(overlay, shortname=filename)
_yaml.composite_dict(base, overlay) _yaml.composite_dict(base, overlay)
children = _yaml.node_get(base, list, 'children') children = _yaml.node_get(base, list, 'children')
...@@ -254,6 +274,7 @@ def test_list_deletion(datafiles): ...@@ -254,6 +274,7 @@ def test_list_deletion(datafiles):
# prov_col: The expected provenance column of "mood" # prov_col: The expected provenance column of "mood"
# #
@pytest.mark.datafiles(os.path.join(DATA_DIR)) @pytest.mark.datafiles(os.path.join(DATA_DIR))
@pytest.mark.parametrize('caching', [('raw'), ('cached')])
@pytest.mark.parametrize("filename1,filename2,index,length,mood,prov_file,prov_line,prov_col", [ @pytest.mark.parametrize("filename1,filename2,index,length,mood,prov_file,prov_line,prov_col", [
# Test results of compositing literal list with (>) and then (<) # Test results of compositing literal list with (>) and then (<)
...@@ -310,9 +331,9 @@ def test_list_deletion(datafiles): ...@@ -310,9 +331,9 @@ def test_list_deletion(datafiles):
('listoverwrite.yaml', 'listprepend.yaml', 2, 4, 'overwrite1', 'listoverwrite.yaml', 5, 10), ('listoverwrite.yaml', 'listprepend.yaml', 2, 4, 'overwrite1', 'listoverwrite.yaml', 5, 10),
('listoverwrite.yaml', 'listprepend.yaml', 3, 4, 'overwrite2', 'listoverwrite.yaml', 7, 10), ('listoverwrite.yaml', 'listprepend.yaml', 3, 4, 'overwrite2', 'listoverwrite.yaml', 7, 10),
]) ])
def test_list_composition_twice(datafiles, filename1, filename2, def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
index, length, mood, index, length, mood,
prov_file, prov_line, prov_col): prov_file, prov_line, prov_col, caching):
file_base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml') file_base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1) file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1)
file2 = os.path.join(datafiles.dirname, datafiles.basename, filename2) file2 = os.path.join(datafiles.dirname, datafiles.basename, filename2)
...@@ -320,9 +341,9 @@ def test_list_composition_twice(datafiles, filename1, filename2, ...@@ -320,9 +341,9 @@ def test_list_composition_twice(datafiles, filename1, filename2,
##################### #####################
# Round 1 - Fight ! # Round 1 - Fight !
##################### #####################
base = _yaml.load(file_base, shortname='basics.yaml') base = load_yaml_file(file_base, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
overlay1 = _yaml.load(file1, shortname=filename1) overlay1 = load_yaml_file(file1, cache_path=tmpdir, shortname=filename1, from_cache=caching)
overlay2 = _yaml.load(file2, shortname=filename2) overlay2 = load_yaml_file(file2, cache_path=tmpdir, shortname=filename2, from_cache=caching)
_yaml.composite_dict(base, overlay1) _yaml.composite_dict(base, overlay1)
_yaml.composite_dict(base, overlay2) _yaml.composite_dict(base, overlay2)
...@@ -337,9 +358,9 @@ def test_list_composition_twice(datafiles, filename1, filename2, ...@@ -337,9 +358,9 @@ def test_list_composition_twice(datafiles, filename1, filename2,
##################### #####################
# Round 2 - Fight ! # Round 2 - Fight !
##################### #####################
base = _yaml.load(file_base, shortname='basics.yaml') base = load_yaml_file(file_base, cache_path=tmpdir, shortname='basics.yaml', from_cache=caching)
overlay1 = _yaml.load(file1, shortname=filename1) overlay1 = load_yaml_file(file1, cache_path=tmpdir, shortname=filename1, from_cache=caching)
overlay2 = _yaml.load(file2, shortname=filename2) overlay2 = load_yaml_file(file2, cache_path=tmpdir, shortname=filename2, from_cache=caching)
_yaml.composite_dict(overlay1, overlay2) _yaml.composite_dict(overlay1, overlay2)
_yaml.composite_dict(base, overlay1) _yaml.composite_dict(base, overlay1)
......