Commit 3816dcf8 authored by Daniel Silverstone

The new YAML World Order

Replace YAML internals with a new Node type, and refactor everything
to use it cleanly.

This work was also done by James Ennis <james.ennis@codethink.co.uk>
Signed-off-by: Daniel Silverstone <daniel.silverstone@codethink.co.uk>
parent 1e698622
@@ -279,7 +279,12 @@ class Artifact():
return build_result
data = _yaml.load(meta_file, shortname='meta/build-result.yaml')
build_result = (data["success"], data.get("description"), data.get("detail"))
success = _yaml.node_get(data, bool, 'success')
description = _yaml.node_get(data, str, 'description', default_value=None)
detail = _yaml.node_get(data, str, 'detail', default_value=None)
build_result = (success, description, detail)
return build_result
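For reference, a minimal sketch of the typed accessor pattern above, assuming a build-result.yaml containing these keys; _yaml is BuildStream's private buildstream._yaml module, so this is illustrative rather than public API:

from buildstream import _yaml

# load() now returns a Node rather than a plain dict
data = _yaml.load('meta/build-result.yaml', shortname='build-result.yaml')

# node_get() type-checks the value, raising LoadError on a mismatch
success = _yaml.node_get(data, bool, 'success')

# Optional keys take default_value instead of dict.get()
detail = _yaml.node_get(data, str, 'detail', default_value=None)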
@@ -310,13 +315,13 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'keys.yaml')
meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
strong_key = meta['strong']
weak_key = meta['weak']
strong_key = _yaml.node_get(meta, str, 'strong')
weak_key = _yaml.node_get(meta, str, 'weak')
assert key in (strong_key, weak_key)
metadata_keys[strong_key] = meta
metadata_keys[weak_key] = meta
metadata_keys[strong_key] = _yaml.node_sanitize(meta)
metadata_keys[weak_key] = _yaml.node_sanitize(meta)
return (strong_key, weak_key, metadata_keys)
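Nodes carry provenance alongside their values; node_sanitize() converts a Node tree back into plain Python containers before it is stored in the shared metadata caches. A minimal sketch, assuming meta is a loaded Node:

# Strip provenance and return ordinary Python values, so cached
# entries do not hold references into the parsed YAML tree
plain_meta = _yaml.node_sanitize(meta)
metadata_keys[strong_key] = plain_meta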
@@ -351,8 +356,8 @@ class Artifact():
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
metadata_dependencies[strong_key] = meta
metadata_dependencies[weak_key] = meta
metadata_dependencies[strong_key] = _yaml.node_sanitize(meta)
metadata_dependencies[weak_key] = _yaml.node_sanitize(meta)
return (meta, metadata_dependencies, metadata_keys)
@@ -385,7 +390,7 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'workspaced.yaml')
meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
workspaced = meta['workspaced']
workspaced = _yaml.node_get(meta, bool, 'workspaced')
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
@@ -424,7 +429,7 @@ class Artifact():
# Parse the expensive yaml now and cache the result
meta_file = artifact_vdir._objpath('meta', 'workspaced-dependencies.yaml')
meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
workspaced = meta['workspaced-dependencies']
workspaced = _yaml.node_sanitize(_yaml.node_get(meta, list, 'workspaced-dependencies'))
# Cache it under both strong and weak keys
strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
......
@@ -84,7 +84,7 @@ def verify_artifact_ref(ref):
try:
project, element, key = ref.split('/', 2) # This will raise a ValueError if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if len(key) != len(_cachekey.generate_key({})):
if len(key) != len(_cachekey.generate_key(_yaml.new_empty_node())):
raise ValueError
except ValueError:
raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
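Since the cache-key machinery no longer accepts plain dicts, new_empty_node() stands in for the literal {}. A sketch, with _cachekey being the same private BuildStream module used above:

# An empty Node replaces {} when computing the reference key length
empty = _yaml.new_empty_node()
expected_length = len(_cachekey.generate_key(empty))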
......
@@ -16,13 +16,13 @@
# Authors:
# Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
#
from collections.abc import Mapping
import multiprocessing
from . import utils
from . import _yaml
from ._cas import CASRemote
from ._message import Message, MessageType
from ._exceptions import LoadError
# Base Cache for Caches to derive from
@@ -70,11 +70,12 @@ class BaseCache():
def specs_from_config_node(cls, config_node, basedir=None):
cache_specs = []
artifacts = config_node.get(cls.config_node_name, [])
if isinstance(artifacts, Mapping):
# pylint: disable=not-callable
cache_specs.append(cls.spec_class._new_from_config_node(artifacts, basedir))
elif isinstance(artifacts, list):
try:
artifacts = [_yaml.node_get(config_node, dict, cls.config_node_name)]
except LoadError:
artifacts = _yaml.node_get(config_node, list, cls.config_node_name, default_value=[])
if isinstance(artifacts, list):
for spec_node in artifacts:
cache_specs.append(cls.spec_class._new_from_config_node(spec_node, basedir))
else:
......
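The try/except above replaces the old isinstance() dispatch: node_get() raises LoadError when a key's value is not of the requested type, and that is used to probe whether the config holds a single mapping or a list of them. A condensed sketch of the probe, using config_node from the hunk above and 'artifacts' as an illustrative value of cls.config_node_name:

from buildstream._exceptions import LoadError

try:
    # A single cache spec written as a mapping...
    artifacts = [_yaml.node_get(config_node, dict, 'artifacts')]
except LoadError:
    # ...otherwise a list of specs, defaulting to empty if absent
    artifacts = _yaml.node_get(config_node, list, 'artifacts', default_value=[])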
@@ -157,7 +157,7 @@ class Context():
self._artifactcache = None
self._sourcecache = None
self._projects = []
self._project_overrides = {}
self._project_overrides = _yaml.new_empty_node()
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._log_handle = None
@@ -203,11 +203,11 @@ class Context():
_yaml.composite(defaults, user_config)
# Give obsoletion warnings
if defaults.get('builddir'):
if _yaml.node_contains(defaults, 'builddir'):
raise LoadError(LoadErrorReason.INVALID_DATA,
"builddir is obsolete, use cachedir")
if defaults.get('artifactdir'):
if _yaml.node_contains(defaults, 'artifactdir'):
raise LoadError(LoadErrorReason.INVALID_DATA,
"artifactdir is obsolete")
@@ -306,7 +306,7 @@ class Context():
self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')
# Load per-projects overrides
self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
self._project_overrides = _yaml.node_get(defaults, dict, 'projects', default_value={})
# Shallow validation of overrides; the parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
@@ -441,7 +441,7 @@ class Context():
if self._cache_key is None:
# Anything that alters the build goes into the unique key
self._cache_key = _cachekey.generate_key({})
self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
return self._cache_key
......
@@ -100,7 +100,7 @@ def complete_target(args, incomplete):
return []
# The project is not required to have an element-path
element_directory = project.get('element-path')
element_directory = _yaml.node_get(project, str, 'element-path', default_value='')
# If a project was loaded, use its element-path to
# adjust our completion's base directory
......
@@ -422,7 +422,7 @@ class _GitSourceBase(Source):
self.mark_download_url(url, primary=False)
self.submodule_overrides[path] = url
if 'checkout' in submodule:
if self.node_has_member(submodule, 'checkout'):
checkout = self.node_get_member(submodule, bool, 'checkout')
self.submodule_checkout_overrides[path] = checkout
......
import os
from collections.abc import Mapping
from . import _yaml
from ._exceptions import LoadError, LoadErrorReason
@@ -36,17 +35,20 @@ class Includes:
if current_loader is None:
current_loader = self._loader
if isinstance(node.get('(@)'), str):
includes = [_yaml.node_get(node, str, '(@)')]
else:
includes = _yaml.node_get(node, list, '(@)', default_value=None)
includes = _yaml.node_get(node, None, '(@)', default_value=None)
if isinstance(includes, str):
includes = [includes]
if not isinstance(includes, list) and includes is not None:
provenance = _yaml.node_get_provenance(node, key='(@)')
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: {} must either be list or str".format(provenance, includes))
include_provenance = None
if '(@)' in node:
if includes:
include_provenance = _yaml.node_get_provenance(node, key='(@)')
del node['(@)']
_yaml.node_del(node, '(@)')
if includes:
for include in reversed(includes):
if only_local and ':' in include:
continue
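Passing None as the expected type to node_get() returns the value untyped, which is how the include directive accepts either a string or a list; node_del() then removes the processed key. A sketch of that shape, assuming node carries an '(@)' include directive:

# Accept '(@)' as either a single string or a list of strings
includes = _yaml.node_get(node, None, '(@)', default_value=None)
if isinstance(includes, str):
    includes = [includes]

# Provenance records file and line for error messages
include_provenance = _yaml.node_get_provenance(node, key='(@)')

# Drop the directive once it has been processed
_yaml.node_del(node, '(@)')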
@@ -130,7 +132,7 @@ class Includes:
included=set(),
current_loader=None,
only_local=False):
if isinstance(value, Mapping):
if _yaml.is_node(value):
self.process(value,
included=included,
current_loader=current_loader,
......
@@ -176,6 +176,6 @@ def _extract_depends_from_node(node, *, key=None):
output_deps.append(dependency)
# Now delete the field, we don't want it anymore
del node[key]
_yaml.node_del(node, key, safe=True)
return output_deps
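With safe=True, node_del() tolerates a missing key, the Node counterpart of dict.pop(key, None); 'depends' below is an illustrative key:

# Deletes the key if present; does not raise when it is absent
_yaml.node_del(node, 'depends', safe=True)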
@@ -121,7 +121,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
dummy_target = LoadElement("", "", self)
dummy_target = LoadElement(_yaml.new_empty_node(), "", self)
dummy_target.dependencies.extend(
LoadElement.Dependency(element, Symbol.RUNTIME)
for element in target_elements
@@ -420,12 +420,12 @@ class Loader():
for i in range(len(sources)):
source = _yaml.node_get(node, Mapping, Symbol.SOURCES, indices=[i])
kind = _yaml.node_get(source, str, Symbol.KIND)
del source[Symbol.KIND]
_yaml.node_del(source, Symbol.KIND)
# Directory is optional
directory = _yaml.node_get(source, str, Symbol.DIRECTORY, default_value=None)
if directory:
del source[Symbol.DIRECTORY]
_yaml.node_del(source, Symbol.DIRECTORY)
index = sources.index(source)
meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
......
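The indices argument lets node_get() address an entry inside a list-valued key, so each source can be fetched as a typed node. A sketch, assuming an element node with a 'sources' list of mappings:

sources = _yaml.node_get(node, list, 'sources')
for i in range(len(sources)):
    # Fetch the i'th entry of the 'sources' list as a mapping
    source = _yaml.node_get(node, dict, 'sources', indices=[i])
    kind = _yaml.node_get(source, str, 'kind')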
@@ -17,6 +17,8 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
from .. import _yaml
class MetaElement():
@@ -46,12 +48,12 @@ class MetaElement():
self.kind = kind
self.provenance = provenance
self.sources = sources
self.config = config or {}
self.variables = variables or {}
self.environment = environment or {}
self.config = config or _yaml.new_empty_node()
self.variables = variables or _yaml.new_empty_node()
self.environment = environment or _yaml.new_empty_node()
self.env_nocache = env_nocache or []
self.public = public or {}
self.sandbox = sandbox or {}
self.public = public or _yaml.new_empty_node()
self.sandbox = sandbox or _yaml.new_empty_node()
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
@@ -17,8 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
from collections.abc import Mapping
from .._exceptions import LoadError, LoadErrorReason
from .. import _yaml
@@ -69,7 +67,7 @@ class Dependency():
self.dep_type = default_dep_type
self.junction = None
elif isinstance(dep, Mapping):
elif _yaml.is_node(dep):
if default_dep_type:
_yaml.node_validate(dep, ['filename', 'junction'])
dep_type = default_dep_type
......
@@ -18,7 +18,6 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
#
from collections.abc import Mapping
import jinja2
from .. import _yaml
@@ -153,7 +152,7 @@ class OptionPool():
def export_variables(self, variables):
for _, option in self._options.items():
if option.variable:
variables[option.variable] = option.get_value()
_yaml.node_set(variables, option.variable, option.get_value())
# printable_variables()
#
@@ -170,7 +169,7 @@ class OptionPool():
# process_node()
#
# Args:
# node (Mapping): A YAML Loaded dictionary
# node (node): A loaded YAML node
#
def process_node(self, node):
@@ -187,7 +186,7 @@ class OptionPool():
# and process any indirectly nested conditionals.
#
for _, value in _yaml.node_items(node):
if isinstance(value, Mapping):
if _yaml.is_node(value):
self.process_node(value)
elif isinstance(value, list):
self._process_list(value)
@@ -238,7 +237,7 @@ class OptionPool():
#
def _process_list(self, values):
for value in values:
if isinstance(value, Mapping):
if _yaml.is_node(value):
self.process_node(value)
elif isinstance(value, list):
self._process_list(value)
@@ -268,7 +267,7 @@ class OptionPool():
_yaml.node_get_provenance(node, '(?)', indices=[i])
for i in range(len(conditions))
]
del node['(?)']
_yaml.node_del(node, '(?)')
for condition, p in zip(conditions, provenance):
tuples = list(_yaml.node_items(condition))
@@ -283,7 +282,7 @@ class OptionPool():
# Prepend the provenance of the error
raise LoadError(e.reason, "{}: {}".format(p, e)) from e
if not hasattr(value, 'get'):
if not _yaml.is_node(value):
raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
"{}: Only values of type 'dict' can be composed.".format(p))
......
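is_node() replaces isinstance(value, Mapping) now that loaded YAML nodes are no longer plain mappings. A condensed sketch of the recursive walk OptionPool performs, written as a free function for illustration:

def walk(value):
    if _yaml.is_node(value):
        # Recurse into nested mappings via node_items()
        for _, child in _yaml.node_items(value):
            walk(child)
    elif isinstance(value, list):
        # Lists may nest further mappings or lists
        for item in value:
            walk(item)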
@@ -22,6 +22,7 @@ import inspect
from ._exceptions import PluginError, LoadError, LoadErrorReason
from . import utils
from . import _yaml
# A Context for loading plugin types
@@ -135,18 +136,21 @@ class PluginContext():
source = None
defaults = None
loaded_dependency = False
for origin in self._plugin_origins:
if kind not in origin['plugins']:
if kind not in _yaml.node_get(origin, list, 'plugins'):
continue
if origin['origin'] == 'local':
source = self._get_local_plugin_source(origin['path'])
elif origin['origin'] == 'pip':
source, defaults = self._get_pip_plugin_source(origin['package-name'], kind)
if _yaml.node_get(origin, str, 'origin') == 'local':
local_path = _yaml.node_get(origin, str, 'path')
source = self._get_local_plugin_source(local_path)
elif _yaml.node_get(origin, str, 'origin') == 'pip':
package_name = _yaml.node_get(origin, str, 'package-name')
source, defaults = self._get_pip_plugin_source(package_name, kind)
else:
raise PluginError("Failed to load plugin '{}': "
"Unexpected plugin origin '{}'"
.format(kind, origin['origin']))
.format(kind, _yaml.node_get(origin, str, 'origin')))
loaded_dependency = True
break
......
@@ -201,7 +201,7 @@ class Project():
if url and utils._ALIAS_SEPARATOR in url:
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
alias_url = config._aliases.get(url_alias)
alias_url = _yaml.node_get(config._aliases, str, url_alias, default_value=None)
if alias_url:
url = alias_url + url_body
@@ -231,7 +231,7 @@ class Project():
# Anything that alters the build goes into the unique key
# (currently nothing here)
self._cache_key = _cachekey.generate_key({})
self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
return self._cache_key
@@ -406,7 +406,7 @@ class Project():
else:
config = self.config
return config._aliases.get(alias)
return _yaml.node_get(config._aliases, str, alias, default_value=None)
# get_alias_uris()
#
@@ -421,7 +421,7 @@ class Project():
else:
config = self.config
if not alias or alias not in config._aliases:
if not alias or not _yaml.node_contains(config._aliases, alias):
return [None]
mirror_list = []
@@ -431,7 +431,7 @@ class Project():
mirror_list = alias_mapping[alias] + mirror_list
else:
mirror_list += alias_mapping[alias]
mirror_list.append(config._aliases[alias])
mirror_list.append(_yaml.node_get(config._aliases, str, alias))
return mirror_list
# load_elements()
@@ -589,20 +589,9 @@ class Project():
self._validate_node(pre_config_node)
# FIXME:
#
# Performing this check manually in the absence
# of proper support from _yaml.node_get(), this should
# be removed in favor of a proper accessor function
# from the _yaml module when #591 is fixed.
#
if self._project_conf.get('name') is None:
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: project.conf does not contain expected key '{}'".format(projectfile, 'name'))
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
self.name = _yaml.node_get(pre_config_node, str, 'name')
self.name = _yaml.node_get(self._project_conf, str, 'name')
# Validate that project name is a valid symbol name
_yaml.assert_symbol_name(_yaml.node_get_provenance(pre_config_node, 'name'),
@@ -772,8 +761,8 @@ class Project():
# assertion after.
output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
config.pop('elements', None)
config.pop('sources', None)
_yaml.node_del(config, 'elements', safe=True)
_yaml.node_del(config, 'sources', safe=True)
_yaml.node_final_assertions(config)
self._load_plugin_factories(config, output)
@@ -809,7 +798,7 @@ class Project():
output.base_variables = _yaml.node_get(config, Mapping, 'variables')
# Add the project name as a default variable
output.base_variables['project-name'] = self.name
_yaml.node_set(output.base_variables, 'project-name', self.name)
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -817,7 +806,7 @@ class Project():
# max-jobs value seems to be around 8-10 if we have enough cores
# users should set values based on workload and build infrastructure
platform = Platform.get_platform()
output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
_yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
@@ -834,7 +823,7 @@ class Project():
_yaml.node_validate(mirror, allowed_mirror_fields)
mirror_name = _yaml.node_get(mirror, str, 'name')
alias_mappings = {}
for alias_mapping, uris in _yaml.node_items(mirror['aliases']):
for alias_mapping, uris in _yaml.node_items(_yaml.node_get(mirror, Mapping, 'aliases')):
assert isinstance(uris, list)
alias_mappings[alias_mapping] = list(uris)
output.mirrors[mirror_name] = alias_mappings
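node_items() is the Node analogue of dict.items(), here fed a typed sub-node instead of the old mirror['aliases'] lookup. A sketch, assuming a mirror node whose 'aliases' mapping takes alias names to URI lists:

aliases = _yaml.node_get(mirror, dict, 'aliases')
for alias, uris in _yaml.node_items(aliases):
    # Each alias maps to a list of mirror URIs
    alias_mappings[alias] = list(uris)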
@@ -897,11 +886,12 @@ class Project():
allowed_origins = ['core', 'local', 'pip']
_yaml.node_validate(origin, allowed_origin_fields)
if origin['origin'] not in allowed_origins:
origin_value = _yaml.node_get(origin, str, 'origin')
if origin_value not in allowed_origins:
raise LoadError(
LoadErrorReason.INVALID_YAML,
"Origin '{}' is not one of the allowed types"
.format(origin['origin']))
.format(origin_value))
# Store source versions for checking later
source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
@@ -940,11 +930,11 @@ class Project():
# Helper function to store plugin origins
#
# Args:
# origin (dict) - a dictionary indicating the origin of a group of
# origin (node) - a node indicating the origin of a group of
# plugins.
# plugin_group (str) - The name of the type of plugin that is being
# loaded
# destination (list) - A list of dicts to store the origins in
# destination (list) - A list of nodes to store the origins in
#
# Raises:
# LoadError if 'origin' is an unexpected value
@@ -954,19 +944,21 @@ class Project():
raise LoadError(LoadErrorReason.INVALID_DATA,
"Unexpected plugin group: {}, expecting {}"
.format(plugin_group, expected_groups))
if plugin_group in origin:
origin_dict = _yaml.node_copy(origin)
node_keys = [key for key, _ in _yaml.node_items(origin)]
if plugin_group in node_keys:
origin_node = _yaml.node_copy(origin)
plugins = _yaml.node_get(origin, Mapping, plugin_group, default_value={})
origin_dict['plugins'] = [k for k, _ in _yaml.node_items(plugins)]
_yaml.node_set(origin_node, 'plugins', [k for k, _ in _yaml.node_items(plugins)])
for group in expected_groups:
if group in origin_dict:
del origin_dict[group]
if origin_dict['origin'] == 'local':
if _yaml.node_contains(origin_node, group):
_yaml.node_del(origin_node, group)
if _yaml.node_get(origin_node, str, 'origin') == 'local':
path = self.get_path_from_node(origin, 'path',
check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
origin_dict['path'] = os.path.join(self.directory, path)
destination.append(origin_dict)
_yaml.node_set(origin_node, 'path', os.path.join(self.directory, path))
destination.append(origin_node)
# _warning_is_fatal():
#
......
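Origin nodes are now copied and mutated through the Node API rather than by plain key assignment. A condensed sketch of the copy/set/test/delete calls used above, with 'elements' and the plugin list as illustrative values:

origin_node = _yaml.node_copy(origin)               # copy a node
_yaml.node_set(origin_node, 'plugins', ['mykind'])  # assign a key
if _yaml.node_contains(origin_node, 'elements'):    # test membership
    _yaml.node_del(origin_node, 'elements')         # delete a key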
@@ -61,7 +61,6 @@ class ProjectRefs():
# options (OptionPool): To resolve conditional statements
#
def load(self, options):
try:
self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
provenance = _yaml.node_get_provenance(self._toplevel_node)
@@ -80,22 +79,15 @@ class ProjectRefs():
# Ignore failure if the file doesn't exist, it'll be created and
# for now just assumed to be empty
self._toplevel_node = {}
self._toplevel_node = _yaml.new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
_yaml.node_validate(self._toplevel_node, ['projects'])
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
if 'projects' not in node:
node['projects'] = {}
# save()
#
# Save the project.refs file with any local changes
#
def save(self):
_yaml.dump(self._toplevel_save, self._fullpath)
if not _yaml.node_contains(node, 'projects'):
_yaml.node_set(node, 'projects', _yaml.new_empty_node(ref_node=node))
# lookup_ref()
#
@@ -117,11 +109,6 @@ class ProjectRefs():
if write:
if node is not None:
provenance = _yaml.node_get_provenance(node)
if provenance:
node = provenance.node
# If we couldn't find the original, create a new one.
#
if node is None:
@@ -134,22 +121,24 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
# Fetch the project
try:
project_node = toplevel['projects'][project]
except KeyError:
projects = _yaml.node_get(toplevel, dict, 'projects')
project_node = _yaml.node_get(projects, dict, project)
except LoadError:
if not ensure:
return None
project_node = toplevel['projects'][project] = {}
project_node = _yaml.new_empty_node(ref_node=projects)
_yaml.node_set(projects, project, project_node)
# Fetch the element
try:
element_list = project_node[element]
except KeyError:
element_list = _yaml.node_get(project_node, list, element)
except LoadError:
if not ensure:
return None
element_list = project_node[element] = []
element_list = []
_yaml.node_set(project_node, element, element_list)
# Fetch the source index
try:
@@ -159,8 +148,8 @@ class ProjectRefs():
return None
# Pad the list with newly created empty dictionaries
element_list.extend({} for _ in range(len(element_list), source_index + 1))
_yaml.node_extend_list(project_node, element, source_index + 1, {})
node = element_list[source_index]
node = _yaml.node_get(project_node, dict, element, indices=[source_index])
return node
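node_extend_list() pads the list stored under a key up to a given length with copies of a template value, replacing the manual element_list.extend() above. A sketch; the real key is the element name, 'sources' below is purely illustrative:

# Ensure the list has at least source_index + 1 entries,
# padding with empty mapping templates
_yaml.node_extend_list(project_node, 'sources', source_index + 1, {})
node = _yaml.node_get(project_node, dict, 'sources', indices=[source_index])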
@@ -118,7 +118,7 @@ class Variables():
# Initialize it as a string as all variables are processed as strings.
#
if _yaml.node_get(node, bool, 'notparallel', default_value=False):
node['max-jobs'] = str(1)
_yaml.node_set(node, 'max-jobs', str(1))
ret = {}
for key, value in _yaml.node_items(node):
......
@@ -114,7 +114,7 @@ class WorkspaceProject():
def load(cls, directory):
workspace_file = os.path.join(directory, WORKSPACE_PROJECT_FILE)
if os.path.exists(workspace_file):
data_dict = _yaml.load(workspace_file)
data_dict = _yaml.node_sanitize(_yaml.roundtrip_load(workspace_file), dict_type=dict)
return cls.from_dict(directory, data_dict)
else:
return None
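roundtrip_load() parses a YAML file while preserving its formatting, and node_sanitize(..., dict_type=dict) flattens the result into builtin dicts for consumers that still expect plain data. A sketch:

# Parse, then flatten the Node into ordinary dicts and lists
raw = _yaml.roundtrip_load(workspace_file)
data_dict = _yaml.node_sanitize(raw, dict_type=dict)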
@@ -417,7 +417,7 @@ class Workspaces():
# A tuple in the following format: (str, Workspace), where the
# first element is the name of the workspaced element.
def list(self):
for element, _ in _yaml.node_items(self._workspaces):
for element in self._workspaces.keys():
yield (element, self._workspaces[element])
# create_workspace()
@@ -526,12 +526,11 @@ class Workspaces():
'format-version': BST_WORKSPACE_FORMAT_VERSION,
'workspaces': {
element: workspace.to_dict()
for element, workspace in _yaml.node_items(self._workspaces)
for element, workspace in self._workspaces.items()
}
}
os.makedirs(self._bst_directory, exist_ok=True)
_yaml.dump(_yaml.node_sanitize(config),
self._get_filename())
_yaml.dump(config, self._get_filename())
# _load_config()
#
@@ -570,16 +569,24 @@ class Workspaces():
# Raises: LoadError if there was a problem with the workspace config
#
def _parse_workspace_config(self, workspaces):
version = _yaml.node_get(workspaces, int, "format-version", default_value=0)
try:
version = _yaml.node_get(workspaces, int, 'format-version', default_value=0)
except ValueError:
raise LoadError(LoadErrorReason.INVALID_DATA,
"Format version is not an integer in workspace configuration")
if version == 0:
# Pre-versioning format can be of two forms
for element, config in _yaml.node_items(workspaces):
if _yaml.is_node(config):
# Get a dict
config = _yaml.node_sanitize(config, dict_type=dict)