Skip to content
Snippets Groups Projects
Commit 02f8d840 authored by Valentin David's avatar Valentin David Committed by Valentin David
Browse files

Invalidate reverse dependencies to scheduled element with workspace

Issues:

Keys would change from one build to another, so building an element
that depends on a workspaced element a second time would fail to find
the artifact that was already built.

The artifact cache would be populated with artifacts under the wrong
key. This is a serious issue where an artifact that was built with a
workspaced dependency would not rebuild after closing the workspace.

Solution:

Elements depending on another element with a workspace which has
been scheduled should not have any keys until we have managed to
build the workspaced element.

Fixes #461.
parent 01c4ac57
Branches
Tags
Loading
Pipeline #28036685 passed
......@@ -194,6 +194,7 @@ class Element(Plugin):
self.__runtime_dependencies = [] # Direct runtime dependency Elements
self.__build_dependencies = [] # Direct build dependency Elements
self.__reverse_dependencies = [] # Direct reverse dependency Elements
self.__sources = [] # List of Sources
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
......@@ -892,9 +893,11 @@ class Element(Plugin):
for meta_dep in meta.dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
element.__runtime_dependencies.append(dependency)
dependency.__reverse_dependencies.append(element)
for meta_dep in meta.build_dependencies:
dependency = Element._new_from_meta(meta_dep, artifacts)
element.__build_dependencies.append(dependency)
dependency.__reverse_dependencies.append(element)
return element
......@@ -1428,6 +1431,16 @@ class Element(Plugin):
self._update_state()
if workspace:
# We need to invalidate reverse dependencies
for reverse_dep in self.__get_reverse_dependencies():
reverse_dep.__cache_key_dict = None
reverse_dep.__cache_key = None
reverse_dep.__weak_cache_key = None
reverse_dep.__strict_cache_key = None
reverse_dep.__strong_cached = None
reverse_dep._update_state()
# _assemble_done():
#
# This is called in the main process after the element has been assembled
......@@ -1465,8 +1478,14 @@ class Element(Plugin):
# This does *not* cause a race condition, because
# _assemble_done is called before a cleanup job may be
# launched.
#
self.__artifacts.append_required_artifacts([self])
required_artifacts = [self]
# Reverse dependencies can now compute their keys
for reverse_dep in self.__get_reverse_dependencies():
reverse_dep._update_state()
required_artifacts.append(reverse_dep)
self.__artifacts.append_required_artifacts(required_artifacts)
# _assemble():
#
......@@ -2598,6 +2617,31 @@ class Element(Plugin):
return utils._deduplicate(keys)
# __get_reverse_dependencies():
#
# Iterates through the closure of reverse dependenices.
#
# Args:
# visited (set): The elements to skip (only for recursion)
# recursed (bool): Whether to emit the current element (only for recursion)
#
# Yields:
# (:class:`.Element`): The reverse dependent elements
def __get_reverse_dependencies(self, *, visited=None, recursed=False):
if visited is None:
visited = set()
full_name = self._get_full_name()
if full_name in visited:
return
if recursed:
yield self
for reverse_dep in self.__reverse_dependencies:
yield from reverse_dep.__get_reverse_dependencies(visited=visited, recursed=True)
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
......
......@@ -767,3 +767,73 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
# Check that workspace config is converted correctly if necessary
loaded_config = _yaml.node_sanitize(_yaml.load(workspace_config_path))
assert loaded_config == parse_dict_as_yaml(expected)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, kind, strict):
    """An element build-depending on a workspaced element must track workspace
    modifications in its cache key and produce a correct artifact.
    """
    checkout_dir = os.path.join(str(tmpdir), 'checkout')
    element_name, project, workspace = open_workspace(
        cli, os.path.join(str(tmpdir), 'repo-a'), datafiles, kind, False)
    element_path = os.path.join(project, 'elements')
    back_dep_element_name = 'workspace-test-{}-back-dep.bst'.format(kind)

    # Write out a target which build-depends on the workspaced element
    target = {
        'kind': 'compose',
        'depends': [
            {
                'filename': element_name,
                'type': 'build'
            }
        ]
    }
    _yaml.dump(target, os.path.join(element_path, back_dep_element_name))

    # Modify the open workspace: remove /usr/bin, add /etc/pony.conf
    shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
    os.makedirs(os.path.join(workspace, 'etc'))
    with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
        f.write("PONY='pink'")

    # Configure strict mode according to the parametrization
    cli.configure({
        'projects': {
            'test': {
                'strict': strict == 'strict'
            }
        }
    })

    # Before building, neither element can have a resolved cache key
    invalid_key = "{:?<64}".format('')
    assert cli.get_element_state(project, element_name) == 'buildable'
    assert cli.get_element_key(project, element_name) == invalid_key
    assert cli.get_element_state(project, back_dep_element_name) == 'waiting'
    assert cli.get_element_key(project, back_dep_element_name) == invalid_key

    # Build the artifact against the dependency's modified workspace
    result = cli.run(project=project, args=['build', back_dep_element_name])
    result.assert_success()

    # Both elements are now cached, with real keys
    assert cli.get_element_state(project, element_name) == 'cached'
    assert cli.get_element_key(project, element_name) != invalid_key
    assert cli.get_element_state(project, back_dep_element_name) == 'cached'
    assert cli.get_element_key(project, back_dep_element_name) != invalid_key

    # Building a second time should also succeed (artifact is found)
    result = cli.run(project=project, args=['build', back_dep_element_name])
    result.assert_success()

    # Checkout the result
    result = cli.run(project=project, args=[
        'checkout', back_dep_element_name, checkout_dir
    ])
    result.assert_success()

    # The pony.conf from the modified workspace made it into the artifact...
    assert os.path.exists(os.path.join(checkout_dir, 'etc', 'pony.conf'))

    # ...and the original /usr/bin/hello did not
    assert not os.path.exists(os.path.join(checkout_dir, 'usr', 'bin', 'hello'))
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment