Commit af10c1ba authored by Valentin David, committed by Tristan Van Berkom

Interpret names as colon-separated junction paths in the loader.

'a.bst:b.bst' gets interpreted as 'b.bst' from junction 'a.bst'.

Part of #359.
parent 8f2bf4e6
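As a rough illustration of the naming scheme (a sketch, not the loader code itself): the name is split at most once, on the last colon, so a plain element name passes through unchanged, while anything before the colon is treated as a junction element in the current project. Nested junction paths are not supported.

# Minimal sketch of the target-name interpretation; split_target is a
# hypothetical helper, not part of the loader.
def split_target(name):
    parts = name.rsplit(':', 1)
    if len(parts) == 1:
        return None, parts[0]       # plain element in this project
    return parts[0], parts[1]       # (junction element, element in the sub-project)

assert split_target('b.bst') == (None, 'b.bst')
assert split_target('a.bst:b.bst') == ('a.bst', 'b.bst')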
@@ -107,9 +107,13 @@ class Loader():
# First pass, recursively load files and populate our table of LoadElements
#
deps = []
for target in self._targets:
profile_start(Topics.LOAD_PROJECT, target)
self._load_file(target, rewritable, ticker)
junction, name, loader = self._parse_name(target, rewritable, ticker)
loader._load_file(name, rewritable, ticker)
deps.append(Dependency(name, junction=junction))
profile_end(Topics.LOAD_PROJECT, target)
#
@@ -119,7 +123,8 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
dummy = DummyTarget(name='', full_name='', deps=[Dependency(e) for e in self._targets])
dummy = DummyTarget(name='', full_name='', deps=deps)
self._elements[''] = dummy
profile_key = "_".join(t for t in self._targets)
@@ -127,17 +132,20 @@ class Loader():
self._check_circular_deps('')
profile_end(Topics.CIRCULAR_CHECK, profile_key)
ret = []
#
# Sort direct dependencies of elements by their dependency ordering
#
for target in self._targets:
profile_start(Topics.SORT_DEPENDENCIES, target)
self._sort_dependencies(target)
junction, name, loader = self._parse_name(target, rewritable, ticker)
loader._sort_dependencies(name)
profile_end(Topics.SORT_DEPENDENCIES, target)
# Finally, wrap what we have into LoadElements and return the target
#
ret.append(loader._collect_element(name))
# Finally, wrap what we have into LoadElements and return the target
#
return [self._collect_element(target) for target in self._targets]
return ret
# cleanup():
#
@@ -554,3 +562,30 @@ class Loader():
return self._loaders[dep.junction]
else:
return self
# _parse_name():
#
# Get the junction name and base element name, along with the loader for the sub-project
#
# Args:
# name (str): Name of target
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
# ticker (callable): An optional function for tracking load progress
#
# Returns:
# (tuple): - (str): name of the junction element
# - (str): name of the element
# - (Loader): loader for sub-project
#
def _parse_name(self, name, rewritable, ticker):
# We only allow splitting once, since deep junction names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions in the top-level project.
junction_path = name.rsplit(':', 1)
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
self._load_file(junction_path[-2], rewritable, ticker)
loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
return junction_path[-2], junction_path[-1], loader
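To make the return value concrete, here is a hedged usage sketch of the two cases, assuming a top-level Loader instance called loader (the variable name is illustrative):

# Plain element name: no junction, handled by this project's own loader
junction, name, sub_loader = loader._parse_name('target.bst', rewritable=False, ticker=None)
# junction is None, name is 'target.bst', sub_loader is loader itself

# Cross-junction name: the junction element is loaded first, then the
# sub-project's loader is returned to resolve 'import-etc.bst' within it
junction, name, sub_loader = loader._parse_name('junction.bst:import-etc.bst', rewritable=False, ticker=None)
# junction is 'junction.bst', name is 'import-etc.bst'

# A doubly nested name such as 'a.bst:b.bst:c.bst' is not supported: the split
# happens only once, so 'a.bst:b.bst' would be treated as the junction element
# name and fail to load.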
@@ -390,3 +390,22 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
with open(filename, 'r') as f:
contents = f.read()
assert contents == 'animal=Horsy\n'
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_cross_junction(datafiles, cli, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
checkout = os.path.join(cli.directory, 'checkout')
generate_junction(tmpdir, subproject_path, junction_path)
result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
result.assert_success()
result = cli.run(project=project, args=['checkout', 'junction.bst:import-etc.bst', checkout])
result.assert_success()
filename = os.path.join(checkout, 'etc', 'animal.conf')
assert os.path.exists(filename)
@@ -157,3 +157,41 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# informing the user to track the junction first
result = cli.run(project=project, args=['fetch', 'junction-dep.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
repo = create_repo(kind, str(tmpdir.join('import-etc')))
ref = repo.create(etc_files_path)
element = {
'kind': 'import',
'sources': [
repo.source_config(ref=(ref if ref_storage == 'inline' else None))
]
}
_yaml.dump(element, import_etc_path)
configure_project(project, {
'ref-storage': ref_storage
})
generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
if ref_storage == 'project.refs':
result = cli.run(project=project, args=['track', 'junction.bst'])
result.assert_success()
result = cli.run(project=project, args=['track', 'junction.bst:import-etc.bst'])
result.assert_success()
result = cli.run(project=project, args=['fetch', 'junction.bst:import-etc.bst'])
result.assert_success()
@@ -4,6 +4,8 @@ import pytest
from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX
from . import generate_junction
# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -276,3 +278,35 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
result = cli.run(project=project, args=['build', '--track-all', '--all', 'target.bst'])
result.assert_success()
assert set(result.get_pulled_elements()) == all_elements
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
# First build the target element and push to the remote.
cli.configure({
'artifacts': {'url': share.repo, 'push': True}
})
result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
result.assert_success()
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
cache_dir = os.path.join(project, 'cache', 'artifacts')
shutil.rmtree(cache_dir)
share.update_summary()
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
# Now try bst pull
result = cli.run(project=project, args=['pull', 'junction.bst:import-etc.bst'])
result.assert_success()
# And assert that it's again in the local cache, without having built
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
@@ -7,6 +7,8 @@ from unittest.mock import MagicMock
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli, create_artifact_share, create_element_size
from tests.testutils.site import IS_LINUX
from . import configure_project, generate_junction
# Project directory
DATA_DIR = os.path.join(
@@ -377,3 +379,26 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
# Ensure that element2 was deleted from the share and element1 remains
assert_not_shared(cli, share, project, 'element2.bst')
assert_shared(cli, share, project, 'element1.bst')
@pytest.mark.datafiles(DATA_DIR)
def test_push_cross_junction(cli, tmpdir, datafiles):
project = str(datafiles)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
result.assert_success()
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
cli.configure({
'artifacts': {'url': share.repo, 'push': True},
})
result = cli.run(project=project, args=['push', 'junction.bst:import-etc.bst'])
cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
assert share.has_artifact('subtest', 'import-etc.bst', cache_key)
@@ -111,7 +111,8 @@ def test_target_is_dependency(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
@pytest.mark.parametrize("element_name", ['junction-dep.bst', 'junction.bst:import-etc.bst'])
def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name):
project = os.path.join(datafiles.dirname, datafiles.basename)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
@@ -155,14 +156,15 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
# Assert the correct error when trying to show the pipeline
result = cli.run(project=project, silent=True, args=[
'show', 'junction-dep.bst'])
'show', element_name])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_FETCH_NEEDED)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
@pytest.mark.parametrize("element_name", ['junction-dep.bst', 'junction.bst:import-etc.bst'])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, element_name):
project = os.path.join(datafiles.dirname, datafiles.basename)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
@@ -190,6 +192,43 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# Assert the correct error when trying to show the pipeline
result = cli.run(project=project, silent=True, args=[
'show', 'junction-dep.bst'])
'show', element_name])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("element_name", ['junction-dep.bst', 'junction.bst:import-etc.bst'])
def test_fetched_junction(cli, tmpdir, datafiles, element_name):
project = os.path.join(datafiles.dirname, datafiles.basename)
subproject_path = os.path.join(project, 'files', 'sub-project')
junction_path = os.path.join(project, 'elements', 'junction.bst')
element_path = os.path.join(project, 'elements', 'junction-dep.bst')
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
# Create a stack element to depend on a cross junction element
#
element = {
'kind': 'stack',
'depends': [
{
'junction': 'junction.bst',
'filename': 'import-etc.bst'
}
]
}
_yaml.dump(element, element_path)
result = cli.run(project=project, silent=True, args=[
'fetch', 'junction.bst'])
result.assert_success()
# Assert the correct error when trying to show the pipeline
result = cli.run(project=project, silent=True, args=[
'show', '--format', '%{name}-%{state}', element_name])
results = result.output.strip().splitlines()
assert 'junction.bst:import-etc.bst-buildable' in results
@@ -260,3 +260,43 @@ def test_git_build(cli, tmpdir, datafiles):
# Check that the checkout contains the expected files from both projects
assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
assert(os.path.exists(os.path.join(checkoutdir, 'foo.txt')))
@pytest.mark.datafiles(DATA_DIR)
def test_cross_junction_names(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), 'foo')
copy_subprojects(project, datafiles, ['base'])
element_list = cli.get_pipeline(project, ['base.bst:target.bst'])
assert 'base.bst:target.bst' in element_list
@pytest.mark.datafiles(DATA_DIR)
def test_build_git_cross_junction_names(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), 'foo')
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the repo from 'base' subdir
repo = create_repo('git', str(tmpdir))
ref = repo.create(os.path.join(str(datafiles), 'base'))
# Write out junction element with git source
element = {
'kind': 'junction',
'sources': [
repo.source_config(ref=ref)
]
}
_yaml.dump(element, os.path.join(project, 'base.bst'))
# Build (with implicit fetch of subproject), checkout
result = cli.run(project=project, args=['build', 'base.bst:target.bst'])
assert result.exit_code == 0
result = cli.run(project=project, args=['checkout', 'base.bst:target.bst', checkoutdir])
assert result.exit_code == 0
# Check that the checkout contains the expected files from both projects
assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))