Compare revisions
Commits on Source (6)
@@ -51,7 +51,7 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl
         url = _yaml.node_get(spec_node, str, 'url')
         push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
         if not url:
-            provenance = _yaml.node_get_provenance(spec_node)
+            provenance = _yaml.node_get_provenance(spec_node, 'url')
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: empty artifact cache URL".format(provenance))
@@ -67,6 +67,16 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl
         if client_cert and basedir:
             client_cert = os.path.join(basedir, client_cert)
 
+        if client_key and not client_cert:
+            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
+
+        if client_cert and not client_key:
+            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
+
         return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert)
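For reference, the pairing rule added above can be read on its own: if exactly one of
'client-key' and 'client-cert' is present, loading the artifact cache configuration now
fails with a clear LoadError instead of an unhandled exception later on. A minimal
standalone sketch of the same rule, using a plain dict and ValueError in place of
BuildStream's parsed YAML node and LoadError (the names below are illustrative, not
part of this diff):

    # Illustrative sketch only; not BuildStream code.
    def check_client_auth_pair(spec):
        client_key = spec.get('client-key')
        client_cert = spec.get('client-cert')
        if client_key and not client_cert:
            raise ValueError("'client-key' was specified without 'client-cert'")
        if client_cert and not client_key:
            raise ValueError("'client-cert' was specified without 'client-key'")

    try:
        check_client_auth_pair({'url': 'https://cache.example.com:12345', 'client-key': 'client.key'})
    except ValueError as err:
        print(err)  # 'client-key' was specified without 'client-cert'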
@@ -91,6 +101,7 @@ class ArtifactCache():
         self._cache_size = None              # The current cache size, sometimes it's an estimate
         self._cache_quota = None             # The cache quota
         self._cache_lower_threshold = None   # The target cache size for a cleanup
+        self._remotes_setup = False          # Check to prevent double-setup of remotes
 
         os.makedirs(self.extractdir, exist_ok=True)
         os.makedirs(self.tmpdir, exist_ok=True)
@@ -143,6 +154,10 @@ class ArtifactCache():
     #
     def setup_remotes(self, *, use_config=False, remote_url=None):
 
+        # Ensure we do not double-initialise since this can be expensive
+        assert(not self._remotes_setup)
+        self._remotes_setup = True
+
         # Initialize remote artifact caches. We allow the commandline to override
         # the user config in some cases (for example `bst push --remote=...`).
         has_remote_caches = False
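The assert above turns a second call of setup_remotes() into an immediate failure rather
than a silent, expensive re-initialisation. A self-contained sketch of that one-shot
guard pattern (the class and names are simplified stand-ins, not the real ArtifactCache):

    # Illustrative one-shot guard; not BuildStream code.
    class OneShotRemotes:
        def __init__(self):
            self._remotes_setup = False   # Check to prevent double-setup of remotes

        def setup_remotes(self, *, use_config=False, remote_url=None):
            # Ensure we do not double-initialise since this can be expensive
            assert not self._remotes_setup
            self._remotes_setup = True
            # ... remote initialisation would happen here ...

    remotes = OneShotRemotes()
    remotes.setup_remotes(use_config=True)     # first call: sets up remotes
    # remotes.setup_remotes(use_config=True)   # a second call would raise AssertionError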
...
@@ -27,7 +27,7 @@ from . import Sandbox
 from ..storage._filebaseddirectory import FileBasedDirectory
 from ..storage._casbaseddirectory import CasBasedDirectory
 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
-from .._artifactcache.cascache import CASCache
+from .._platform import Platform
 
 
 class SandboxError(Exception):
@@ -43,7 +43,6 @@ class SandboxRemote(Sandbox):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
-        self.cascache = None
         url = urlparse(kwargs['server_url'])
         if not url.scheme or not url.hostname or not url.port:
@@ -56,12 +55,6 @@ class SandboxRemote(Sandbox):
         self.server_url = '{}:{}'.format(url.hostname, url.port)
 
-    def _get_cascache(self):
-        if self.cascache is None:
-            self.cascache = CASCache(self._get_context())
-            self.cascache.setup_remotes(use_config=True)
-        return self.cascache
-
     def run_remote_command(self, command, input_root_digest, working_directory, environment):
         # Sends an execution request to the remote execution server.
         #
@@ -78,8 +71,8 @@ class SandboxRemote(Sandbox):
                                                output_files=[],
                                                output_directories=[self._output_directory],
                                                platform=None)
-
-        cascache = self._get_cascache()
+        platform = Platform.get_platform()
+        cascache = platform.artifactcache
         # Upload the Command message to the remote CAS server
         command_digest = cascache.push_message(self._get_project(), remote_command)
         if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
@@ -141,7 +134,8 @@ class SandboxRemote(Sandbox):
         if tree_digest is None or not tree_digest.hash:
             raise SandboxError("Output directory structure had no digest attached.")
 
-        cascache = self._get_cascache()
+        platform = Platform.get_platform()
+        cascache = platform.artifactcache
         # Now do a pull to ensure we have the necessary parts.
         dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
         if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:
@@ -176,7 +170,8 @@ class SandboxRemote(Sandbox):
         upload_vdir.recalculate_hash()
 
-        cascache = self._get_cascache()
+        platform = Platform.get_platform()
+        cascache = platform.artifactcache
         # Now, push that key (without necessarily needing a ref) to the remote.
         vdir_digest = cascache.push_directory(self._get_project(), upload_vdir)
         if not vdir_digest or not cascache.verify_digest_pushed(self._get_project(), vdir_digest):
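The hunks above all apply the same change: SandboxRemote no longer lazily constructs and
caches its own CASCache, it asks the platform singleton for the one artifact cache the
process already owns. A self-contained sketch of that shape (the class names below are
simplified stand-ins, not BuildStream's):

    # Illustrative stand-ins; not BuildStream code.
    class FakeArtifactCache:
        def pull_tree(self, project, digest):
            return digest   # stand-in for a real CAS pull

    class FakePlatform:
        _instance = None

        def __init__(self):
            self.artifactcache = FakeArtifactCache()

        @classmethod
        def get_platform(cls):
            if cls._instance is None:
                cls._instance = cls()
            return cls._instance

    # Every caller sees the same shared cache instead of building a private one.
    cascache = FakePlatform.get_platform().artifactcache
    assert cascache is FakePlatform.get_platform().artifactcache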
...
@@ -9,8 +9,12 @@ from buildstream._context import Context
 from buildstream._project import Project
 from buildstream.utils import _deduplicate
 from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
+from tests.testutils.runcli import cli
+
+DATA_DIR = os.path.dirname(os.path.realpath(__file__))
 
 cache1 = ArtifactCacheSpec(url='https://example.com/cache1', push=True)
 cache2 = ArtifactCacheSpec(url='https://example.com/cache2', push=False)
 cache3 = ArtifactCacheSpec(url='https://example.com/cache3', push=False)
@@ -106,3 +110,33 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
     # Verify that it was correctly read.
     expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
     assert parsed_cache_specs == expected_cache_specs
+
+
+# Assert that if either the client key or client cert is specified
+# without specifying its counterpart, we get a comprehensive LoadError
+# instead of an unhandled exception.
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('config_key, config_value', [
+    ('client-cert', 'client.crt'),
+    ('client-key', 'client.key')
+])
+def test_missing_certs(cli, datafiles, config_key, config_value):
+    project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
+
+    project_conf = {
+        'name': 'test',
+
+        'artifacts': {
+            'url': 'https://cache.example.com:12345',
+            'push': 'true',
+            config_key: config_value
+        }
+    }
+    project_conf_file = os.path.join(project, 'project.conf')
+    _yaml.dump(project_conf, project_conf_file)
+
+    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
+    #
+    # This does not happen for a simple `bst show`.
+    result = cli.run(project=project, args=['pull', 'element.bst'])
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
kind: autotools
@@ -137,7 +137,6 @@ def _test_pull(user_config_file, project_dir, artifact_dir,
     # Manually setup the CAS remote
     cas.setup_remotes(use_config=True)
-    cas.initialize_remotes()
 
     if cas.has_push_remotes(element=element):
         # Push the element's artifact
@@ -274,7 +273,6 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
     # Manually setup the CAS remote
     cas.setup_remotes(use_config=True)
-    cas.initialize_remotes()
 
     if cas.has_push_remotes():
         directory = remote_execution_pb2.Directory()
@@ -310,7 +308,6 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest
     # Manually setup the CAS remote
     cas.setup_remotes(use_config=True)
-    cas.initialize_remotes()
 
     if cas.has_push_remotes():
         # Pull the artifact using the Tree object
...