Compare revisions

Commits on Source (70)
Showing with 209 additions and 85 deletions
@@ -34,3 +34,4 @@ doc/source/modules.rst
 doc/source/buildstream.rst
 doc/source/buildstream.*.rst
 doc/build/
+versioneer.pyc
-image: buildstream/testsuite-debian:9-master-114-4cab18e3
+image: buildstream/testsuite-debian:9-master-119-552f5fc6
 cache:
   key: "$CI_JOB_NAME-"
@@ -78,7 +78,7 @@ source_dist:
     # Go back to the toplevel and collect our reports
     - cd ../..
     - mkdir -p coverage-linux/
-    - cp dist/buildstream/.coverage.* coverage-linux/coverage."${CI_JOB_NAME}"
+    - cp dist/buildstream/.coverage coverage-linux/coverage."${CI_JOB_NAME}"
   except:
   - schedules
   artifacts:
@@ -86,25 +86,25 @@ source_dist:
     - coverage-linux/
 tests-debian-9:
-  image: buildstream/testsuite-debian:9-master-117-aa3a33b3
+  image: buildstream/testsuite-debian:9-master-119-552f5fc6
   <<: *linux-tests
 tests-fedora-27:
-  image: buildstream/testsuite-fedora:27-master-117-aa3a33b3
+  image: buildstream/testsuite-fedora:27-master-119-552f5fc6
   <<: *linux-tests
 tests-fedora-28:
-  image: buildstream/testsuite-fedora:28-master-117-aa3a33b3
+  image: buildstream/testsuite-fedora:28-master-119-552f5fc6
   <<: *linux-tests
 tests-ubuntu-18.04:
-  image: buildstream/testsuite-ubuntu:18.04-master-117-aa3a33b3
+  image: buildstream/testsuite-ubuntu:18.04-master-119-552f5fc6
   <<: *linux-tests
 tests-unix:
   # Use fedora here, to a) run a test on fedora and b) ensure that we
   # can get rid of ostree - this is not possible with debian-8
-  image: buildstream/testsuite-fedora:27-master-117-aa3a33b3
+  image: buildstream/testsuite-fedora:27-master-119-552f5fc6
   stage: test
   variables:
     BST_FORCE_BACKEND: "unix"
@@ -128,7 +128,7 @@ tests-unix:
     # Go back to the toplevel and collect our reports
     - cd ../..
     - mkdir -p coverage-unix/
-    - cp dist/buildstream/.coverage.* coverage-unix/coverage.unix
+    - cp dist/buildstream/.coverage coverage-unix/coverage.unix
   except:
   - schedules
   artifacts:
...
@@ -8,19 +8,27 @@ include README.rst
 # Documentation package includes
 include doc/Makefile
+include doc/badges.py
+include doc/bst2html.py
 include doc/source/conf.py
-include doc/source/index.rst
+include doc/source/plugin.rsttemplate
+recursive-include doc/source *.rst
+recursive-include doc/source *.py
+recursive-include doc/source *.in
+recursive-include doc/source *.html
+recursive-include doc/examples *
 # Tests
-recursive-include tests *.py
-recursive-include tests *.yaml
-recursive-include tests *.bst
-recursive-include tests *.conf
-recursive-include tests *.sh
-recursive-include tests *.expected
+recursive-include tests *
+include conftest.py
+include .coveragerc
+include .pylintrc
 # Protocol Buffers
 recursive-include buildstream/_protos *.proto
 # Requirements files
 include dev-requirements.txt
+# Versioneer
+include versioneer.py
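
The broadened MANIFEST.in globs are easiest to sanity-check by listing the generated sdist. A minimal sketch, not part of this change; the archive name below is illustrative:

    # Illustrative check that the new MANIFEST.in entries land in the sdist.
    import tarfile

    with tarfile.open("dist/BuildStream-1.3.0.tar.gz") as sdist:  # name is hypothetical
        names = sdist.getnames()

    assert any(name.endswith("versioneer.py") for name in names)
    assert any(name.endswith("conftest.py") for name in names)
    assert any("/doc/examples/" in name for name in names)
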
@@ -156,7 +156,7 @@ class ArtifactCache():
     def setup_remotes(self, *, use_config=False, remote_url=None):
         # Ensure we do not double-initialise since this can be expensive
-        assert(not self._remotes_setup)
+        assert not self._remotes_setup
         self._remotes_setup = True
         # Initialize remote artifact caches. We allow the commandline to override
@@ -252,7 +252,7 @@ class ArtifactCache():
     #     (int): The size of the cache after having cleaned up
     #
     def clean(self):
-        artifacts = self.list_artifacts()
+        artifacts = self.list_artifacts()  # pylint: disable=assignment-from-no-return
         # Build a set of the cache keys which are required
         # based on the required elements at cleanup time
@@ -294,7 +294,7 @@ class ArtifactCache():
             if key not in required_artifacts:
                 # Remove the actual artifact, if it's not required.
-                size = self.remove(to_remove)
+                size = self.remove(to_remove)  # pylint: disable=assignment-from-no-return
                 # Remove the size from the removed size
                 self.set_cache_size(self._cache_size - size)
@@ -311,7 +311,7 @@ class ArtifactCache():
     #     (int): The size of the artifact cache.
     #
     def compute_cache_size(self):
-        self._cache_size = self.calculate_cache_size()
+        self._cache_size = self.calculate_cache_size()  # pylint: disable=assignment-from-no-return
         return self._cache_size
...
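
The new `# pylint: disable=assignment-from-no-return` comments are needed because these methods have no return value on the base class and only return something in the backend subclass. A minimal sketch of the pattern pylint complains about; the class names here are illustrative, not BuildStream's:

    # Illustrative only: pylint infers Base.compute() returns nothing and flags
    # the assignment, even though the subclass actually returns a value.
    class Base:
        def compute(self):
            pass            # no return here; subclasses return a value

    class Backend(Base):
        def compute(self):
            return 42

    def use(obj):
        value = obj.compute()  # pylint: disable=assignment-from-no-return
        return value

    assert use(Backend()) == 42
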
@@ -33,11 +33,11 @@ import grpc
 from .. import _yaml
+from .._protos.google.rpc import code_pb2
 from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
-from .._message import MessageType, Message
 from .. import _signals, utils
 from .._exceptions import ArtifactError
@@ -81,8 +81,9 @@ class CASCache(ArtifactCache):
     ################################################
     def preflight(self):
-        if (not os.path.isdir(os.path.join(self.casdir, 'refs', 'heads')) or
-                not os.path.isdir(os.path.join(self.casdir, 'objects'))):
+        headdir = os.path.join(self.casdir, 'refs', 'heads')
+        objdir = os.path.join(self.casdir, 'objects')
+        if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
             raise ArtifactError("CAS repository check failed for '{}'"
                                 .format(self.casdir))
@@ -918,7 +919,7 @@ class CASCache(ArtifactCache):
                 # Skip download, already in local cache.
                 pass
             elif (digest.size_bytes >= remote.max_batch_total_size_bytes or
                   not remote.batch_read_supported):
                 # Too large for batch request, download in independent request.
                 self._ensure_blob(remote, digest)
                 in_local_cache = True
@@ -958,7 +959,7 @@ class CASCache(ArtifactCache):
         batch = _CASBatchRead(remote)
         while len(fetch_queue) + len(fetch_next_queue) > 0:
-            if len(fetch_queue) == 0:
+            if not fetch_queue:
                 batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
             dir_digest = fetch_queue.pop(0)
@@ -1087,6 +1088,10 @@ class _CASRemote():
         self.bytestream = None
         self.cas = None
         self.ref_storage = None
+        self.batch_update_supported = None
+        self.batch_read_supported = None
+        self.capabilities = None
+        self.max_batch_total_size_bytes = None
     def init(self):
         if not self._initialized:
@@ -1191,13 +1196,13 @@ class _CASBatchRead():
         assert not self._sent
         self._sent = True
-        if len(self._request.digests) == 0:
+        if not self._request.digests:
             return
         batch_response = self._remote.cas.BatchReadBlobs(self._request)
         for response in batch_response.responses:
-            if response.status.code != grpc.StatusCode.OK.value[0]:
+            if response.status.code != code_pb2.OK:
                 raise ArtifactError("Failed to download blob {}: {}".format(
                     response.digest.hash, response.status.code))
             if response.digest.size_bytes != len(response.data):
@@ -1236,13 +1241,13 @@ class _CASBatchUpdate():
         assert not self._sent
         self._sent = True
-        if len(self._request.requests) == 0:
+        if not self._request.requests:
             return
         batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
         for response in batch_response.responses:
-            if response.status.code != grpc.StatusCode.OK.value[0]:
+            if response.status.code != code_pb2.OK:
                 raise ArtifactError("Failed to upload blob {}: {}".format(
                     response.digest.hash, response.status.code))
...
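
The per-blob status in the BatchReadBlobs/BatchUpdateBlobs responses is a google.rpc.Status, so its `code` field is now compared against the google.rpc code constant rather than an index into the client-side gRPC enum. Both happen to be the integer 0; a small sketch of the distinction, assuming the generated protos are importable as in the patch:

    # Illustrative: code_pb2.OK is the wire-level google.rpc.Code value, while
    # grpc.StatusCode.OK is a client-side enum whose value is a (code, name) tuple.
    import grpc
    from buildstream._protos.google.rpc import code_pb2

    assert code_pb2.OK == 0
    assert grpc.StatusCode.OK.value[0] == 0  # same integer, but the wrong namespace
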
@@ -364,7 +364,6 @@ class Context():
         assert self._message_handler
         self._message_handler(message, context=self)
-        return
     # silence()
     #
...
@@ -20,7 +20,6 @@
 from contextlib import contextmanager
 import os
 import sys
-import resource
 import traceback
 import datetime
 from textwrap import TextWrapper
...
@@ -662,6 +662,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                         tar=tar)
+
+##################################################################
+# Source Checkout Command #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--except', 'except_', multiple=True,
+              type=click.Path(readable=False),
+              help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+              type=click.Choice(['build', 'none', 'run', 'all']),
+              help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', default=False, is_flag=True,
+              help='Fetch elements if they are not fetched')
+@click.argument('element',
+                type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, deps, fetch, except_):
+    """Checkout sources of an element to the specified location
+    """
+    with app.initialized():
+        app.stream.source_checkout(element,
+                                   location=location,
+                                   deps=deps,
+                                   fetch=fetch,
+                                   except_targets=except_)
+
 ##################################################################
 # Workspace Command #
 ##################################################################
...
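
The new `source-checkout` command simply forwards to `Stream.source_checkout()`. A hypothetical invocation from a test using click's CliRunner; the element and directory names are made up, and a BuildStream project is assumed in the current working directory:

    # Hypothetical usage sketch for the new command (not part of the patch).
    from click.testing import CliRunner
    from buildstream._frontend.cli import cli

    runner = CliRunner()
    result = runner.invoke(cli, [
        'source-checkout', '--deps', 'none', '--fetch',
        'hello.bst', 'source-checkout-dir/',
    ])
    print(result.exit_code, result.output)
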
@@ -18,8 +18,8 @@
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
 import os
 import sys
-import click
 import curses
+import click
 # Import a widget internal for formatting time codes
 from .widget import TimeCode
...
@@ -42,9 +42,11 @@ from .mount import Mount
 #
 class SafeHardlinks(Mount):
-    def __init__(self, directory, tempdir, fuse_mount_options={}):
+    def __init__(self, directory, tempdir, fuse_mount_options=None):
         self.directory = directory
         self.tempdir = tempdir
+        if fuse_mount_options is None:
+            fuse_mount_options = {}
         super().__init__(fuse_mount_options=fuse_mount_options)
     def create_operations(self):
...
@@ -87,8 +87,8 @@ class Mount():
     # User Facing API #
     ################################################
-    def __init__(self, fuse_mount_options={}):
-        self._fuse_mount_options = fuse_mount_options
+    def __init__(self, fuse_mount_options=None):
+        self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
     # mount():
     #
@@ -182,7 +182,7 @@ class Mount():
         # Ask the subclass to give us an Operations object
         #
-        self.__operations = self.create_operations()
+        self.__operations = self.create_operations()  # pylint: disable=assignment-from-no-return
         # Run fuse in foreground in this child process, internally libfuse
         # will handle SIGTERM and gracefully exit its own little main loop.
...
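
Both FUSE classes previously used `{}` as a default argument; switching to `None` avoids Python's shared-mutable-default pitfall, sketched here with a standalone example unrelated to the BuildStream classes:

    # Standalone illustration: a mutable default is created once at definition
    # time and shared by every call.
    def mount_options_bad(extra={}):          # one dict reused across calls
        extra.setdefault('ro', True)
        return extra

    first = mount_options_bad()
    second = mount_options_bad()
    assert first is second                    # both calls mutated the same dict

    def mount_options_good(extra=None):       # the pattern the patch adopts
        extra = {} if extra is None else extra
        extra.setdefault('ro', True)
        return extra

    assert mount_options_good() is not mount_options_good()
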
@@ -146,8 +146,8 @@ def _extract_depends_from_node(node, *, key=None):
     depends = _yaml.node_get(node, list, key, default_value=[])
     output_deps = []
-    for dep in depends:
-        dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[depends.index(dep)])
+    for index, dep in enumerate(depends):
+        dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[index])
         if isinstance(dep, str):
             dependency = Dependency(dep, provenance=dep_provenance, dep_type=default_dep_type)
@@ -177,10 +177,8 @@ def _extract_depends_from_node(node, *, key=None):
                                     provenance=dep_provenance)
         else:
-            index = depends.index(dep)
-            p = _yaml.node_get_provenance(node, key=key, indices=[index])
             raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}: Dependency is not specified as a string or a dictionary".format(p))
+                            "{}: Dependency is not specified as a string or a dictionary".format(dep_provenance))
         output_deps.append(dependency)
...
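
`depends.index(dep)` always returns the first match, so a duplicate dependency entry was reported with the provenance of the first occurrence; `enumerate()` keeps the real position. A standalone illustration, not BuildStream code:

    # list.index() finds the first equal entry, so the duplicate 'base.bst'
    # at position 2 would be attributed to position 0.
    depends = ['base.bst', 'other.bst', 'base.bst']

    wrong = [depends.index(dep) for dep in depends]
    right = [index for index, _ in enumerate(depends)]

    assert wrong == [0, 1, 0]
    assert right == [0, 1, 2]
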
@@ -43,9 +43,9 @@ class OptionBool(Option):
         self.value = _yaml.node_get(node, bool, self.name)
     def set_value(self, value):
-        if value == 'True' or value == 'true':
+        if value in ('True', 'true'):
             self.value = True
-        elif value == 'False' or value == 'false':
+        elif value in ('False', 'false'):
             self.value = False
         else:
             raise LoadError(LoadErrorReason.INVALID_DATA,
...
@@ -370,7 +370,7 @@ class Pipeline():
                 detail += "  Element: {} is inconsistent\n".format(element._get_full_name())
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
-                        detail += "    Source {} is missing ref\n".format(source)
+                        detail += "    {} is missing ref\n".format(source)
                 detail += '\n'
             detail += "Try tracking these elements first with `bst track`\n"
@@ -383,6 +383,33 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
+
+    # assert_sources_cached()
+    #
+    # Asserts that sources for the given list of elements are cached.
+    #
+    # Args:
+    #    elements (list): The list of elements
+    #
+    def assert_sources_cached(self, elements):
+        uncached = []
+        with self._context.timed_activity("Checking sources"):
+            for element in elements:
+                if element._get_consistency() != Consistency.CACHED:
+                    uncached.append(element)
+
+        if uncached:
+            detail = "Sources are not cached for the following elements:\n\n"
+            for element in uncached:
+                detail += "  Following sources for element: {} are not cached:\n".format(element._get_full_name())
+                for source in element.sources():
+                    if source._get_consistency() != Consistency.CACHED:
+                        detail += "    {}\n".format(source)
+                detail += '\n'
+            detail += "Try fetching these elements first with `bst fetch`,\n" + \
+                      "or run this command with `--fetch` option\n"
+            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
     #############################################################
     # Private Methods #
     #############################################################
...
@@ -16,9 +16,7 @@
 # License along with this library. If not, see <http://www.gnu.org/licenses/>.
 import os
-import resource
-from .._exceptions import PlatformError
 from ..sandbox import SandboxDummy
 from . import Platform
@@ -29,10 +27,6 @@ class Darwin(Platform):
     # This value comes from OPEN_MAX in syslimits.h
     OPEN_MAX = 10240
-    def __init__(self):
-        super().__init__()
-
     def create_sandbox(self, *args, **kwargs):
         kwargs['dummy_reason'] = \
             "OSXFUSE is not supported and there are no supported sandbox" + \
...
@@ -22,7 +22,6 @@ import subprocess
 from .. import _site
 from .. import utils
-from .._message import Message, MessageType
 from ..sandbox import SandboxDummy
 from . import Platform
@@ -112,8 +111,4 @@ class Linux(Platform):
         except subprocess.CalledProcessError:
             output = ''
-        if output == 'root':
-            return True
-        else:
-            return False
+        return output == 'root'
@@ -414,7 +414,7 @@ class Job():
         try:
             # Try the task action
-            result = self.child_process()
+            result = self.child_process()  # pylint: disable=assignment-from-no-return
         except SkipJob as e:
             elapsed = datetime.datetime.now() - starttime
             self.message(MessageType.SKIPPED, str(e),
...
@@ -57,7 +57,7 @@ class PullQueue(Queue):
     def done(self, _, element, result, success):
         if not success:
-            return False
+            return
         element._pull_done()
...
@@ -20,7 +20,6 @@
 # BuildStream toplevel imports
 from ...plugin import _plugin_lookup
-from ... import SourceError
 # Local imports
 from . import Queue, QueueStatus
...
@@ -379,27 +379,7 @@ class Stream():
         elements, _ = self._load((target,), (), fetch_subprojects=True)
         target = elements[0]
-        if not tar:
-            try:
-                os.makedirs(location, exist_ok=True)
-            except OSError as e:
-                raise StreamError("Failed to create checkout directory: '{}'"
-                                  .format(e)) from e
-        if not tar:
-            if not os.access(location, os.W_OK):
-                raise StreamError("Checkout directory '{}' not writable"
-                                  .format(location))
-            if not force and os.listdir(location):
-                raise StreamError("Checkout directory '{}' not empty"
-                                  .format(location))
-        elif os.path.exists(location) and location != '-':
-            if not os.access(location, os.W_OK):
-                raise StreamError("Output file '{}' not writable"
-                                  .format(location))
-            if not force and os.path.exists(location):
-                raise StreamError("Output file '{}' already exists"
-                                  .format(location))
+        self._check_location_writable(location, force=force, tar=tar)
         # Stage deps into a temporary sandbox first
         try:
@@ -436,6 +416,42 @@ class Stream():
             raise StreamError("Error while staging dependencies into a sandbox"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
+    # source_checkout()
+    #
+    # Checkout sources of the target element to the specified location
+    #
+    # Args:
+    #    target (str): The target element whose sources to checkout
+    #    location (str): Location to checkout the sources to
+    #    deps (str): The dependencies to checkout
+    #    fetch (bool): Whether to fetch missing sources
+    #    except_targets (list): List of targets to except from staging
+    #
+    def source_checkout(self, target, *,
+                        location=None,
+                        deps='none',
+                        fetch=False,
+                        except_targets=()):
+        self._check_location_writable(location)
+
+        elements, _ = self._load((target,), (),
+                                 selection=deps,
+                                 except_targets=except_targets,
+                                 fetch_subprojects=True)
+
+        # Assert all sources are cached
+        if fetch:
+            self._fetch(elements)
+        self._pipeline.assert_sources_cached(elements)
+
+        # Stage all sources determined by scope
+        try:
+            self._write_element_sources(location, elements)
+        except BstError as e:
+            raise StreamError("Error while writing sources"
+                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
     # workspace_open
     #
     # Open a project workspace
@@ -719,7 +735,7 @@ class Stream():
                 if self._write_element_script(source_directory, element)
             ]
-            self._write_element_sources(tempdir, elements)
+            self._write_element_sources(os.path.join(tempdir, "source"), elements)
             self._write_build_script(tempdir, elements)
             self._collect_sources(tempdir, tar_location,
                                   target.normal_name, compression)
@@ -1061,6 +1077,39 @@ class Stream():
         self._enqueue_plan(fetch_plan)
         self._run()
+
+    # _check_location_writable()
+    #
+    # Check if given location is writable.
+    #
+    # Args:
+    #    location (str): Destination path
+    #    force (bool): Allow files to be overwritten
+    #    tar (bool): Whether destination is a tarball
+    #
+    # Raises:
+    #    (StreamError): If the destination is not writable
+    #
+    def _check_location_writable(self, location, force=False, tar=False):
+        if not tar:
+            try:
+                os.makedirs(location, exist_ok=True)
+            except OSError as e:
+                raise StreamError("Failed to create destination directory: '{}'"
+                                  .format(e)) from e
+            if not os.access(location, os.W_OK):
+                raise StreamError("Destination directory '{}' not writable"
+                                  .format(location))
+            if not force and os.listdir(location):
+                raise StreamError("Destination directory '{}' not empty"
+                                  .format(location))
+        elif os.path.exists(location) and location != '-':
+            if not os.access(location, os.W_OK):
+                raise StreamError("Output file '{}' not writable"
+                                  .format(location))
+            if not force and os.path.exists(location):
+                raise StreamError("Output file '{}' already exists"
+                                  .format(location))
+
     # Helper function for checkout()
     #
     def _checkout_hardlinks(self, sandbox_vroot, directory):
@@ -1082,11 +1131,10 @@ class Stream():
     # Write all source elements to the given directory
     def _write_element_sources(self, directory, elements):
         for element in elements:
-            source_dir = os.path.join(directory, "source")
-            element_source_dir = os.path.join(source_dir, element.normal_name)
-            os.makedirs(element_source_dir)
-            element._stage_sources_at(element_source_dir)
+            element_source_dir = self._get_element_dirname(directory, element)
+            if list(element.sources()):
+                os.makedirs(element_source_dir)
+                element._stage_sources_at(element_source_dir)
     # Write a master build script to the sandbox
     def _write_build_script(self, directory, elements):
@@ -1115,3 +1163,25 @@ class Stream():
         with tarfile.open(tar_name, permissions) as tar:
             tar.add(directory, arcname=element_name)
+
+    # _get_element_dirname()
+    #
+    # Get path to directory for an element based on its normal name.
+    #
+    # For cross-junction elements, the path will be prefixed with the name
+    # of the junction element.
+    #
+    # Args:
+    #    directory (str): path to base directory
+    #    element (Element): the element
+    #
+    # Returns:
+    #    (str): Path to directory for this element
+    #
+    def _get_element_dirname(self, directory, element):
+        parts = [element.normal_name]
+        while element._get_project() != self._project:
+            element = element._get_project().junction
+            parts.append(element.normal_name)
+
+        return os.path.join(directory, *reversed(parts))
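
A standalone sketch of the layout `_get_element_dirname()` produces, using a plain junction-name chain in place of Element/Project objects; the names 'hello' and 'base' are hypothetical. An element reached through a junction is written under a directory prefixed with the junction's normal name, so elements from different sub-projects cannot collide:

    import os

    def element_dirname(directory, normal_name, junction_chain):
        # junction_chain: junction normal names from the element's project up
        # to the top-level project, innermost junction first
        parts = [normal_name] + list(junction_chain)
        return os.path.join(directory, *reversed(parts))

    assert element_dirname('src', 'hello', []) == os.path.join('src', 'hello')
    assert element_dirname('src', 'hello', ['base']) == os.path.join('src', 'base', 'hello')
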