
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (13)
Showing 574 additions and 156 deletions
@@ -24,7 +24,6 @@ import os
 import stat
 import tempfile
 import uuid
-import errno
 from urllib.parse import urlparse
 import grpc

@@ -140,17 +139,13 @@ class CASCache():
         checkoutdir = os.path.join(tmpdir, ref)
         self._checkout(checkoutdir, tree)

-        os.makedirs(os.path.dirname(dest), exist_ok=True)
         try:
-            os.rename(checkoutdir, dest)
+            utils.move_atomic(checkoutdir, dest)
+        except utils.DirectoryExistsError:
+            # Another process beat us to rename
+            pass
         except OSError as e:
-            # With rename it's possible to get either ENOTEMPTY or EEXIST
-            # in the case that the destination path is a not empty directory.
-            #
-            # If rename fails with these errors, another process beat
-            # us to it so just ignore.
-            if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
-                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
+            raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e

         return originaldest

@@ -664,6 +664,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                          tar=tar)

+##################################################################
+#                  Source Checkout Command                       #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--except', 'except_', multiple=True,
+              type=click.Path(readable=False),
+              help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+              type=click.Choice(['build', 'none', 'run', 'all']),
+              help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', 'fetch_', default=False, is_flag=True,
+              help='Fetch elements if they are not fetched')
+@click.argument('element',
+                type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, deps, fetch_, except_):
+    """Checkout sources of an element to the specified location
+    """
+    with app.initialized():
+        app.stream.source_checkout(element,
+                                   location=location,
+                                   deps=deps,
+                                   fetch=fetch_,
+                                   except_targets=except_)
+
+
 ##################################################################
 #                     Workspace Command                          #
 ##################################################################

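For illustration, once this branch lands the new command would be invoked along these lines (the element name and destination directory below are hypothetical):

    bst source-checkout --fetch --deps run hello.bst path/to/sources

This would check out the sources of hello.bst and its runtime dependencies into path/to/sources, fetching anything not already cached.
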
@@ -370,7 +370,7 @@ class Pipeline():
                 detail += "  Element: {} is inconsistent\n".format(element._get_full_name())
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
-                        detail += "    Source {} is missing ref\n".format(source)
+                        detail += "    {} is missing ref\n".format(source)
                 detail += '\n'
         detail += "Try tracking these elements first with `bst track`\n"

@@ -383,6 +383,33 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")

+    # assert_sources_cached()
+    #
+    # Asserts that sources for the given list of elements are cached.
+    #
+    # Args:
+    #    elements (list): The list of elements
+    #
+    def assert_sources_cached(self, elements):
+        uncached = []
+        with self._context.timed_activity("Checking sources"):
+            for element in elements:
+                if element._get_consistency() != Consistency.CACHED:
+                    uncached.append(element)
+
+        if uncached:
+            detail = "Sources are not cached for the following elements:\n\n"
+            for element in uncached:
+                detail += "  Following sources for element: {} are not cached:\n".format(element._get_full_name())
+                for source in element.sources():
+                    if source._get_consistency() != Consistency.CACHED:
+                        detail += "    {}\n".format(source)
+                detail += '\n'
+            detail += "Try fetching these elements first with `bst fetch`,\n" + \
+                      "or run this command with `--fetch` option\n"
+
+            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
     #############################################################
     #                     Private Methods                       #
     #############################################################

@@ -379,27 +379,7 @@ class Stream():
         elements, _ = self._load((target,), (), fetch_subprojects=True)
         target = elements[0]

-        if not tar:
-            try:
-                os.makedirs(location, exist_ok=True)
-            except OSError as e:
-                raise StreamError("Failed to create checkout directory: '{}'"
-                                  .format(e)) from e
-
-        if not tar:
-            if not os.access(location, os.W_OK):
-                raise StreamError("Checkout directory '{}' not writable"
-                                  .format(location))
-            if not force and os.listdir(location):
-                raise StreamError("Checkout directory '{}' not empty"
-                                  .format(location))
-        elif os.path.exists(location) and location != '-':
-            if not os.access(location, os.W_OK):
-                raise StreamError("Output file '{}' not writable"
-                                  .format(location))
-            if not force and os.path.exists(location):
-                raise StreamError("Output file '{}' already exists"
-                                  .format(location))
+        self._check_location_writable(location, force=force, tar=tar)

         # Stage deps into a temporary sandbox first
         try:
@@ -443,6 +423,42 @@ class Stream():
             raise StreamError("Error while staging dependencies into a sandbox"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e

+    # source_checkout()
+    #
+    # Checkout sources of the target element to the specified location
+    #
+    # Args:
+    #    target (str): The target element whose sources to checkout
+    #    location (str): Location to checkout the sources to
+    #    deps (str): The dependencies to checkout
+    #    fetch (bool): Whether to fetch missing sources
+    #    except_targets (list): List of targets to except from staging
+    #
+    def source_checkout(self, target, *,
+                        location=None,
+                        deps='none',
+                        fetch=False,
+                        except_targets=()):
+
+        self._check_location_writable(location)
+
+        elements, _ = self._load((target,), (),
+                                 selection=deps,
+                                 except_targets=except_targets,
+                                 fetch_subprojects=True)
+
+        # Assert all sources are cached
+        if fetch:
+            self._fetch(elements)
+        self._pipeline.assert_sources_cached(elements)
+
+        # Stage all sources determined by scope
+        try:
+            self._write_element_sources(location, elements)
+        except BstError as e:
+            raise StreamError("Error while writing sources"
+                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
     # workspace_open
     #
     # Open a project workspace
@@ -726,7 +742,7 @@ class Stream():
             if self._write_element_script(source_directory, element)
         ]

-        self._write_element_sources(tempdir, elements)
+        self._write_element_sources(os.path.join(tempdir, "source"), elements)
         self._write_build_script(tempdir, elements)
         self._collect_sources(tempdir, tar_location,
                               target.normal_name, compression)
@@ -1068,6 +1084,39 @@ class Stream():
             self._enqueue_plan(fetch_plan)
             self._run()

+    # _check_location_writable()
+    #
+    # Check if given location is writable.
+    #
+    # Args:
+    #    location (str): Destination path
+    #    force (bool): Allow files to be overwritten
+    #    tar (bool): Whether destination is a tarball
+    #
+    # Raises:
+    #    (StreamError): If the destination is not writable
+    #
+    def _check_location_writable(self, location, force=False, tar=False):
+        if not tar:
+            try:
+                os.makedirs(location, exist_ok=True)
+            except OSError as e:
+                raise StreamError("Failed to create destination directory: '{}'"
+                                  .format(e)) from e
+            if not os.access(location, os.W_OK):
+                raise StreamError("Destination directory '{}' not writable"
+                                  .format(location))
+            if not force and os.listdir(location):
+                raise StreamError("Destination directory '{}' not empty"
+                                  .format(location))
+        elif os.path.exists(location) and location != '-':
+            if not os.access(location, os.W_OK):
+                raise StreamError("Output file '{}' not writable"
+                                  .format(location))
+            if not force and os.path.exists(location):
+                raise StreamError("Output file '{}' already exists"
+                                  .format(location))
+
     # Helper function for checkout()
     #
     def _checkout_hardlinks(self, sandbox_vroot, directory):
@@ -1089,11 +1138,10 @@ class Stream():
     # Write all source elements to the given directory
     def _write_element_sources(self, directory, elements):
         for element in elements:
-            source_dir = os.path.join(directory, "source")
-            element_source_dir = os.path.join(source_dir, element.normal_name)
-            os.makedirs(element_source_dir)
-
-            element._stage_sources_at(element_source_dir)
+            element_source_dir = self._get_element_dirname(directory, element)
+            if list(element.sources()):
+                os.makedirs(element_source_dir)
+                element._stage_sources_at(element_source_dir)

     # Write a master build script to the sandbox
     def _write_build_script(self, directory, elements):
@@ -1122,3 +1170,25 @@ class Stream():
         with tarfile.open(tar_name, permissions) as tar:
             tar.add(directory, arcname=element_name)

+    # _get_element_dirname()
+    #
+    # Get path to directory for an element based on its normal name.
+    #
+    # For cross-junction elements, the path will be prefixed with the name
+    # of the junction element.
+    #
+    # Args:
+    #    directory (str): path to base directory
+    #    element (Element): the element
+    #
+    # Returns:
+    #    (str): Path to directory for this element
+    #
+    def _get_element_dirname(self, directory, element):
+        parts = [element.normal_name]
+        while element._get_project() != self._project:
+            element = element._get_project().junction
+            parts.append(element.normal_name)
+
+        return os.path.join(directory, *reversed(parts))

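As a rough sketch of the layout _get_element_dirname() produces, assume a hypothetical element whose normal name is 'hello' reached through a junction whose normal name is 'base' (both names invented for illustration):

    import os

    # Parts are collected innermost element first, then each enclosing
    # junction, so reversing them yields the junction-prefixed path.
    parts = ['hello', 'base']
    print(os.path.join('/tmp/sources', *reversed(parts)))  # -> /tmp/sources/base/hello

A top-level element with no junction in between would simply land in /tmp/sources/hello.
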
@@ -85,7 +85,8 @@ import shutil
 from . import _yaml
 from ._variables import Variables
 from ._versions import BST_CORE_ARTIFACT_VERSION
-from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, ErrorDomain
+from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
+    ErrorDomain
 from .utils import UtilError
 from . import Plugin, Consistency, Scope
 from . import SandboxFlags
@@ -1553,7 +1554,6 @@ class Element(Plugin):
             self.__dynamic_public = _yaml.node_copy(self.__public)

             # Call the abstract plugin methods
-            collect = None
             try:
                 # Step 1 - Configure
                 self.configure_sandbox(sandbox)
@@ -1564,7 +1564,7 @@ class Element(Plugin):
                 # Step 4 - Assemble
                 collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
                 self.__set_build_result(success=True, description="succeeded")
-            except BstError as e:
+            except ElementError as e:
                 # Shelling into a sandbox is useful to debug this error
                 e.sandbox = True
@@ -1586,104 +1586,105 @@ class Element(Plugin):
                         self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                                   .format(e))

-                if isinstance(e, ElementError):
-                    collect = e.collect  # pylint: disable=no-member
-
                 self.__set_build_result(success=False, description=str(e), detail=e.detail)
+                self._cache_artifact(rootdir, sandbox, e.collect)

                 raise
+            else:
+                return self._cache_artifact(rootdir, sandbox, collect)
             finally:
-                if collect is not None:
-                    try:
-                        sandbox_vroot = sandbox.get_virtual_directory()
-                        collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
-                    except VirtualDirectoryError:
-                        # No collect directory existed
-                        collectvdir = None
-
-                # Create artifact directory structure
-                assembledir = os.path.join(rootdir, 'artifact')
-                filesdir = os.path.join(assembledir, 'files')
-                logsdir = os.path.join(assembledir, 'logs')
-                metadir = os.path.join(assembledir, 'meta')
-                buildtreedir = os.path.join(assembledir, 'buildtree')
-                os.mkdir(assembledir)
-                if collect is not None and collectvdir is not None:
-                    os.mkdir(filesdir)
-                os.mkdir(logsdir)
-                os.mkdir(metadir)
-                os.mkdir(buildtreedir)
-
-                # Hard link files from collect dir to files directory
-                if collect is not None and collectvdir is not None:
-                    collectvdir.export_files(filesdir, can_link=True)
-
-                try:
-                    sandbox_vroot = sandbox.get_virtual_directory()
-                    sandbox_build_dir = sandbox_vroot.descend(
-                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
-                    # Hard link files from build-root dir to buildtreedir directory
-                    sandbox_build_dir.export_files(buildtreedir)
-                except VirtualDirectoryError:
-                    # Directory could not be found. Pre-virtual
-                    # directory behaviour was to continue silently
-                    # if the directory could not be found.
-                    pass
-
-                # Copy build log
-                log_filename = context.get_log_filename()
-                self._build_log_path = os.path.join(logsdir, 'build.log')
-                if log_filename:
-                    shutil.copyfile(log_filename, self._build_log_path)
-
-                # Store public data
-                _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
-                # Store result
-                build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
-                if self.__build_result[2] is not None:
-                    build_result_dict["detail"] = self.__build_result[2]
-                _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
-                # ensure we have cache keys
-                self._assemble_done()
-
-                # Store keys.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'strong': self._get_cache_key(),
-                    'weak': self._get_cache_key(_KeyStrength.WEAK),
-                }), os.path.join(metadir, 'keys.yaml'))
-
-                # Store dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-                }), os.path.join(metadir, 'dependencies.yaml'))
-
-                # Store workspaced.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced': True if self._get_workspace() else False
-                }), os.path.join(metadir, 'workspaced.yaml'))
-
-                # Store workspaced-dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced-dependencies': [
-                        e.name for e in self.dependencies(Scope.BUILD)
-                        if e._get_workspace()
-                    ]
-                }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
-                with self.timed_activity("Caching artifact"):
-                    artifact_size = utils._get_dir_size(assembledir)
-                    self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
-
-                if collect is not None and collectvdir is None:
-                    raise ElementError(
-                        "Directory '{}' was not found inside the sandbox, "
-                        "unable to collect artifact contents"
-                        .format(collect))
-
-            # Finally cleanup the build dir
-            cleanup_rootdir()
-
-            return artifact_size
+                cleanup_rootdir()
+
+    def _cache_artifact(self, rootdir, sandbox, collect):
+        if collect is not None:
+            try:
+                sandbox_vroot = sandbox.get_virtual_directory()
+                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+            except VirtualDirectoryError:
+                # No collect directory existed
+                collectvdir = None
+
+        # Create artifact directory structure
+        assembledir = os.path.join(rootdir, 'artifact')
+        filesdir = os.path.join(assembledir, 'files')
+        logsdir = os.path.join(assembledir, 'logs')
+        metadir = os.path.join(assembledir, 'meta')
+        buildtreedir = os.path.join(assembledir, 'buildtree')
+        os.mkdir(assembledir)
+        if collect is not None and collectvdir is not None:
+            os.mkdir(filesdir)
+        os.mkdir(logsdir)
+        os.mkdir(metadir)
+        os.mkdir(buildtreedir)
+
+        # Hard link files from collect dir to files directory
+        if collect is not None and collectvdir is not None:
+            collectvdir.export_files(filesdir, can_link=True)
+
+        try:
+            sandbox_vroot = sandbox.get_virtual_directory()
+            sandbox_build_dir = sandbox_vroot.descend(
+                self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+            # Hard link files from build-root dir to buildtreedir directory
+            sandbox_build_dir.export_files(buildtreedir)
+        except VirtualDirectoryError:
+            # Directory could not be found. Pre-virtual
+            # directory behaviour was to continue silently
+            # if the directory could not be found.
+            pass
+
+        # Copy build log
+        log_filename = self._get_context().get_log_filename()
+        self._build_log_path = os.path.join(logsdir, 'build.log')
+        if log_filename:
+            shutil.copyfile(log_filename, self._build_log_path)
+
+        # Store public data
+        _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+        # Store result
+        build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+        if self.__build_result[2] is not None:
+            build_result_dict["detail"] = self.__build_result[2]
+        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+        # ensure we have cache keys
+        self._assemble_done()
+
+        # Store keys.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'strong': self._get_cache_key(),
+            'weak': self._get_cache_key(_KeyStrength.WEAK),
+        }), os.path.join(metadir, 'keys.yaml'))
+
+        # Store dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+        }), os.path.join(metadir, 'dependencies.yaml'))
+
+        # Store workspaced.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced': True if self._get_workspace() else False
+        }), os.path.join(metadir, 'workspaced.yaml'))
+
+        # Store workspaced-dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced-dependencies': [
+                e.name for e in self.dependencies(Scope.BUILD)
+                if e._get_workspace()
+            ]
+        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+        with self.timed_activity("Caching artifact"):
+            artifact_size = utils._get_dir_size(assembledir)
+            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
+
+        if collect is not None and collectvdir is None:
+            raise ElementError(
+                "Directory '{}' was not found inside the sandbox, "
+                "unable to collect artifact contents"
+                .format(collect))
+
+        return artifact_size

     def _get_build_log(self):

@@ -86,7 +86,6 @@ This plugin also utilises the following configurable core plugin warnings:
 """

 import os
-import errno
 import re
 import shutil
 from collections.abc import Mapping

@@ -97,6 +96,7 @@ from configparser import RawConfigParser
 from buildstream import Source, SourceError, Consistency, SourceFetcher
 from buildstream import utils
 from buildstream.plugin import CoreWarnings
+from buildstream.utils import move_atomic, DirectoryExistsError

 GIT_MODULES = '.gitmodules'

@@ -141,21 +141,16 @@ class GitMirror(SourceFetcher):
                              fail="Failed to clone git repository {}".format(url),
                              fail_temporarily=True)

-            # Attempt atomic rename into destination, this will fail if
-            # another process beat us to the punch
             try:
-                os.rename(tmpdir, self.mirror)
+                move_atomic(tmpdir, self.mirror)
+            except DirectoryExistsError:
+                # Another process was quicker to download this repository.
+                # Let's discard our own
+                self.source.status("{}: Discarding duplicate clone of {}"
+                                   .format(self.source, url))
             except OSError as e:
-                # When renaming and the destination repo already exists, os.rename()
-                # will fail with ENOTEMPTY, since an empty directory will be silently
-                # replaced
-                if e.errno == errno.ENOTEMPTY:
-                    self.source.status("{}: Discarding duplicate clone of {}"
-                                       .format(self.source, url))
-                else:
-                    raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
-                                      .format(self.source, url, tmpdir, self.mirror, e)) from e
+                raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
+                                  .format(self.source, url, tmpdir, self.mirror, e)) from e

     def _fetch(self, alias_override=None):
         url = self.source.translate_url(self.url,

@@ -68,7 +68,6 @@ details on common configuration options for sources.
 The ``pip`` plugin is available since :ref:`format version 16 <project_format_version>`
 """

-import errno
 import hashlib
 import os
 import re

@@ -80,6 +79,7 @@ _PYPI_INDEX_URL = 'https://pypi.org/simple/'

 # Used only for finding pip command
 _PYTHON_VERSIONS = [
+    'python',  # when running in a venv, we might not have the exact version
     'python2.7',
     'python3.0',
     'python3.1',

@@ -192,13 +192,14 @@ class PipSource(Source):
         # process has fetched the sources before us and ensure that we do
         # not raise an error in that case.
         try:
-            os.makedirs(self._mirror)
-            os.rename(package_dir, self._mirror)
-        except FileExistsError:
-            return
+            utils.move_atomic(package_dir, self._mirror)
+        except utils.DirectoryExistsError:
+            # Another process has beaten us and has fetched the sources
+            # before us.
+            pass
         except OSError as e:
-            if e.errno != errno.ENOTEMPTY:
-                raise
+            raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
+                              .format(self, package_dir, self._mirror, e)) from e

     def stage(self, directory):
         with self.timed_activity("Staging Python packages", silent_nested=True):

@@ -72,6 +72,11 @@ class ProgramNotFoundError(BstError):
         super().__init__(message, domain=ErrorDomain.PROG_NOT_FOUND, reason=reason)

+class DirectoryExistsError(OSError):
+    """Raised when a `os.rename` is attempted but the destination is an existing directory.
+    """
+
+
 class FileListResult():
     """An object which stores the result of one of the operations
     which run on a list of files.

@@ -500,6 +505,38 @@ def get_bst_version():
                         .format(__version__))

+def move_atomic(source, destination, ensure_parents=True):
+    """Move the source to the destination using atomic primitives.
+
+    This uses `os.rename` to move a file or directory to a new destination.
+    It wraps some `OSError` thrown errors to ensure their handling is correct.
+
+    The main reason for this to exist is that rename can throw different errors
+    for the same symptom (https://www.unix.com/man-page/POSIX/3posix/rename/).
+    We are especially interested here in the case when the destination already
+    exists. In this case, either EEXIST or ENOTEMPTY are thrown.
+
+    In order to ensure consistent handling of these exceptions, this function
+    should be used instead of `os.rename`.
+
+    Args:
+       source (str or Path): source to rename
+       destination (str or Path): destination to which to move the source
+       ensure_parents (bool): Whether or not to create the parent's directories
+                              of the destination (default: True)
+    """
+    if ensure_parents:
+        os.makedirs(os.path.dirname(str(destination)), exist_ok=True)
+
+    try:
+        os.rename(str(source), str(destination))
+    except OSError as exc:
+        if exc.errno in (errno.EEXIST, errno.ENOTEMPTY):
+            raise DirectoryExistsError(*exc.args) from exc
+        raise
+
+
 @contextmanager
 def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
                      errors=None, newline=None, closefd=True, opener=None, tempdir=None):

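A minimal caller-side sketch of how move_atomic() is intended to be used, mirroring the git and pip plugin changes above (the staging step and mirror path are placeholders):

    import tempfile
    from buildstream import utils

    staging = tempfile.mkdtemp()
    # ... populate 'staging' with a freshly downloaded copy ...
    try:
        utils.move_atomic(staging, '/path/to/mirror')  # illustrative destination
    except utils.DirectoryExistsError:
        # Another process moved its copy into place first; discard ours.
        pass

Callers no longer need to inspect errno for EEXIST versus ENOTEMPTY; both are normalised to DirectoryExistsError.
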
@@ -56,6 +56,10 @@ def integration_cache(request):
         pass

-@pytest.fixture(autouse=True)
 def clean_platform_cache():
     Platform._instance = None
+
+
+@pytest.fixture(autouse=True)
+def ensure_platform_cache_is_clean():
+    clean_platform_cache()
@@ -15,6 +15,7 @@ MAIN_COMMANDS = [
     'push ',
     'shell ',
     'show ',
+    'source-checkout ',
     'source-bundle ',
     'track ',
     'workspace '

@@ -115,6 +115,7 @@ def test_build_track(cli, datafiles, tmpdir, ref_storage,
     args += ['0.bst']

     result = cli.run(project=project, silent=True, args=args)
+    result.assert_success()

     tracked_elements = result.get_tracked_elements()
     assert set(tracked_elements) == set(tracked)

kind: import
description: It is important for this element to have both build and runtime dependencies
sources:
- kind: local
path: files/etc-files
depends:
- filename: import-dev.bst
type: build
- filename: import-bin.bst
type: runtime
config
import os
import pytest
from tests.testutils import cli
from buildstream import utils, _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'project',
)
def generate_remote_import_element(input_path, output_path):
return {
'kind': 'import',
'sources': [
{
'kind': 'remote',
'url': 'file://{}'.format(input_path),
'filename': output_path,
'ref': utils.sha256sum(input_path),
}
]
}
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout(datafiles, cli):
project = os.path.join(datafiles.dirname, datafiles.basename)
checkout = os.path.join(cli.directory, 'source-checkout')
target = 'checkout-deps.bst'
result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
result.assert_success()
assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
def test_source_checkout_deps(datafiles, cli, deps):
project = os.path.join(datafiles.dirname, datafiles.basename)
checkout = os.path.join(cli.directory, 'source-checkout')
target = 'checkout-deps.bst'
result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
result.assert_success()
# Sources of the target
if deps == 'build':
assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
else:
assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
# Sources of the target's build dependencies
if deps in ('build', 'all'):
assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
else:
assert not os.path.exists(os.path.join(checkout, 'import-dev'))
# Sources of the target's runtime dependencies
if deps in ('run', 'all'):
assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
else:
assert not os.path.exists(os.path.join(checkout, 'import-bin'))
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_except(datafiles, cli):
project = os.path.join(datafiles.dirname, datafiles.basename)
checkout = os.path.join(cli.directory, 'source-checkout')
target = 'checkout-deps.bst'
result = cli.run(project=project, args=['source-checkout', target,
'--deps', 'all',
'--except', 'import-bin.bst',
checkout])
result.assert_success()
# Sources for the target should be present
assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
# Sources for import-bin.bst should not be present
assert not os.path.exists(os.path.join(checkout, 'import-bin'))
# Sources for other dependencies should be present
assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize('fetch', [(False), (True)])
def test_source_checkout_fetch(datafiles, cli, fetch):
project = os.path.join(datafiles.dirname, datafiles.basename)
checkout = os.path.join(cli.directory, 'source-checkout')
target = 'remote-import-dev.bst'
target_path = os.path.join(project, 'elements', target)
# Create an element with remote source
element = generate_remote_import_element(
os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
'pony.h')
_yaml.dump(element, target_path)
# Testing --fetch option requires that we do not have the sources
# cached already
assert cli.get_element_state(project, target) == 'fetch needed'
args = ['source-checkout']
if fetch:
args += ['--fetch']
args += [target, checkout]
result = cli.run(project=project, args=args)
if fetch:
result.assert_success()
assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
else:
result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
@@ -4,6 +4,8 @@ import pytest
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain

+from conftest import clean_platform_cache
+
 from tests.testutils import cli_integration as cli, create_artifact_share
 from tests.testutils.site import IS_LINUX

@@ -158,3 +160,40 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
     assert cli.get_element_state(project, 'element.bst') == 'failed'
     # This element should have been pushed to the remote
     assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+def test_host_tools_errors_are_not_cached(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_path = os.path.join(project, 'elements', 'element.bst')
+
+    # Write out our test target
+    element = {
+        'kind': 'script',
+        'depends': [
+            {
+                'filename': 'base.bst',
+                'type': 'build',
+            },
+        ],
+        'config': {
+            'commands': [
+                'true',
+            ],
+        },
+    }
+    _yaml.dump(element, element_path)
+
+    # Build without access to host tools, this will fail
+    result1 = cli.run(project=project, args=['build', 'element.bst'], env={'PATH': ''})
+    result1.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
+    assert cli.get_element_state(project, 'element.bst') == 'buildable'
+
+    # clean the cache before running again
+    clean_platform_cache()
+
+    # When rebuilding, this should work
+    result2 = cli.run(project=project, args=['build', 'element.bst'])
+    result2.assert_success()
+    assert cli.get_element_state(project, 'element.bst') == 'cached'
import pytest
from buildstream.utils import move_atomic, DirectoryExistsError
@pytest.fixture
def src(tmp_path):
src = tmp_path.joinpath("src")
src.mkdir()
with src.joinpath("test").open("w") as fp:
fp.write("test")
return src
def test_move_to_empty_dir(src, tmp_path):
dst = tmp_path.joinpath("dst")
move_atomic(src, dst)
assert dst.joinpath("test").exists()
def test_move_to_empty_dir_create_parents(src, tmp_path):
dst = tmp_path.joinpath("nested/dst")
move_atomic(src, dst)
assert dst.joinpath("test").exists()
def test_move_to_empty_dir_no_create_parents(src, tmp_path):
dst = tmp_path.joinpath("nested/dst")
with pytest.raises(FileNotFoundError):
move_atomic(src, dst, ensure_parents=False)
def test_move_non_existing_dir(tmp_path):
dst = tmp_path.joinpath("dst")
src = tmp_path.joinpath("src")
with pytest.raises(FileNotFoundError):
move_atomic(src, dst)
def test_move_to_existing_empty_dir(src, tmp_path):
dst = tmp_path.joinpath("dst")
dst.mkdir()
move_atomic(src, dst)
assert dst.joinpath("test").exists()
def test_move_to_existing_file(src, tmp_path):
dst = tmp_path.joinpath("dst")
with dst.open("w") as fp:
fp.write("error")
with pytest.raises(NotADirectoryError):
move_atomic(src, dst)
def test_move_file_to_existing_file(tmp_path):
dst = tmp_path.joinpath("dst")
src = tmp_path.joinpath("src")
with src.open("w") as fp:
fp.write("src")
with dst.open("w") as fp:
fp.write("dst")
move_atomic(src, dst)
with dst.open() as fp:
assert fp.read() == "src"
def test_move_to_existing_non_empty_dir(src, tmp_path):
dst = tmp_path.joinpath("dst")
dst.mkdir()
with dst.joinpath("existing").open("w") as fp:
fp.write("already there")
with pytest.raises(DirectoryExistsError):
move_atomic(src, dst)