Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (7)

@@ -45,9 +45,10 @@ _BUFFER_SIZE = 65536
 #
 class CASCache():
-    def __init__(self, path):
+    def __init__(self, path, *, disable_exec=False):
         self.casdir = os.path.join(path, 'cas')
         self.tmpdir = os.path.join(path, 'tmp')
+        self._disable_exec = disable_exec
         os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
         os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
         os.makedirs(self.tmpdir, exist_ok=True)
@@ -342,8 +343,12 @@ class CASCache():
     # Returns:
     #     (str): The path of the object
     #
-    def objpath(self, digest):
-        return os.path.join(self.casdir, 'objects', digest.hash[:2], digest.hash[2:])
+    def objpath(self, digest, *, is_exec=False):
+        if is_exec and not self._disable_exec:
+            filename = '{}.exec'.format(digest.hash[2:])
+        else:
+            filename = digest.hash[2:]
+        return os.path.join(self.casdir, 'objects', digest.hash[:2], filename)
     # add_object():
     #
@@ -360,7 +365,7 @@ class CASCache():
     #
     # Either `path` or `buffer` must be passed, but not both.
     #
-    def add_object(self, *, digest=None, path=None, buffer=None, link_directly=False):
+    def add_object(self, *, digest=None, path=None, buffer=None, link_directly=False, is_exec=False):
         # Exactly one of the two parameters has to be specified
         assert (path is None) != (buffer is None)
@@ -376,10 +381,7 @@ class CASCache():
                     for chunk in iter(lambda: tmp.read(_BUFFER_SIZE), b""):
                         h.update(chunk)
                 else:
-                    tmp = stack.enter_context(utils._tempnamedfile(dir=self.tmpdir))
-                    # Set mode bits to 0644
-                    os.chmod(tmp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+                    tmp = stack.enter_context(self._temporary_object(is_exec=is_exec))
                     if path:
                         with open(path, 'rb') as f:
                             for chunk in iter(lambda: f.read(_BUFFER_SIZE), b""):
@@ -395,7 +397,7 @@ class CASCache():
                 digest.size_bytes = os.fstat(tmp.fileno()).st_size
                 # Place file at final location
-                objpath = self.objpath(digest)
+                objpath = self.objpath(digest, is_exec=is_exec)
                 os.makedirs(os.path.dirname(objpath), exist_ok=True)
                 os.link(tmp.name, objpath)
@@ -604,11 +606,7 @@ class CASCache():
         for filenode in directory.files:
             # regular file, create hardlink
             fullpath = os.path.join(dest, filenode.name)
-            os.link(self.objpath(filenode.digest), fullpath)
-            if filenode.is_executable:
-                os.chmod(fullpath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
-                         stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+            os.link(self.objpath(filenode.digest, is_exec=filenode.is_executable), fullpath)
         for dirnode in directory.directories:
             # Don't try to checkout a dangling ref
@@ -700,8 +698,8 @@ class CASCache():
         elif stat.S_ISREG(mode):
             filenode = directory.files.add()
             filenode.name = name
-            self.add_object(path=full_path, digest=filenode.digest)
             filenode.is_executable = (mode & stat.S_IXUSR) == stat.S_IXUSR
+            self.add_object(path=full_path, digest=filenode.digest, is_exec=filenode.is_executable)
         elif stat.S_ISLNK(mode):
             symlinknode = directory.symlinks.add()
             symlinknode.name = name
@@ -800,7 +798,7 @@ class CASCache():
             for filenode in directory.files:
                 if update_mtime:
-                    os.utime(self.objpath(filenode.digest))
+                    os.utime(self.objpath(filenode.digest, is_exec=filenode.is_executable))
                 reachable.add(filenode.digest.hash)
             for dirnode in directory.directories:
@@ -811,7 +809,7 @@ class CASCache():
         d = remote_execution_pb2.Digest()
         d.hash = directory_digest.hash
         d.size_bytes = directory_digest.size_bytes
-        yield d
+        yield False, d
         directory = remote_execution_pb2.Directory()
@@ -822,11 +820,26 @@ class CASCache():
             d = remote_execution_pb2.Digest()
             d.hash = filenode.digest.hash
             d.size_bytes = filenode.digest.size_bytes
-            yield d
+            yield filenode.is_executable, d
         for dirnode in directory.directories:
             yield from self._required_blobs(dirnode.digest)
+    # _temporary_object():
+    #
+    # Returns:
+    #     (file): A file object to a named temporary file.
+    #
+    # Create a named temporary file with 0o0644 access rights.
+    @contextlib.contextmanager
+    def _temporary_object(self, *, is_exec=False):
+        with utils._tempnamedfile(dir=self.tmpdir) as f:
+            access = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+            if is_exec and not self._disable_exec:
+                access |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+            os.chmod(f.name, access)
+            yield f
     # _ensure_blob():
     #
     # Fetch and add blob if it's not already local.
@@ -838,27 +851,27 @@ class CASCache():
     # Returns:
     #     (str): The path of the object
     #
-    def _ensure_blob(self, remote, digest):
-        objpath = self.objpath(digest)
+    def _ensure_blob(self, remote, digest, is_exec=False):
+        objpath = self.objpath(digest, is_exec=is_exec)
         if os.path.exists(objpath):
             # already in local repository
             return objpath
-        with utils._tempnamedfile(dir=self.tmpdir) as f:
+        with self._temporary_object(is_exec=is_exec) as f:
             remote._fetch_blob(digest, f)
-            added_digest = self.add_object(path=f.name, link_directly=True)
+            added_digest = self.add_object(path=f.name, link_directly=True, is_exec=is_exec)
             assert added_digest.hash == digest.hash
         return objpath
     def _batch_download_complete(self, batch):
-        for digest, data in batch.send():
-            with utils._tempnamedfile(dir=self.tmpdir) as f:
+        for digest, data, is_exec in batch.send():
+            with self._temporary_object(is_exec=is_exec) as f:
                 f.write(data)
                 f.flush()
-                added_digest = self.add_object(path=f.name, link_directly=True)
+                added_digest = self.add_object(path=f.name, link_directly=True, is_exec=is_exec)
                 assert added_digest.hash == digest.hash
     # Helper function for _fetch_directory().
@@ -872,8 +885,9 @@ class CASCache():
         return _CASBatchRead(remote)
     # Helper function for _fetch_directory().
-    def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False):
-        in_local_cache = os.path.exists(self.objpath(digest))
+    def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue,
+                              *, recursive=False, is_exec=False):
+        in_local_cache = os.path.exists(self.objpath(digest, is_exec=is_exec))
         if in_local_cache:
             # Skip download, already in local cache.
@@ -881,14 +895,14 @@ class CASCache():
         elif (digest.size_bytes >= remote.max_batch_total_size_bytes or
               not remote.batch_read_supported):
             # Too large for batch request, download in independent request.
-            self._ensure_blob(remote, digest)
+            self._ensure_blob(remote, digest, is_exec=is_exec)
             in_local_cache = True
         else:
-            if not batch.add(digest):
+            if not batch.add(digest, is_exec=is_exec):
                 # Not enough space left in batch request.
                 # Complete pending batch first.
                 batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
-                batch.add(digest)
+                batch.add(digest, is_exec=is_exec)
         if recursive:
             if in_local_cache:
@@ -936,11 +950,13 @@ class CASCache():
             for dirnode in directory.directories:
                 if dirnode.name not in excluded_subdirs:
                     batch = self._fetch_directory_node(remote, dirnode.digest, batch,
-                                                       fetch_queue, fetch_next_queue, recursive=True)
+                                                       fetch_queue, fetch_next_queue,
+                                                       recursive=True)
             for filenode in directory.files:
                 batch = self._fetch_directory_node(remote, filenode.digest, batch,
-                                                   fetch_queue, fetch_next_queue)
+                                                   fetch_queue, fetch_next_queue,
+                                                   is_exec=filenode.is_executable)
         # Fetch final batch
         self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
@@ -962,7 +978,7 @@ class CASCache():
         tree.children.extend([tree.root])
         for directory in tree.children:
             for filenode in directory.files:
-                self._ensure_blob(remote, filenode.digest)
+                self._ensure_blob(remote, filenode.digest, is_exec=filenode.is_executable)
             # place directory blob only in final location when we've downloaded
             # all referenced blobs to avoid dangling references in the repository
@@ -975,22 +991,28 @@ class CASCache():
     def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
         required_blobs = self._required_blobs(digest)
+        executable = {}
         missing_blobs = dict()
         # Limit size of FindMissingBlobs request
         for required_blobs_group in _grouper(required_blobs, 512):
             request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=remote.spec.instance_name)
-            for required_digest in required_blobs_group:
+            for is_exec, required_digest in required_blobs_group:
                 d = request.blob_digests.add()
                 d.hash = required_digest.hash
                 d.size_bytes = required_digest.size_bytes
+                if required_digest.hash not in executable:
+                    executable[required_digest.hash] = set()
+                executable[required_digest.hash].add(is_exec)
             response = remote.cas.FindMissingBlobs(request)
             for missing_digest in response.missing_blob_digests:
                 d = remote_execution_pb2.Digest()
                 d.hash = missing_digest.hash
                 d.size_bytes = missing_digest.size_bytes
-                missing_blobs[d.hash] = d
+                for is_exec in executable[missing_digest.hash]:
+                    missing_blobs[d.hash] = (is_exec, d)
         # Upload any blobs missing on the server
         self._send_blobs(remote, missing_blobs.values(), u_uid)
@@ -998,8 +1020,8 @@ class CASCache():
     def _send_blobs(self, remote, digests, u_uid=uuid.uuid4()):
         batch = _CASBatchUpdate(remote)
-        for digest in digests:
-            with open(self.objpath(digest), 'rb') as f:
+        for is_exec, digest in digests:
+            with open(self.objpath(digest, is_exec=is_exec), 'rb') as f:
                 assert os.fstat(f.fileno()).st_size == digest.size_bytes
                 if (digest.size_bytes >= remote.max_batch_total_size_bytes or
...
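
The CASCache hunks above store executable blobs as separate objects with an `.exec` suffix and decide permission bits when the temporary object is created, rather than fixing them up with os.chmod() at checkout time. A minimal standalone sketch of that scheme, not part of the patch, using a plain hash string in place of a Digest message:

    import os
    import stat
    import tempfile

    # Illustrative sketch only: mirrors the objpath()/_temporary_object()
    # logic in the diff above, with a string instead of a Digest proto.
    def objpath(casdir, digest_hash, *, is_exec=False, disable_exec=False):
        # Executable blobs get their own '<hash>.exec' object, unless the
        # cache was created with disable_exec=True (as the server-side cache is).
        filename = digest_hash[2:]
        if is_exec and not disable_exec:
            filename = '{}.exec'.format(filename)
        return os.path.join(casdir, 'objects', digest_hash[:2], filename)

    def temporary_object(tmpdir, *, is_exec=False, disable_exec=False):
        # Mode bits are decided when the temporary object is created, so
        # checkout can hardlink objects without a later os.chmod() fixup.
        f = tempfile.NamedTemporaryFile(dir=tmpdir)
        access = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
        if is_exec and not disable_exec:
            access |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        os.chmod(f.name, access)
        return f

    print(objpath('/tmp/cas', 'abcdef1234', is_exec=True))
    # -> /tmp/cas/objects/ab/cdef1234.exec
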
@@ -306,8 +306,9 @@ class _CASBatchRead():
         self._request = remote_execution_pb2.BatchReadBlobsRequest()
         self._size = 0
         self._sent = False
+        self._is_exec = {}
-    def add(self, digest):
+    def add(self, digest, *, is_exec=False):
         assert not self._sent
         new_batch_size = self._size + digest.size_bytes
@@ -319,6 +320,9 @@ class _CASBatchRead():
         request_digest.hash = digest.hash
         request_digest.size_bytes = digest.size_bytes
         self._size = new_batch_size
+        if digest.hash not in self._is_exec:
+            self._is_exec[digest.hash] = set()
+        self._is_exec[digest.hash].add(is_exec)
         return True
     def send(self):
@@ -341,7 +345,8 @@ class _CASBatchRead():
                 raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
                     response.digest.hash, response.digest.size_bytes, len(response.data)))
-            yield (response.digest, response.data)
+            for is_exec in self._is_exec[response.digest.hash]:
+                yield (response.digest, response.data, is_exec)
 # Represents a batch of blobs queued for upload.
...
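
Because the same blob can be referenced both as an executable and as a plain file, _CASBatchRead now records every mode a digest was requested with and yields the downloaded data once per mode, so the content ends up under both object paths. A rough illustration of that bookkeeping, not part of the patch, with hash strings standing in for digest messages:

    # Illustrative sketch only.
    is_exec_modes = {}

    def add(digest_hash, *, is_exec=False):
        # Remember every mode this digest was requested with.
        if digest_hash not in is_exec_modes:
            is_exec_modes[digest_hash] = set()
        is_exec_modes[digest_hash].add(is_exec)

    add('abcdef', is_exec=False)   # referenced somewhere as a plain file
    add('abcdef', is_exec=True)    # referenced again as an executable

    # When the batch response arrives, the same bytes are yielded once per
    # recorded mode, so the blob is stored under both 'ab/cdef' and
    # 'ab/cdef.exec' locally.
    for is_exec in is_exec_modes['abcdef']:
        print('store blob abcdef with is_exec={}'.format(is_exec))
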
@@ -61,7 +61,7 @@ class ArtifactTooLargeException(Exception):
 def create_server(repo, *, enable_push,
                   max_head_size=int(10e9),
                   min_head_size=int(2e9)):
-    cas = CASCache(os.path.abspath(repo))
+    cas = CASCache(os.path.abspath(repo), disable_exec=True)
     # Use max_workers default from Python 3.5+
     max_workers = (os.cpu_count() or 1) * 5
...
@@ -20,8 +20,6 @@
 import os
 from functools import cmp_to_key
 from collections.abc import Mapping
-import tempfile
-import shutil
 from .._exceptions import LoadError, LoadErrorReason
 from .. import Consistency
@@ -49,12 +47,10 @@ from .._message import Message, MessageType
 #     context (Context): The Context object
 #     project (Project): The toplevel Project object
 #     parent (Loader): A parent Loader object, in the case this is a junctioned Loader
-#     tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
-#                    loader in the case that this loader is a subproject loader.
 #
 class Loader():
-    def __init__(self, context, project, *, parent=None, tempdir=None):
+    def __init__(self, context, project, *, parent=None):
         # Ensure we have an absolute path for the base directory
         basedir = project.element_path
@@ -73,7 +69,6 @@ class Loader():
         self._options = project.options                   # Project options (OptionPool)
         self._basedir = basedir                           # Base project directory
         self._first_pass_options = project.first_pass_config.options  # Project options (OptionPool)
-        self._tempdir = tempdir                           # A directory to cleanup
         self._parent = parent                             # The parent loader
         self._meta_elements = {}  # Dict of resolved meta elements by name
@@ -159,30 +154,6 @@ class Loader():
         return ret
-    # cleanup():
-    #
-    # Remove temporary checkout directories of subprojects
-    #
-    def cleanup(self):
-        if self._parent and not self._tempdir:
-            # already done
-            return
-        # recurse
-        for loader in self._loaders.values():
-            # value may be None with nested junctions without overrides
-            if loader is not None:
-                loader.cleanup()
-        if not self._parent:
-            # basedir of top-level loader is never a temporary directory
-            return
-        # safe guard to not accidentally delete directories outside builddir
-        if self._tempdir.startswith(self._context.builddir + os.sep):
-            if os.path.exists(self._tempdir):
-                shutil.rmtree(self._tempdir)
     ###########################################
     #            Private Methods              #
     ###########################################
@@ -540,23 +511,28 @@ class Loader():
                                 "Subproject has no ref for junction: {}".format(filename),
                                 detail=detail)
-        if len(sources) == 1 and sources[0]._get_local_path():
+        workspace = element._get_workspace()
+        if workspace:
+            # If a workspace is open, load it from there instead
+            basedir = workspace.get_absolute_path()
+        elif len(sources) == 1 and sources[0]._get_local_path():
             # Optimization for junctions with a single local source
             basedir = sources[0]._get_local_path()
-            tempdir = None
         else:
             # Stage sources
-            os.makedirs(self._context.builddir, exist_ok=True)
-            basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
-            element._stage_sources_at(basedir, mount_workspaces=False)
-            tempdir = basedir
+            element._update_state()
+            basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
+                                   filename, element._get_cache_key())
+            if not os.path.exists(basedir):
+                os.makedirs(basedir, exist_ok=True)
+                element._stage_sources_at(basedir, mount_workspaces=False)
         # Load the project
         project_dir = os.path.join(basedir, element.path)
         try:
             from .._project import Project
             project = Project(project_dir, self._context, junction=element,
-                              parent_loader=self, tempdir=tempdir)
+                              parent_loader=self)
         except LoadError as e:
             if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
                 raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
...
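
Instead of staging junction sources into a throwaway temporary directory that the loader later has to clean up, the hunk above stages them under a stable per-cache-key path inside the project's .bst directory and reuses it on later loads. A small sketch of that layout, with made-up junction name and cache key:

    import os

    # Hypothetical values for illustration; the real loader uses the junction
    # element's filename and element._get_cache_key().
    project_directory = '/home/user/project'
    junction_filename = 'subproject.bst'
    cache_key = '0123abcd'

    basedir = os.path.join(project_directory, '.bst', 'staged-junctions',
                           junction_filename, cache_key)
    print(basedir)
    # -> /home/user/project/.bst/staged-junctions/subproject.bst/0123abcd

    # Sources are staged only if this directory does not exist yet; later
    # invocations with the same cache key reuse the staged checkout, so
    # there is no temporary directory to clean up afterwards.
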
@@ -91,7 +91,7 @@ class ProjectConfig:
 class Project():
     def __init__(self, directory, context, *, junction=None, cli_options=None,
-                 default_mirror=None, parent_loader=None, tempdir=None):
+                 default_mirror=None, parent_loader=None):
         # The project name
         self.name = None
@@ -147,7 +147,7 @@ class Project():
         self._project_includes = None
         profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
-        self._load(parent_loader=parent_loader, tempdir=tempdir)
+        self._load(parent_loader=parent_loader)
         profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
         self._partially_loaded = True
@@ -389,8 +389,6 @@ class Project():
     # Cleans up resources used loading elements
     #
     def cleanup(self):
-        self.loader.cleanup()
         # Reset the element loader state
         Element._reset_load_state()
@@ -439,7 +437,7 @@ class Project():
     #
     # Raises: LoadError if there was a problem with the project.conf
     #
-    def _load(self, parent_loader=None, tempdir=None):
+    def _load(self, parent_loader=None):
         # Load builtin default
         projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
@@ -505,8 +503,7 @@ class Project():
         self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
         self.loader = Loader(self._context, self,
-                             parent=parent_loader,
-                             tempdir=tempdir)
+                             parent=parent_loader)
         self._project_includes = Includes(self.loader, copy_tree=False)
...
@@ -2,6 +2,8 @@ import os
 import tarfile
 import hashlib
 import pytest
+import shutil
+import stat
 import subprocess
 from tests.testutils.site import IS_WINDOWS
 from tests.testutils import create_repo, ALL_REPO_KINDS, generate_junction
@@ -709,3 +711,34 @@ def test_build_checkout_cross_junction(datafiles, cli, tmpdir):
     filename = os.path.join(checkout, 'etc', 'animal.conf')
     assert os.path.exists(filename)
+@pytest.mark.datafiles(DATA_DIR)
+def test_access_rights(datafiles, cli):
+    project = str(datafiles)
+    checkout = os.path.join(cli.directory, 'checkout')
+    shutil.copyfile(os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello'),
+                    os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello-2'))
+    os.chmod(os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello'),
+             0o0755)
+    os.chmod(os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello-2'),
+             0o0644)
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_success()
+    checkout_args = ['artifact', 'checkout', 'target.bst',
+                     '--directory', checkout]
+    # Now check it out
+    result = cli.run(project=project, args=checkout_args)
+    result.assert_success()
+    st = os.lstat(os.path.join(checkout, 'usr', 'bin', 'hello'))
+    assert stat.S_ISREG(st.st_mode)
+    assert stat.S_IMODE(st.st_mode) == 0o0755
+    st = os.lstat(os.path.join(checkout, 'usr', 'bin', 'hello-2'))
+    assert stat.S_ISREG(st.st_mode)
+    assert stat.S_IMODE(st.st_mode) == 0o0644

 import os
 import shutil
+import stat
 import pytest
 from buildstream.plugintestutils import cli
 from tests.testutils import create_artifact_share, generate_junction
@@ -462,3 +463,74 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
     assert shareproject.repo not in result.stderr
     assert shareuser.repo not in result.stderr
     assert sharecli.repo in result.stderr
+@pytest.mark.datafiles(DATA_DIR)
+def test_pull_access_rights(caplog, cli, tmpdir, datafiles):
+    project = str(datafiles)
+    checkout = os.path.join(str(tmpdir), 'checkout')
+    # Work-around datafiles not preserving mode
+    os.chmod(os.path.join(project, 'files/bin-files/usr/bin/hello'), 0o0755)
+    # We need a big file that does not go into a batch to test a different
+    # code path
+    os.makedirs(os.path.join(project, 'files/dev-files/usr/share'), exist_ok=True)
+    with open(os.path.join(project, 'files/dev-files/usr/share/big-file'), 'w') as f:
+        buf = ' ' * 4096
+        for _ in range(1024):
+            f.write(buf)
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+        cli.configure({
+            'artifacts': {'url': share.repo, 'push': True}
+        })
+        result = cli.run(project=project, args=['build', 'compose-all.bst'])
+        result.assert_success()
+        result = cli.run(project=project,
+                         args=['artifact', 'checkout',
+                               '--hardlinks', '--no-integrate',
+                               'compose-all.bst',
+                               '--directory', checkout])
+        result.assert_success()
+        st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0644
+        st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0755
+        st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0644
+        shutil.rmtree(checkout)
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
+        result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
+        result.assert_success()
+        result = cli.run(project=project,
+                         args=['artifact', 'checkout',
+                               '--hardlinks', '--no-integrate',
+                               'compose-all.bst',
+                               '--directory', checkout])
+        result.assert_success()
+        st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0644
+        st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0755
+        st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
+        assert stat.S_ISREG(st.st_mode)
+        assert stat.S_IMODE(st.st_mode) == 0o0644

@@ -49,7 +49,7 @@ class ArtifactShare():
         os.makedirs(self.repodir)
-        self.cas = CASCache(self.repodir)
+        self.cas = CASCache(self.repodir, disable_exec=True)
         self.total_space = total_space
         self.free_space = free_space
...