Commit c7f76268 authored by Raoul Hidalgo Charman, committed by Jürg Billeter

cachedir: add new dir option that acts as the default root for other dirs

Makes artifactdir and builddir obsolete.

Fixes #870
parent 52c0c185
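For illustration only (this example is not part of the commit): after this change a user configuration only needs to point cachedir at a root location, and the cas, tmp and build directories are derived beneath it, while sourcedir and logdir can still be set separately. A minimal sketch in the dict form the test fixtures use; the paths are assumed values:

user_config = {
    'cachedir': '/home/user/.cache/buildstream',            # root for cas/, tmp/ and build/
    'sourcedir': '/home/user/.cache/buildstream/sources',   # still configurable on its own
    'logdir': '/home/user/.cache/buildstream/logs',
}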
@@ -58,12 +58,21 @@ class Context():
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
# The directory under which other directories are based
self.cachedir = None
# The directory where various sources are stored
self.sourcedir = None
# The directory where build sandboxes will be created
self.builddir = None
# The directory for CAS
self.casdir = None
# The directory for temporary files
self.tmpdir = None
# Default root location for workspaces
self.workspacedir = None
@@ -179,13 +188,24 @@ class Context():
user_config = _yaml.load(config)
_yaml.composite(defaults, user_config)
# Give obsoletion warnings
if defaults.get('builddir'):
raise LoadError(LoadErrorReason.INVALID_DATA,
"builddir is obsolete, use cachedir")
if defaults.get('artifactdir'):
print("artifactdir is deprecated, use cachedir")
else:
defaults['artifactdir'] = os.path.join(defaults['cachedir'], 'artifacts')
_yaml.node_validate(defaults, [
'sourcedir', 'builddir', 'artifactdir', 'logdir',
'cachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
'scheduler', 'artifacts', 'logging', 'projects',
'cache', 'prompt', 'workspacedir', 'remote-execution'
'cache', 'prompt', 'workspacedir', 'remote-execution',
])
for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
for directory in ['cachedir', 'sourcedir', 'artifactdir', 'logdir',
'workspacedir']:
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
@@ -195,6 +215,11 @@ class Context():
path = os.path.normpath(path)
setattr(self, directory, path)
# add directories not set by users
self.tmpdir = os.path.join(self.cachedir, 'tmp')
self.casdir = os.path.join(self.cachedir, 'cas')
self.builddir = os.path.join(self.cachedir, 'build')
# Load quota configuration
# We need to find the first existing directory in the path of
# our artifactdir - the artifactdir may not have been created
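For illustration only (not part of the diff): each directory listed above is run through tilde and environment-variable expansion before being set on the Context, as the comment describes. A minimal sketch of that expansion using standard library helpers; _expand_path is a hypothetical name:

import os

def _expand_path(path):
    # Resolve ~ and ${XDG_CACHE_HOME}-style variables, then normalise the result.
    path = os.path.expanduser(path)
    path = os.path.expandvars(path)
    return os.path.normpath(path)

# e.g. _expand_path('~/.cache/buildstream') -> '/home/user/.cache/buildstream' (assuming that home directory)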
@@ -640,7 +665,7 @@ class Context():
def get_cascache(self):
if self._cascache is None:
self._cascache = CASCache(self.artifactdir)
self._cascache = CASCache(self.cachedir)
return self._cascache
......
@@ -13,11 +13,8 @@
# Location to store sources
sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
# Location to perform builds
builddir: ${XDG_CACHE_HOME}/buildstream/build
# Location to store local binary artifacts
artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
# Root location for other directories in the cache
cachedir: ${XDG_CACHE_HOME}/buildstream
# Location to store build logs
logdir: ${XDG_CACHE_HOME}/buildstream/logs
......
@@ -277,10 +277,10 @@ class Cli():
*, cache_dir=None):
# Read configuration to figure out where artifacts are stored
if not cache_dir:
default = os.path.join(project, 'cache', 'artifacts')
default = os.path.join(project, 'cache')
if self.config is not None:
cache_dir = self.config.get('artifactdir', default)
cache_dir = self.config.get('cachedir', default)
else:
cache_dir = default
@@ -582,11 +582,21 @@ def cli_integration(tmpdir, integration_cache):
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
fixture.configure({
'cachedir': integration_cache.cachedir,
'sourcedir': integration_cache.sources,
'artifactdir': integration_cache.artifacts
})
return fixture
yield fixture
# Remove the following folders if necessary
try:
shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
except FileNotFoundError:
pass
try:
shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
except FileNotFoundError:
pass
@contextmanager
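For illustration only (not part of the diff): with the cache rooted at cachedir, the integration fixtures and tests below clear local state by removing the subdirectories derived under the cache root instead of a separate artifacts directory. A hedged sketch of that cleanup pattern; wipe_local_cache is a hypothetical helper:

import os
import shutil

def wipe_local_cache(cachedir):
    # Remove the directories derived under cachedir, ignoring any that are
    # missing, mirroring the try/except FileNotFoundError pattern above.
    for subdir in ('cas', 'tmp', 'build'):
        shutil.rmtree(os.path.join(cachedir, subdir), ignore_errors=True)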
@@ -626,10 +636,8 @@ def configured(directory, config=None):
if not config.get('sourcedir', False):
config['sourcedir'] = os.path.join(directory, 'sources')
if not config.get('builddir', False):
config['builddir'] = os.path.join(directory, 'build')
if not config.get('artifactdir', False):
config['artifactdir'] = os.path.join(directory, 'artifacts')
if not config.get('cachedir', False):
config['cachedir'] = directory
if not config.get('logdir', False):
config['logdir'] = os.path.join(directory, 'logs')
......
@@ -53,16 +53,16 @@ def pytest_runtest_setup(item):
class IntegrationCache():
def __init__(self, cache):
cache = os.path.abspath(cache)
self.root = os.path.abspath(cache)
os.makedirs(cache, exist_ok=True)
# Use the same sources every time
self.sources = os.path.join(cache, 'sources')
self.sources = os.path.join(self.root, 'sources')
# Create a temp directory for the duration of the test for
# the artifacts directory
try:
self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
except OSError as e:
raise AssertionError("Unable to create test directory !") from e
@@ -84,7 +84,11 @@ def integration_cache(request):
# Clean up the artifacts after each test run - we only want to
# cache sources between runs
try:
shutil.rmtree(cache.artifacts)
shutil.rmtree(cache.cachedir)
except FileNotFoundError:
pass
try:
shutil.rmtree(os.path.join(cache.root, 'cas'))
except FileNotFoundError:
pass
......
@@ -194,10 +194,9 @@ def workdir(source_cache=None):
bst_config_file = os.path.join(tempdir, 'buildstream.conf')
config = {
'cachedir': tempdir,
'sourcedir': source_cache,
'artifactdir': os.path.join(tempdir, 'artifacts'),
'logdir': os.path.join(tempdir, 'logs'),
'builddir': os.path.join(tempdir, 'build'),
}
_yaml.dump(config, bst_config_file)
@@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# Encode and save the output if that was asked for
output = _yaml.node_get(command, str, 'output', default_value=None)
if output is not None:
# Convert / Generate a nice <div>
converted = generate_html(command_out, directory, config_file,
source_cache, tempdir, palette,
command_str, command_fake_output is not None)
# Save it
filename = os.path.join(desc_dir, output)
filename = os.path.realpath(filename)
......
@@ -2,7 +2,7 @@
commands:
# Make it fetch first
- directory: ../examples/running-commands
command: fetch hello.bst
command: source fetch hello.bst
# Capture a show output
- directory: ../examples/running-commands
......
@@ -51,7 +51,8 @@ def test_cache_size_write(cli, tmpdir):
# Artifact cache must be in a known place
artifactdir = os.path.join(project_dir, "artifacts")
cli.configure({"artifactdir": artifactdir})
casdir = os.path.join(project_dir, "cas")
cli.configure({"cachedir": project_dir})
# Build, to populate the cache
res = cli.run(project=project_dir, args=["build", "test.bst"])
......
@@ -436,7 +436,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
# Now we should have a directory for the cached target2.bst, which
# replaced target.bst in the cache, we should not have a directory
# for the target.bst
refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
refsdirtest = os.path.join(refsdir, 'test')
refsdirtarget = os.path.join(refsdirtest, 'target')
refsdirtarget2 = os.path.join(refsdirtest, 'target2')
......
@@ -70,8 +70,8 @@ def test_push_pull(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
state = cli.get_element_state(project, 'target.bst')
......
@@ -57,7 +57,7 @@ def test_pull(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
cache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -66,7 +66,8 @@ def test_pull(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
}
},
'cachedir': cache_dir
}
# Write down the user configuration file
@@ -93,7 +94,6 @@ def test_pull(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -111,7 +111,7 @@ def test_pull(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull, queue, user_config_file, project_dir,
artifact_dir, 'target.bst', element_key))
cache_dir, 'target.bst', element_key))
try:
# Keep SIGINT blocked in the child process
@@ -128,12 +128,14 @@ def test_pull(cli, tmpdir, datafiles):
assert cas.contains(element, element_key)
def _test_pull(user_config_file, project_dir, artifact_dir,
def _test_pull(user_config_file, project_dir, cache_dir,
element_name, element_key, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.cachedir = cache_dir
context.casdir = os.path.join(cache_dir, 'cas')
context.tmpdir = os.path.join(cache_dir, 'tmp')
context.set_message_handler(message_handler)
# Load the project manually
@@ -166,7 +168,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -175,7 +177,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
}
},
'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -196,7 +199,6 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -219,7 +221,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_tree, queue, user_config_file, project_dir,
artifact_dir, artifact_digest))
artifact_digest))
try:
# Keep SIGINT blocked in the child process
@@ -247,7 +249,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull_tree, queue, user_config_file, project_dir,
artifact_dir, tree_digest))
tree_digest))
try:
# Keep SIGINT blocked in the child process
@@ -269,11 +271,10 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert os.path.exists(cas.objpath(directory_digest))
def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -305,11 +306,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
queue.put("No remote configured")
def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
......
@@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
}
},
'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push, queue, user_config_file, project_dir,
artifact_dir, 'target.bst', element_key))
'target.bst', element_key))
try:
# Keep SIGINT blocked in the child process
@@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles):
assert share.has_artifact('test', 'target.bst', element_key)
def _test_push(user_config_file, project_dir, artifact_dir,
element_name, element_key, queue):
def _test_push(user_config_file, project_dir, element_name, element_key, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
}
},
'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -198,7 +196,7 @@ def test_push_directory(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_directory, queue, user_config_file,
project_dir, artifact_dir, artifact_digest))
project_dir, artifact_digest))
try:
# Keep SIGINT blocked in the child process
@@ -216,11 +214,10 @@ def test_push_directory(cli, tmpdir, datafiles):
assert share.has_object(artifact_digest)
def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -254,6 +251,7 @@ def test_push_message(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -262,7 +260,8 @@ def test_push_message(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
}
},
'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -273,7 +272,7 @@ def test_push_message(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_message, queue, user_config_file,
project_dir, artifact_dir))
project_dir))
try:
# Keep SIGINT blocked in the child process
@@ -292,11 +291,10 @@ def test_push_message(cli, tmpdir, datafiles):
assert share.has_object(message_digest)
def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
def _test_push_message(user_config_file, project_dir, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
......
@@ -64,8 +64,8 @@ def test_push_pull_all(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
@@ -114,7 +114,7 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
artifacts = os.path.join(cli.directory, 'cas')
shutil.rmtree(artifacts)
# Assert that nothing is cached locally anymore
@@ -156,8 +156,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
assert_shared(cli, share2, project, 'target.bst')
# Delete the user's local artifact cache.
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that the element is not cached anymore.
assert cli.get_element_state(project, 'target.bst') != 'cached'
@@ -210,8 +210,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the good_share.
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
good_share.repo])
@@ -251,8 +251,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -301,8 +301,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -337,7 +337,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
result.assert_success()
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
cache_dir = os.path.join(project, 'cache', 'artifacts')
cache_dir = os.path.join(project, 'cache', 'cas')
shutil.rmtree(cache_dir)
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
@@ -372,8 +372,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
......
@@ -22,9 +22,10 @@ DATA_DIR = os.path.join(
# cleared as just forcefully removing the refpath leaves dangling objects.
def default_state(cli, tmpdir, share):
shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
cli.configure({
'artifacts': {'url': share.repo, 'push': False},
'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
'cachedir': str(tmpdir),
'cache': {'pull-buildtrees': False},
})
@@ -45,7 +46,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
cli2.configure({
'artifacts': {'url': share1.repo, 'push': True},
'artifactdir': os.path.join(str(tmpdir), 'artifacts')
'cachedir': str(tmpdir),
})
# Build autotools element, checked pushed, delete local
......
@@ -231,10 +231,8 @@ def test_buildtree_options(cli, tmpdir, datafiles):
assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
# Discard the cache
cli.configure({
'artifacts': {'url': share.repo, 'push': True},
'artifactdir': os.path.join(cli.directory, 'artifacts2')
})
shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
assert cli.get_element_state(project, element_name) != 'cached'
# Pull from cache, but do not include buildtrees.
@@ -274,7 +272,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
])
assert 'Attempting to fetch missing artifact buildtree' in res.stderr
assert 'Hi' in res.output
shutil.rmtree(os.path.join(os.path.join(cli.directory, 'artifacts2')))
shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'cas')))
assert cli.get_element_state(project, element_name) != 'cached'
# Check it's not loading the shell at all with always set for the buildtree, when the
......
@@ -31,7 +31,7 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS] + ['local'])
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_cache):
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
element_name = 'list.bst'
element_path = os.path.join(project, 'elements', element_name)
@@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
return f.read()
finally:
os.umask(old_umask)
cache_dir = integration_cache.artifacts
cli.remove_artifact_from_cache(project, element_name,
cache_dir=cache_dir)
cli.remove_artifact_from_cache(project, element_name)
assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
@@ -104,7 +102,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
"""
project = str(datafiles)
@@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
return f.read()
finally:
cache_dir = integration_cache.artifacts
cli.remove_artifact_from_cache(project, element_name,
cache_dir=cache_dir)
cli.remove_artifact_from_cache(project, element_name)
assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)