Commit ed0df6b5 authored by James Ennis's avatar James Ennis Committed by Tristan Van Berkom

push.py: Add tests to test remote cache expiry

The tests include a new environment variable: BST_CACHE_QUOTA.
This variable is used in receive_main() in pushreceive.py.
Test names: test_artifact_expires, test_artifact_too_large
test_recently_pulled_artifact_does_not_expire: marked as xfail until
we implement LRU expiry in remote share
parent 8a0c0a9a
import os
import shutil
import pytest
from collections import namedtuple
from unittest.mock import MagicMock
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli, create_artifact_share
from tests.testutils import cli, create_artifact_share, create_element_size
from tests.testutils.site import IS_LINUX
# Project directory
......@@ -195,3 +198,183 @@ def test_push_after_pull(cli, tmpdir, datafiles):
result.assert_success()
assert result.get_pulled_elements() == ['target.bst']
assert result.get_pushed_elements() == ['target.bst']
# Ensure that when an artifact's size exceeds available disk space
# the least recently pushed artifact is deleted in order to make room for
# the incoming artifact.
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_expires(cli, datafiles, tmpdir, monkeypatch):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_path = os.path.join(project, 'elements')

    # Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))

    # Mock the os.statvfs() call to return a named tuple which emulates an
    # os.statvfs_result object.
    #
    # NOTE: use the monkeypatch fixture rather than assigning to os.statvfs
    # directly, so the patch is automatically undone when this test finishes
    # and does not leak into other tests in the session.
    statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
    monkeypatch.setattr(os, 'statvfs',
                        MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
                                                              f_bfree=(int(12e6) + int(2e9)),
                                                              f_bsize=1)))

    # Configure bst to push to the cache
    cli.configure({
        'artifacts': {'url': share.repo, 'push': True},
    })

    # Create and build an element of 5 MB
    create_element_size('element1.bst', element_path, [], int(5e6))  # [] => no deps
    result = cli.run(project=project, args=['build', 'element1.bst'])
    result.assert_success()

    # Create and build an element of 5 MB
    create_element_size('element2.bst', element_path, [], int(5e6))  # [] => no deps
    result = cli.run(project=project, args=['build', 'element2.bst'])
    result.assert_success()

    # update the share
    share.update_summary()

    # check that elements 1 and 2 are cached both locally and remotely
    assert cli.get_element_state(project, 'element1.bst') == 'cached'
    assert_shared(cli, share, project, 'element1.bst')
    assert cli.get_element_state(project, 'element2.bst') == 'cached'
    assert_shared(cli, share, project, 'element2.bst')

    # update mocked available disk space now that two 5 MB artifacts have been added
    monkeypatch.setattr(os, 'statvfs',
                        MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
                                                              f_bfree=(int(2e6) + int(2e9)),
                                                              f_bsize=1)))

    # Create and build another element of 5 MB (This will exceed the free disk space available)
    create_element_size('element3.bst', element_path, [], int(5e6))
    result = cli.run(project=project, args=['build', 'element3.bst'])
    result.assert_success()

    # update the share
    share.update_summary()

    # Ensure it is cached both locally and remotely
    assert cli.get_element_state(project, 'element3.bst') == 'cached'
    assert_shared(cli, share, project, 'element3.bst')

    # Ensure element1 has been removed from the share
    assert_not_shared(cli, share, project, 'element1.bst')

    # Ensure that element2 remains
    assert_shared(cli, share, project, 'element2.bst')
# Test that a large artifact, whose size exceeds the quota, is not pushed
# to the remote share
@pytest.mark.xfail
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_too_large(cli, datafiles, tmpdir, monkeypatch):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_path = os.path.join(project, 'elements')

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))

    # Mock a file system with 5 MB total space.
    #
    # NOTE: use the monkeypatch fixture rather than assigning to os.statvfs
    # directly, so the patch is automatically undone when this test finishes
    # and does not leak into other tests in the session.
    statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
    monkeypatch.setattr(os, 'statvfs',
                        MagicMock(return_value=statvfs_result(f_blocks=int(5e6) + int(2e9),
                                                              f_bfree=(int(5e6) + int(2e9)),
                                                              f_bsize=1)))

    # Configure bst to push to the remote cache
    cli.configure({
        'artifacts': {'url': share.repo, 'push': True},
    })

    # Create and push a 3MB element
    create_element_size('small_element.bst', element_path, [], int(3e6))
    result = cli.run(project=project, args=['build', 'small_element.bst'])
    result.assert_success()

    # Create and try to push a 6MB element.
    create_element_size('large_element.bst', element_path, [], int(6e6))
    result = cli.run(project=project, args=['build', 'large_element.bst'])
    result.assert_success()

    # update the cache
    share.update_summary()

    # Ensure that the small artifact is still in the share
    assert cli.get_element_state(project, 'small_element.bst') == 'cached'
    assert_shared(cli, share, project, 'small_element.bst')

    # Ensure that the large artifact is cached locally but NOT remotely
    assert cli.get_element_state(project, 'large_element.bst') == 'cached'
    assert_not_shared(cli, share, project, 'large_element.bst')
# Test that when an element is pulled recently, it is not considered the LRU element.
# NOTE: We expect this test to fail as the current implementation of remote cache
# expiry only expires the least recently pushed artifact, NOT the least recently
# used. This will hopefully change when we implement a CAS cache.
@pytest.mark.xfail
@pytest.mark.datafiles(DATA_DIR)
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir, monkeypatch):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    element_path = os.path.join(project, 'elements')

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))

    # Mock a file system with 12 MB free disk space.
    #
    # NOTE: use the monkeypatch fixture rather than assigning to os.statvfs
    # directly, so the patch is automatically undone when this test finishes
    # and does not leak into other tests in the session.
    statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
    monkeypatch.setattr(os, 'statvfs',
                        MagicMock(return_value=statvfs_result(f_blocks=int(10e9) + int(2e9),
                                                              f_bfree=(int(12e6) + int(2e9)),
                                                              f_bsize=1)))

    # Configure bst to push to the cache
    cli.configure({
        'artifacts': {'url': share.repo, 'push': True},
    })

    # Create and build 2 elements, each of 5 MB.
    create_element_size('element1.bst', element_path, [], int(5e6))
    result = cli.run(project=project, args=['build', 'element1.bst'])
    result.assert_success()

    create_element_size('element2.bst', element_path, [], int(5e6))
    result = cli.run(project=project, args=['build', 'element2.bst'])
    result.assert_success()

    share.update_summary()

    # Ensure they are cached locally
    assert cli.get_element_state(project, 'element1.bst') == 'cached'
    assert cli.get_element_state(project, 'element2.bst') == 'cached'

    # Ensure that they have been pushed to the cache
    assert_shared(cli, share, project, 'element1.bst')
    assert_shared(cli, share, project, 'element2.bst')

    # Remove element1 from the local cache
    cli.remove_artifact_from_cache(project, 'element1.bst')
    assert cli.get_element_state(project, 'element1.bst') != 'cached'

    # Pull element1 from the remote cache (this should update its mtime)
    result = cli.run(project=project, args=['pull', 'element1.bst', '--remote',
                                            share.repo])
    result.assert_success()

    # Ensure element1 is cached locally
    assert cli.get_element_state(project, 'element1.bst') == 'cached'

    # Create and build element3 (of 5 MB)
    create_element_size('element3.bst', element_path, [], int(5e6))
    result = cli.run(project=project, args=['build', 'element3.bst'])
    result.assert_success()

    share.update_summary()

    # Make sure it's cached locally and remotely
    assert cli.get_element_state(project, 'element3.bst') == 'cached'
    assert_shared(cli, share, project, 'element3.bst')

    # Ensure that element2 was deleted from the share and element1 remains
    assert_not_shared(cli, share, project, 'element2.bst')
    assert_shared(cli, share, project, 'element1.bst')
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment