Commit 52bec9bb authored by Vasilii Iakliushin

Merge branch '366724_add_protected_branch_cache' into 'master'

Add `ProtectedBranches::CacheService` for efficient caching

See merge request !92922
parents 6e5bb192 8aebc640
Pipeline #600317264 passed
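At a glance: `ProtectedBranches::CacheService` memoizes per-ref true/false lookups in one Redis hash per project, and the create/update/destroy services below drop that hash whenever protected branches change. No caller of `#fetch` appears in this diff, so the snippet below is only a sketch of the intended usage pattern; the `ProtectedBranch.protected?` check is an illustrative stand-in for whatever expensive lookup gets wrapped:

# Illustrative caller -- not part of this merge request.
service = ProtectedBranches::CacheService.new(project)

branch_protected = service.fetch('main') do
  # The expensive lookup being memoized, e.g. matching the ref against protection rules.
  ProtectedBranch.protected?(project, 'main')
end

service.refresh # drop the project's cache after any protected-branch change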
@@ -13,5 +13,9 @@ def initialize(project, current_user = nil, params = {})
     def after_execute(*)
       # overridden in EE::ProtectedBranches module
     end
+
+    def refresh_cache
+      CacheService.new(@project, @current_user, @params).refresh
+    end
   end
 end
# frozen_string_literal: true

module ProtectedBranches
  class CacheService < ProtectedBranches::BaseService
    CACHE_ROOT_KEY = 'cache:gitlab:protected_branch'
    TTL_UNSET = -1
    CACHE_EXPIRE_IN = 1.day
    CACHE_LIMIT = 1000

    def fetch(ref_name)
      record = OpenSSL::Digest::SHA256.hexdigest(ref_name)

      Gitlab::Redis::Cache.with do |redis|
        cached_result = redis.hget(redis_key, record)
        break Gitlab::Redis::Boolean.decode(cached_result) unless cached_result.nil?

        value = yield

        redis.hset(redis_key, record, Gitlab::Redis::Boolean.encode(value))

        # We don't want to extend cache expiration time
        if redis.ttl(redis_key) == TTL_UNSET
          redis.expire(redis_key, CACHE_EXPIRE_IN)
        end

        # If the cache record has too many elements, then something went wrong and
        # it's better to drop the cache key.
        if redis.hlen(redis_key) > CACHE_LIMIT
          redis.unlink(redis_key)
        end

        value
      end
    end

    def refresh
      Gitlab::Redis::Cache.with { |redis| redis.unlink(redis_key) }
    end

    private

    def redis_key
      @redis_key ||= [CACHE_ROOT_KEY, @project.id].join(':')
    end
  end
end
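One detail in `#fetch` worth noting: Redis only stores strings, so a naively cached `false` would come back as a truthy string. `Gitlab::Redis::Boolean` round-trips real booleans through the hash, and the `cached_result.nil?` check is what separates a cached `false` (a hit) from "never cached" (a miss). A small illustrative sketch, using the helper exactly as the service does:

raw = false.to_s
raw ? 'truthy' : 'falsey'              # => "truthy" -- a plain string-cached false would lie

encoded = Gitlab::Redis::Boolean.encode(false)
Gitlab::Redis::Boolean.decode(encoded) # => false, a real boolean again

# In #fetch, `cached_result.nil?` distinguishes a cached false (decode and
# return it) from a miss (run the block and store the result).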
@@ -7,6 +7,8 @@ def execute(skip_authorization: false)
       save_protected_branch

+      refresh_cache
+
       protected_branch
     end
@@ -5,7 +5,7 @@ class DestroyService < ProtectedBranches::BaseService
     def execute(protected_branch)
       raise Gitlab::Access::AccessDeniedError unless can?(current_user, :destroy_protected_branch, protected_branch)

-      protected_branch.destroy
+      protected_branch.destroy.tap { refresh_cache }
     end
   end
 end
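The `.tap { refresh_cache }` in `DestroyService#execute` invalidates the cache while leaving the method's return value untouched; an equivalent, more verbose spelling of the same change would be:

result = protected_branch.destroy
refresh_cache
result # execute still returns the (destroyed) record, as before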
@@ -10,6 +10,8 @@ def execute(protected_branch)
       if protected_branch.update(params)
         after_execute(protected_branch: protected_branch, old_merge_access_levels: old_merge_access_levels, old_push_access_levels: old_push_access_levels)
+
+        refresh_cache
       end

       protected_branch
# frozen_string_literal: true
# rubocop:disable Style/RedundantFetchBlock
#
require 'spec_helper'

RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache do
  subject(:service) { described_class.new(project, user) }

  let_it_be(:project) { create(:project) }
  let_it_be(:user) { project.first_owner }

  let(:immediate_expiration) { 0 }

  describe '#fetch' do
    it 'caches the value' do
      expect(service.fetch('main') { true }).to eq(true)
      expect(service.fetch('not-found') { false }).to eq(false)

      # Uses cached values
      expect(service.fetch('main') { false }).to eq(true)
      expect(service.fetch('not-found') { true }).to eq(false)
    end

    it 'sets expiry on the key' do
      stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)

      expect(service.fetch('main') { true }).to eq(true)
      expect(service.fetch('not-found') { false }).to eq(false)

      expect(service.fetch('main') { false }).to eq(false)
      expect(service.fetch('not-found') { true }).to eq(true)
    end

    it 'does not set an expiry on the key after the hash is already created' do
      expect(service.fetch('main') { true }).to eq(true)

      stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)

      expect(service.fetch('not-found') { false }).to eq(false)

      expect(service.fetch('main') { false }).to eq(true)
      expect(service.fetch('not-found') { true }).to eq(false)
    end

    context 'when CACHE_LIMIT is exceeded' do
      before do
        stub_const("#{described_class.name}::CACHE_LIMIT", 2)
      end

      it 'recreates cache' do
        expect(service.fetch('main') { true }).to eq(true)
        expect(service.fetch('not-found') { false }).to eq(false)

        # Uses cached values
        expect(service.fetch('main') { false }).to eq(true)
        expect(service.fetch('not-found') { true }).to eq(false)

        # Overflow
        expect(service.fetch('new-branch') { true }).to eq(true)

        # Refreshes values
        expect(service.fetch('main') { false }).to eq(false)
        expect(service.fetch('not-found') { true }).to eq(true)
      end
    end
  end

  describe '#refresh' do
    it 'clears cached values' do
      expect(service.fetch('main') { true }).to eq(true)
      expect(service.fetch('not-found') { false }).to eq(false)

      service.refresh

      # Recreates cache
      expect(service.fetch('main') { false }).to eq(false)
      expect(service.fetch('not-found') { true }).to eq(true)
    end
  end
end
# rubocop:enable Style/RedundantFetchBlock
@@ -24,6 +24,14 @@
       expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
     end

+    it 'refreshes the cache' do
+      expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+        expect(cache_service).to receive(:refresh)
+      end
+
+      service.execute
+    end
+
     context 'when protecting a branch with a name that contains HTML tags' do
       let(:name) { 'foo<b>bar<\b>' }
@@ -16,6 +16,14 @@
       expect(protected_branch).to be_destroyed
     end

+    it 'refreshes the cache' do
+      expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+        expect(cache_service).to receive(:refresh)
+      end
+
+      service.execute(protected_branch)
+    end
+
     context 'when a policy restricts rule deletion' do
       before do
         policy = instance_double(ProtectedBranchPolicy, allowed?: false)
@@ -18,6 +18,14 @@
       expect(result.reload.name).to eq(params[:name])
     end

+    it 'refreshes the cache' do
+      expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+        expect(cache_service).to receive(:refresh)
+      end
+
+      result
+    end
+
     context 'when updating name of a protected branch to one that contains HTML tags' do
       let(:new_name) { 'foo<b>bar<\b>' }
       let(:result) { service.execute(protected_branch) }