Skip to content
Snippets Groups Projects
Commit bcec4cf6 authored by Sylvester Chin's avatar Sylvester Chin :red_circle: Committed by Mayra Cabrera
Browse files

Upgrade the Sidekiq gem to v7.1.6

This MR also updates the sidekiq-cron gem to v1.9.1.

Changelog: other
EE: true
parent 2f6862c5
No related branches found
No related tags found
1 merge request!135185Upgrade the Sidekiq gem to v7.1.6
Showing
with 349 additions and 71 deletions
......@@ -247,8 +247,8 @@ gem 'state_machines-activerecord', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeat
gem 'acts-as-taggable-on', '~> 10.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Background jobs
gem 'sidekiq', '~> 6.5.10' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'sidekiq-cron', '~> 1.8.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'sidekiq', '~> 7.1.6' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'sidekiq-cron', '~> 1.9.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'gitlab-sidekiq-fetcher', path: 'vendor/gems/sidekiq-reliable-fetch', require: 'sidekiq-reliable-fetch' # rubocop:todo Gemfile/MissingFeatureCategory
# Cron Parser
......
......@@ -514,6 +514,7 @@
{"name":"redcarpet","version":"3.6.0","platform":"ruby","checksum":"8ad1889c0355ff4c47174af14edd06d62f45a326da1da6e8a121d59bdcd2e9e9"},
{"name":"redis","version":"4.8.0","platform":"ruby","checksum":"2000cf5014669c9dc821704b6d322a35a9a33852a95208911d9175d63b448a44"},
{"name":"redis-actionpack","version":"5.3.0","platform":"ruby","checksum":"3fb1ad0a8fd9d26a289c9399bb609dcaef38bf37711e6f677a53ca728fc19140"},
{"name":"redis-client","version":"0.18.0","platform":"ruby","checksum":"a93bd1f99c024bb7f8e21eff7bdbcb16d85dbcbfe3f6ed051239e38d4c127704"},
{"name":"redis-rack","version":"2.1.4","platform":"ruby","checksum":"0872eecb303e483c3863d6bd0d47323d230640d41c1a4ac4a2c7596ec0b1774c"},
{"name":"redis-store","version":"1.9.1","platform":"ruby","checksum":"7b4c7438d46f7b7ce8f67fc0eda3a04fc67d32d28cf606cc98a5df4d2b77071d"},
{"name":"regexp_parser","version":"2.6.0","platform":"ruby","checksum":"f163ba463a45ca2f2730e0902f2475bb0eefcd536dfc2f900a86d1e5a7d7a556"},
......@@ -589,8 +590,8 @@
{"name":"sexp_processor","version":"4.17.0","platform":"ruby","checksum":"4daa4874ce1838cd801c65e66ed5d4f140024404a3de7482c36d4ef2604dff6f"},
{"name":"shellany","version":"0.0.1","platform":"ruby","checksum":"0e127a9132698766d7e752e82cdac8250b6adbd09e6c0a7fbbb6f61964fedee7"},
{"name":"shoulda-matchers","version":"5.1.0","platform":"ruby","checksum":"a01d20589989e9653ab4a28c67d9db2b82bcf0a2496cf01d5e1a95a4aaaf5b07"},
{"name":"sidekiq","version":"6.5.12","platform":"ruby","checksum":"b4f93b2204c42220d0b526a7b8e0c49b5f9da82c1ce1a05d2baf1e8f744c197f"},
{"name":"sidekiq-cron","version":"1.8.0","platform":"ruby","checksum":"47da72ca73ce5b71896aaf7e7c4391386ec517dd003f184c50c0b727d82eb0ca"},
{"name":"sidekiq","version":"7.1.6","platform":"ruby","checksum":"7859da66d5bcef3c22bea2c3091d08c866890168e003f5bf4dea197dc37843a2"},
{"name":"sidekiq-cron","version":"1.9.1","platform":"ruby","checksum":"79d11c79c686ec2e540c1932ccd12b0c07e7c228d28a0a7c515a6c7fcd3c22df"},
{"name":"sigdump","version":"0.2.4","platform":"ruby","checksum":"0bf2176e55c1a262788623fe5ea57caddd6ba2abebe5e349d9d5e7c3a3010ed7"},
{"name":"signet","version":"0.17.0","platform":"ruby","checksum":"1d2831930dc28da32e34bec68cf7ded97ee2867b208f97c500ee293829cb0004"},
{"name":"simple_po_parser","version":"1.1.6","platform":"ruby","checksum":"122687d44d3de516a0e69e2f383a4180f5015e8c5ed5a7f2258f2b376f64cbf3"},
......
......@@ -169,9 +169,9 @@ PATH
PATH
remote: vendor/gems/sidekiq-reliable-fetch
specs:
gitlab-sidekiq-fetcher (0.10.0)
gitlab-sidekiq-fetcher (0.11.0)
json (>= 2.5)
sidekiq (~> 6.1)
sidekiq (~> 7.0)
GEM
remote: https://rubygems.org/
......@@ -1347,6 +1347,8 @@ GEM
actionpack (>= 5, < 8)
redis-rack (>= 2.1.0, < 3)
redis-store (>= 1.1.0, < 2)
redis-client (0.18.0)
connection_pool
redis-rack (2.1.4)
rack (>= 2.0.8, < 3)
redis-store (>= 1.2, < 2)
......@@ -1529,12 +1531,13 @@ GEM
shellany (0.0.1)
shoulda-matchers (5.1.0)
activesupport (>= 5.2.0)
sidekiq (6.5.12)
connection_pool (>= 2.2.5, < 3)
rack (~> 2.0)
redis (>= 4.5.0, < 5)
sidekiq-cron (1.8.0)
fugit (~> 1)
sidekiq (7.1.6)
concurrent-ruby (< 2)
connection_pool (>= 2.3.0)
rack (>= 2.2.4)
redis-client (>= 0.14.0)
sidekiq-cron (1.9.1)
fugit (~> 1.8)
sidekiq (>= 4.2.1)
sigdump (0.2.4)
signet (0.17.0)
......@@ -2053,8 +2056,8 @@ DEPENDENCIES
sentry-ruby (~> 5.8.0)
sentry-sidekiq (~> 5.8.0)
shoulda-matchers (~> 5.1.0)
sidekiq (~> 6.5.10)
sidekiq-cron (~> 1.8.0)
sidekiq (~> 7.1.6)
sidekiq-cron (~> 1.9.0)
sigdump (~> 0.2.4)
simple_po_parser (~> 1.1.6)
simplecov (~> 0.21)
......
......@@ -27,6 +27,9 @@
Redis::Cluster::CommandLoader.prepend(Gitlab::Patch::CommandLoader)
Redis::Cluster.prepend(Gitlab::Patch::RedisCluster)
# this only instruments `RedisClient` used in `Sidekiq.redis`
RedisClient.register(Gitlab::Instrumentation::RedisClientMiddleware)
if Gitlab::Redis::Workhorse.params[:cluster].present?
raise "Do not configure workhorse with a Redis Cluster as pub/sub commands are not cluster-compatible."
end
......
......@@ -28,21 +28,25 @@ def enable_semi_reliable_fetch_mode?
end
# Custom Queues configuration
queues_config_hash = Gitlab::Redis::Queues.params
queues_config_hash = Gitlab::Redis::Queues.redis_client_params
enable_json_logs = Gitlab.config.sidekiq.log_format != 'text'
# Sidekiq's `strict_args!` raises an exception by default in 7.0
# https://github.com/sidekiq/sidekiq/blob/31bceff64e10d501323bc06ac0552652a47c082e/docs/7.0-Upgrade.md?plain=1#L59
Sidekiq.strict_args!(false)
Sidekiq.configure_server do |config|
config[:strict] = false
config[:queues] = Gitlab::SidekiqConfig.expand_queues(config[:queues])
if enable_json_logs
config.log_formatter = Gitlab::SidekiqLogging::JSONFormatter.new
config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new
config[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger
# Remove the default-provided handler. The exception is logged inside
# Gitlab::SidekiqLogging::StructuredLogger
config.error_handlers.delete(Sidekiq::DEFAULT_ERROR_HANDLER)
config.error_handlers.delete(Sidekiq::Config::ERROR_HANDLER)
end
Sidekiq.logger.info "Listening on queues #{config[:queues].uniq.sort}"
......@@ -107,8 +111,8 @@ def enable_semi_reliable_fetch_mode?
# We only need to do this for other clients. If Sidekiq-server is the
# client scheduling jobs, we have access to the regular sidekiq logger that
# writes to STDOUT
Sidekiq.logger = Gitlab::SidekiqLogging::ClientLogger.build
Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
config.logger = Gitlab::SidekiqLogging::ClientLogger.build
config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
end
......
......@@ -19,7 +19,7 @@
# Allow sidekiq to cleanly terminate and push any running jobs back
# into the queue. We use the configured timeout and add a small
# grace period
sleep(Sidekiq[:timeout] + 5)
sleep(Sidekiq.default_configuration[:timeout] + 5)
# Signaling the Sidekiq Pgroup as KILL is not forwarded to
# a possible child process. In Sidekiq Cluster, all child Sidekiq
......
......@@ -39,7 +39,7 @@
end
end
it 'allows creation from custom project template', :js do
it 'allows creation from custom project template', :js, :sidekiq_inline do
new_path = 'example-custom-project-template'
new_name = 'Example Custom Project Template'
......@@ -50,10 +50,7 @@
# Have to reset it to '' so it overwrites rather than appends
fill_in('project_path', with: '')
fill_in('project_path', with: new_path)
Sidekiq::Testing.inline! do
click_button 'Create project'
end
click_button 'Create project'
end
expect(page).to have_content new_name
......@@ -62,7 +59,7 @@
expect(Project.last.path).to eq new_path
end
it 'allows creation from custom project template using only the name', :js do
it 'allows creation from custom project template using only the name', :js, :sidekiq_inline do
new_path = 'example-custom-project-template'
new_name = 'Example Custom Project Template'
......@@ -70,10 +67,7 @@
page.within '.project-fields-form' do
fill_in('project_name', with: new_name)
Sidekiq::Testing.inline! do
click_button 'Create project'
end
click_button 'Create project'
end
expect(page).to have_content new_name
......@@ -82,7 +76,7 @@
expect(Project.last.path).to eq new_path
end
it 'allows creation from custom project template using only the path', :js do
it 'allows creation from custom project template using only the path', :js, :sidekiq_inline do
new_path = 'example-custom-project-template'
new_name = 'Example Custom Project Template'
......@@ -90,10 +84,7 @@
page.within '.project-fields-form' do
fill_in('project_path', with: new_path)
Sidekiq::Testing.inline! do
click_button 'Create project'
end
click_button 'Create project'
end
expect(page).to have_content new_name
......
......@@ -3,15 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::Mirror do
around do |example|
original_logger = Sidekiq.logger
Sidekiq.logger = nil
example.run
Sidekiq.logger = original_logger
end
describe '#configure_cron_job!' do
let(:cron) { Gitlab::Mirror::SCHEDULER_CRON }
......
......@@ -40,7 +40,7 @@
end
describe 'sidekiq_retries_exhausted callback' do
let(:sidekiq_retry_handler) { Sidekiq::JobRetry.new(Sidekiq) }
let(:sidekiq_retry_handler) { Sidekiq::JobRetry.new(Sidekiq::Capsule.new("test", Sidekiq.default_configuration)) }
let(:vulnerability_export) { create(:vulnerability_export, :created, :csv) }
let(:default_job_payload) { { 'class' => described_class.name, 'args' => [vulnerability_export.id] } }
......
# frozen_string_literal: true

# This module references https://github.com/redis-rb/redis-client#instrumentation-and-middlewares
# implementing `call`, and `call_pipelined`.
module Gitlab
  module Instrumentation
    # Middleware prepended onto RedisClient (via `RedisClient.register`) to
    # instrument individual and pipelined Redis commands.
    module RedisClientMiddleware
      include RedisHelper

      # Instruments a single Redis command. Records read/write byte sizes
      # only while a RequestStore is active (i.e. within a request cycle).
      def call(command, redis_config)
        klass = instrumentation_class(redis_config)

        result = instrument_call([command], klass) { super }
        measure_io(command, result, klass) if ::RequestStore.active?

        result
      end

      # Instruments a pipelined batch of Redis commands; the third argument
      # to instrument_call marks the call as pipelined.
      def call_pipelined(commands, redis_config)
        klass = instrumentation_class(redis_config)

        result = instrument_call(commands, klass, true) { super }
        measure_io(commands, result, klass) if ::RequestStore.active?

        result
      end

      private

      # Records request payload size and response size against the
      # per-store instrumentation class.
      def measure_io(command, result, instrumentation)
        measure_write_size(command, instrumentation)
        measure_read_size(result, instrumentation)
      end

      # The instrumentation class is carried in redis-client's `custom`
      # config hash (set up by `redis_client_params`).
      def instrumentation_class(config)
        config.custom[:instrumentation_class]
      end
    end
  end
end
......@@ -15,7 +15,7 @@ def instrument_call(commands, instrumentation_class, pipelined = false)
end
yield
rescue ::Redis::BaseError => ex
rescue ::Redis::BaseError, ::RedisClient::Error => ex
if ex.message.start_with?('MOVED', 'ASK')
instrumentation_class.instance_count_cluster_redirection(ex)
else
......
......@@ -18,8 +18,8 @@ def call
return true unless @alive
# Tell sidekiq to restart itself
# Keep extra safe to wait `Sidekiq[:timeout] + 2` seconds before SIGKILL
send_signal(:TERM, $$, 'gracefully shut down', Sidekiq[:timeout] + 2)
# Keep extra safe to wait `Sidekiq.default_configuration[:timeout] + 2` seconds before SIGKILL
send_signal(:TERM, $$, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2)
return true unless @alive
# Ideally we should never reach this condition
......
......@@ -7,11 +7,11 @@
require 'sidekiq/version'
require 'sidekiq/cron/version'
if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('7.1.6')
raise 'New version of sidekiq detected, please remove or update this patch'
end
if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.8.0')
if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.9.1')
raise 'New version of sidekiq-cron detected, please remove or update this patch'
end
......
......@@ -19,7 +19,7 @@ class Wrapper
InvalidPathError = Class.new(StandardError)
class << self
delegate :params, :url, :store, :encrypted_secrets, to: :new
delegate :params, :url, :store, :encrypted_secrets, :redis_client_params, to: :new
def with
pool.with { |redis| yield redis }
......@@ -96,6 +96,27 @@ def params
redis_store_options
end
# redis_client_params modifies redis_store_options to be compatible with redis-client
# TODO: when redis-rb is updated to v5, there is no need to support 2 types of config format
def redis_client_params
options = redis_store_options
options[:custom] = { instrumentation_class: options[:instrumentation_class] }
# TODO: add support for cluster when upgrading to redis-rb v5.y.z we do not need cluster support
# as Sidekiq workload should not and does not run in a Redis Cluster
# support to be added in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134862
if options[:sentinels]
# name is required in RedisClient::SentinelConfig
# https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/sentinel_config.rb#L17
options[:name] = options[:host]
options.except(:scheme, :instrumentation_class, :host, :port)
else
# remove disallowed keys as seen in
# https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/config.rb#L21
options.except(:scheme, :instrumentation_class)
end
end
def url
raw_config_hash[:url]
end
......
......@@ -94,7 +94,7 @@ def max_threads
#
# These threads execute Sidekiq client middleware when jobs
# are enqueued and those can access DB / Redis.
threads += Sidekiq[:concurrency] + 2
threads += Sidekiq.default_configuration[:concurrency] + 2
end
if puma?
......
# frozen_string_literal: true
require 'yaml'
require 'sidekiq/capsule'
module Gitlab
module SidekiqConfig
......@@ -161,7 +162,7 @@ def worker_queue_mappings
# the current Sidekiq process
def current_worker_queue_mappings
worker_queue_mappings
.select { |worker, queue| Sidekiq[:queues].include?(queue) }
.select { |worker, queue| Sidekiq.default_configuration.queues.include?(queue) }
.to_h
end
......
......@@ -16,11 +16,11 @@ def call(job, queue)
ActiveRecord::LogSubscriber.reset_runtime
Sidekiq.logger.info log_job_start(job, base_payload)
@logger.info log_job_start(job, base_payload)
yield
Sidekiq.logger.info log_job_done(job, started_time, base_payload)
@logger.info log_job_done(job, started_time, base_payload)
rescue Sidekiq::JobRetry::Handled => job_exception
# Sidekiq::JobRetry::Handled is raised by the internal Sidekiq
# processor. It is a wrapper around real exception indicating an
......@@ -29,11 +29,11 @@ def call(job, queue)
#
# For more information:
# https://github.com/mperham/sidekiq/blob/v5.2.7/lib/sidekiq/processor.rb#L173
Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)
@logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)
raise
rescue StandardError => job_exception
Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception)
@logger.warn log_job_done(job, started_time, base_payload, job_exception)
raise
end
......
......@@ -64,7 +64,7 @@ def metrics
def initialize_process_metrics
metrics = self.metrics
metrics[:sidekiq_concurrency].set({}, Sidekiq[:concurrency].to_i)
metrics[:sidekiq_concurrency].set({}, Sidekiq.default_configuration[:concurrency].to_i)
return unless ::Feature.enabled?(:sidekiq_job_completion_metric_initialize)
......
......@@ -16,17 +16,14 @@ def initialize(mappings, logger: nil)
# Migrate jobs in SortedSets, i.e. scheduled and retry sets.
def migrate_set(sidekiq_set)
source_queues_regex = Regexp.union(mappings.keys)
cursor = 0
scanned = 0
migrated = 0
estimated_size = Sidekiq.redis { |c| c.zcard(sidekiq_set) }
logger&.info("Processing #{sidekiq_set} set. Estimated size: #{estimated_size}.")
begin
cursor, jobs = Sidekiq.redis { |c| c.zscan(sidekiq_set, cursor) }
jobs.each do |(job, score)|
Sidekiq.redis do |c|
c.zscan(sidekiq_set) do |job, score|
if scanned > 0 && scanned % LOG_FREQUENCY == 0
logger&.info("In progress. Scanned records: #{scanned}. Migrated records: #{migrated}.")
end
......@@ -45,7 +42,7 @@ def migrate_set(sidekiq_set)
migrated += migrate_job_in_set(sidekiq_set, job, score, job_hash)
end
end while cursor.to_i != 0
end
logger&.info("Done. Scanned records: #{scanned}. Migrated records: #{migrated}.")
......@@ -61,7 +58,7 @@ def migrate_queues
logger&.info("List of queues based on routing rules: #{routing_rules_queues}")
Sidekiq.redis do |conn|
# Redis 6 supports conn.scan_each(match: "queue:*", type: 'list')
conn.scan_each(match: "queue:*") do |key|
conn.scan("MATCH", "queue:*") do |key|
# Redis 5 compatibility
next unless conn.type(key) == 'list'
......@@ -101,13 +98,9 @@ def migrate_job_in_set(sidekiq_set, job, score, job_hash)
Sidekiq.redis do |connection|
removed = connection.zrem(sidekiq_set, job)
if removed
connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash))
connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash)) if removed > 0
1
else
0
end
removed
end
end
......
# frozen_string_literal: true
require 'spec_helper'
require 'rspec-parameterized'
require 'support/helpers/rails_helpers'
# Specs for Gitlab::Instrumentation::RedisClientMiddleware — the redis-client
# middleware that records request counts, read/write byte sizes, latency
# (apdex), and cluster cross-slot validation for instrumented Redis calls.
RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
  using RSpec::Parameterized::TableSyntax
  include RedisHelpers

  # A helper-defined Redis store class and a raw RedisClient configured from
  # its redis-client-compatible params.
  let_it_be(:redis_store_class) { define_helper_redis_store_class }
  let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }

  # Start every example from an empty database so byte counts below are
  # deterministic.
  before do
    redis_client.call("flushdb")
  end

  describe 'read and write' do
    where(:setup, :command, :expect_write, :expect_read) do
      # The response is 'OK', the request size is the combined size of array
      # elements. Exercise counting of a status reply.
      [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2

      # The response is 1001, so 4 bytes. Exercise counting an integer reply.
      [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4

      # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
      [] | [:hgetall, 'foobar'] | (7 + 6) | 2

      # Hgetall response length is combined length of keys and values in the
      # hash. Exercises counting of a multi bulk reply
      # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
      [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)

      # Exercise counting of a bulk reply
      [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)

      # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
      [[:zadd, 'myset', 0, 'foo'],
        [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
    end

    with_them do
      it 'counts bytes read and written' do
        # Run the setup commands first, then clear the request store so only
        # the measured command contributes to the recorded byte counts.
        setup.each { |cmd| redis_client.call(*cmd) }
        RequestStore.clear!
        redis_client.call(*command)

        expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
        expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
      end
    end
  end

  describe 'counting' do
    let(:instrumentation_class) { redis_store_class.instrumentation_class }

    it 'counts successful requests' do
      expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original

      redis_client.call(:get, 'foobar')
    end

    it 'counts successful pipelined requests' do
      # A pipeline of 2 commands counts as 2 requests and 1 pipelined
      # request of size 2.
      expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
      expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original

      redis_client.pipelined do |pipeline|
        pipeline.call(:get, '{foobar}buz')
        pipeline.call(:get, '{foobar}baz')
      end
    end

    context 'when encountering exceptions' do
      before do
        # Force the underlying connection to fail so the middleware's
        # exception accounting paths are exercised.
        allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
          RedisClient::ConnectionError, 'Connection was closed or lost')
      end

      it 'counts exception' do
        expect(instrumentation_class).to receive(:instance_count_exception)
          .with(instance_of(RedisClient::ConnectionError)).and_call_original
        expect(instrumentation_class).to receive(:log_exception)
          .with(instance_of(RedisClient::ConnectionError)).and_call_original
        expect(instrumentation_class).to receive(:instance_count_request).and_call_original

        expect do
          redis_client.call(:auth, 'foo', 'bar')
        end.to raise_error(RedisClient::Error)
      end
    end

    context 'in production environment' do
      before do
        stub_rails_env('production') # to avoid raising CrossSlotError
      end

      it 'counts disallowed cross-slot requests' do
        expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        # 'foo' and 'bar' hash to different slots.
        redis_client.call(:mget, 'foo', 'bar')
      end

      it 'does not count allowed cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original

        Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
          redis_client.call(:mget, 'foo', 'bar')
        end
      end

      it 'does not count allowed non-cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        # Single-key command inside the allow block: neither counter moves.
        Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
          redis_client.call(:mget, 'bar')
        end
      end

      it 'skips count for non-cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        # Hash tags {foo} force both keys into the same slot.
        redis_client.call(:mget, '{foo}bar', '{foo}baz')
      end
    end

    context 'without active RequestStore' do
      before do
        ::RequestStore.end!
      end

      it 'still runs cross-slot validation' do
        expect do
          redis_client.call('mget', 'foo', 'bar')
        end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
      end
    end
  end

  describe 'latency' do
    let(:instrumentation_class) { redis_store_class.instrumentation_class }

    describe 'commands in the apdex' do
      # Both symbol and string command forms should be measured.
      where(:command) do
        [
          [[:get, 'foobar']],
          [%w[GET foobar]]
        ]
      end

      with_them do
        it 'measures requests we want in the apdex' do
          expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
            .and_call_original

          redis_client.call(*command)
        end
      end

      context 'with pipelined commands' do
        it 'measures requests that do not have blocking commands' do
          expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
            .and_call_original

          redis_client.pipelined do |pipeline|
            pipeline.call(:get, '{foobar}buz')
            pipeline.call(:get, '{foobar}baz')
          end
        end

        it 'raises error when keys are not from the same slot' do
          expect do
            redis_client.pipelined do |pipeline|
              pipeline.call(:get, 'foo')
              pipeline.call(:get, 'bar')
            end
          end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
        end
      end
    end

    describe 'commands not in the apdex' do
      # Blocking commands (and `command`) are excluded from latency
      # measurement because their duration reflects waiting, not Redis speed.
      where(:setup, :command) do
        [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
        [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
        [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
        [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
        [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
        [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
        [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
        [['xadd', 'foobar', 1, 'myfield', 'mydata'],
          ['xgroup', 'create', 'foobar', 'mygroup',
            0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
        [] | ['command']
      end

      with_them do
        it 'skips requests we do not want in the apdex' do
          setup.each { |cmd| redis_client.call(*cmd) }
          expect(instrumentation_class).not_to receive(:instance_observe_duration)

          redis_client.call(*command)
        end
      end

      context 'with pipelined commands' do
        it 'skips requests that have blocking commands' do
          # One blocking command in the pipeline disables measurement for
          # the whole pipeline.
          expect(instrumentation_class).not_to receive(:instance_observe_duration)

          redis_client.pipelined do |pipeline|
            pipeline.call(:get, '{foobar}buz')
            pipeline.call(:rpush, '{foobar}baz', 1)
            pipeline.call(:brpop, '{foobar}baz', 0)
          end
        end
      end
    end
  end
end
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment