Commit 5d75b2b9 authored by 🤖 GitLab Bot 🤖

Add latest changes from gitlab-org/[email protected]

parent 6f2065c4
Pipeline #129734011 passed with stages in 99 minutes and 57 seconds
......@@ -425,7 +425,7 @@ gem 'gitlab-mail_room', '~> 0.0.3', require: 'mail_room'
gem 'email_reply_trimmer', '~> 0.1'
gem 'html2text'
gem 'ruby-prof', '~> 1.0.0'
gem 'ruby-prof', '~> 1.3.0'
gem 'stackprof', '~> 0.2.15', require: false
gem 'rbtrace', '~> 0.4', require: false
gem 'memory_profiler', '~> 0.9', require: false
......
......@@ -951,7 +951,7 @@ GEM
i18n
ruby-fogbugz (0.2.1)
crack (~> 0.4)
ruby-prof (1.0.0)
ruby-prof (1.3.1)
ruby-progressbar (1.10.1)
ruby-saml (1.7.2)
nokogiri (>= 1.5.10)
......@@ -1358,7 +1358,7 @@ DEPENDENCIES
rubocop-performance (~> 1.4.1)
rubocop-rspec (~> 1.37.0)
ruby-fogbugz (~> 0.2.1)
ruby-prof (~> 1.0.0)
ruby-prof (~> 1.3.0)
ruby-progressbar
ruby_parser (~> 3.8)
rubyzip (~> 2.0.0)
......
......@@ -68,6 +68,9 @@ module BulkInsertSafe
# @param [Boolean] validate Whether validations should run on [items]
# @param [Integer] batch_size How many items should at most be inserted at once
# @param [Boolean] skip_duplicates Marks duplicates as allowed, and skips inserting them
# @param [Symbol] returns Pass :ids to return an array with the primary key values
# for all inserted records or nil to omit the underlying
# RETURNING SQL clause entirely.
# @param [Proc] handle_attributes Block that will receive each item attribute hash
# prior to insertion for further processing
#
......@@ -78,10 +81,11 @@ module BulkInsertSafe
#
# @return true if operation succeeded, throws otherwise.
#
def bulk_insert!(items, validate: true, skip_duplicates: false, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
def bulk_insert!(items, validate: true, skip_duplicates: false, returns: nil, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
_bulk_insert_all!(items,
validate: validate,
on_duplicate: skip_duplicates ? :skip : :raise,
returns: returns,
unique_by: nil,
batch_size: batch_size,
&handle_attributes)
......@@ -94,6 +98,9 @@ module BulkInsertSafe
# @param [Boolean] validate Whether validations should run on [items]
# @param [Integer] batch_size How many items should at most be inserted at once
# @param [Symbol/Array] unique_by Defines index or columns to use to consider item duplicate
# @param [Symbol] returns Pass :ids to return an array with the primary key values
# for all inserted or updated records or nil to omit the
# underlying RETURNING SQL clause entirely.
# @param [Proc] handle_attributes Block that will receive each item attribute hash
# prior to insertion for further processing
#
......@@ -109,10 +116,11 @@ module BulkInsertSafe
#
# @return true if operation succeeded, throws otherwise.
#
def bulk_upsert!(items, unique_by:, validate: true, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
def bulk_upsert!(items, unique_by:, returns: nil, validate: true, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
_bulk_insert_all!(items,
validate: validate,
on_duplicate: :update,
returns: returns,
unique_by: unique_by,
batch_size: batch_size,
&handle_attributes)
......@@ -120,21 +128,30 @@ module BulkInsertSafe
private
def _bulk_insert_all!(items, on_duplicate:, unique_by:, validate:, batch_size:, &handle_attributes)
return true if items.empty?
def _bulk_insert_all!(items, on_duplicate:, returns:, unique_by:, validate:, batch_size:, &handle_attributes)
return [] if items.empty?
returning =
case returns
when :ids
[primary_key]
when nil
false
else
raise ArgumentError, "returns needs to be :ids or nil"
end
transaction do
items.each_slice(batch_size) do |item_batch|
items.each_slice(batch_size).flat_map do |item_batch|
attributes = _bulk_insert_item_attributes(
item_batch, validate, &handle_attributes)
ActiveRecord::InsertAll
.new(self, attributes, on_duplicate: on_duplicate, unique_by: unique_by)
.execute
.new(self, attributes, on_duplicate: on_duplicate, returning: returning, unique_by: unique_by)
.execute
.pluck(primary_key)
end
end
true
end
def _bulk_insert_item_attributes(items, validate_items)
......
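The change above threads a `returns:` option from `bulk_insert!`/`bulk_upsert!` down to `ActiveRecord::InsertAll`. A minimal usage sketch, assuming a hypothetical `MyLabel` model that includes `BulkInsertSafe`:

```ruby
class MyLabel < ApplicationRecord
  include BulkInsertSafe
end

labels = %w[foo bar].map { |title| MyLabel.new(title: title) }

# Default (returns: nil): the RETURNING clause is omitted entirely.
MyLabel.bulk_insert!(labels)

# returns: :ids adds RETURNING <primary key> and yields one id per inserted row,
# flattened across all batches.
ids = MyLabel.bulk_insert!(labels, returns: :ids, batch_size: 100)

# Any other value is rejected up front.
MyLabel.bulk_insert!(labels, returns: :records) # raises ArgumentError, "returns needs to be :ids or nil"
```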
......@@ -58,7 +58,7 @@ module Projects
end
def tree_saver_class
if ::Feature.enabled?(:streaming_serializer, project)
if ::Feature.enabled?(:streaming_serializer, project, default_enabled: true)
Gitlab::ImportExport::Project::TreeSaver
else
# Once we remove :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
......
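For context, `default_enabled: true` only changes what happens when the flag has never been explicitly set. A hedged sketch (the console calls are the standard `Feature` API, not part of this diff):

```ruby
# Never toggled on this instance: the new default wins.
Feature.enabled?(:streaming_serializer, project, default_enabled: true) # => true

# An instance can still opt out explicitly, e.g. from a Rails console.
Feature.disable(:streaming_serializer)
Feature.enabled?(:streaming_serializer, project, default_enabled: true) # => false
```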
# frozen_string_literal: true
module Projects
module Prometheus
module Alerts
class NotifyService < BaseService
include Gitlab::Utils::StrongMemoize
include IncidentManagement::Settings
def execute(token)
return false unless valid_payload_size?
return false unless valid_version?
return false unless valid_alert_manager_token?(token)
persist_events
send_alert_email if send_email?
process_incident_issues if process_issues?
true
end
private
def valid_payload_size?
Gitlab::Utils::DeepSize.new(params).valid?
end
def send_email?
incident_management_setting.send_email && firings.any?
end
def firings
@firings ||= alerts_by_status('firing')
end
def alerts_by_status(status)
alerts.select { |alert| alert['status'] == status }
end
def alerts
params['alerts']
end
def valid_version?
params['version'] == '4'
end
def valid_alert_manager_token?(token)
valid_for_manual?(token) || valid_for_managed?(token)
end
def valid_for_manual?(token)
prometheus = project.find_or_initialize_service('prometheus')
return false unless prometheus.manual_configuration?
if setting = project.alerting_setting
compare_token(token, setting.token)
else
token.nil?
end
end
def valid_for_managed?(token)
prometheus_application = available_prometheus_application(project)
return false unless prometheus_application
if token
compare_token(token, prometheus_application.alert_manager_token)
else
prometheus_application.alert_manager_token.nil?
end
end
def available_prometheus_application(project)
alert_id = gitlab_alert_id
return unless alert_id
alert = find_alert(project, alert_id)
return unless alert
cluster = alert.environment.deployment_platform&.cluster
return unless cluster&.enabled?
return unless cluster.application_prometheus_available?
cluster.application_prometheus
end
def find_alert(project, metric)
Projects::Prometheus::AlertsFinder
.new(project: project, metric: metric)
.execute
.first
end
def gitlab_alert_id
alerts&.first&.dig('labels', 'gitlab_alert_id')
end
def compare_token(expected, actual)
return unless expected && actual
ActiveSupport::SecurityUtils.secure_compare(expected, actual)
end
def send_alert_email
notification_service
.async
.prometheus_alerts_fired(project, firings)
end
def process_incident_issues
alerts.each do |alert|
IncidentManagement::ProcessPrometheusAlertWorker
.perform_async(project.id, alert.to_h)
end
end
def persist_events
CreateEventsService.new(project, nil, params).execute
end
end
end
end
end
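A rough sketch of the Alertmanager payload shape `NotifyService` expects and how it might be invoked; the concrete values and the token variable are made up:

```ruby
payload = {
  'version' => '4',                                 # anything else fails valid_version?
  'alerts'  => [
    {
      'status' => 'firing',                         # only firing alerts trigger the email
      'labels' => { 'gitlab_alert_id' => '42' }     # used to locate the managed Prometheus alert
    }
  ]
}

# BaseService subclasses here take (project, user, params), mirroring the
# CreateEventsService call above.
result = Projects::Prometheus::Alerts::NotifyService
  .new(project, nil, payload)
  .execute(alertmanager_token)                      # => true when the token and payload check out
```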
---
title: Enable streaming serializer feature flag by default.
merge_request: 27813
author:
type: performance
---
title: Add support for Okta as a SCIM provider
merge_request: 25649
author:
type: added
# frozen_string_literal: true
# Patch to use COPY in db/structure.sql when populating schema_migrations table
# This is intended to reduce potential for merge conflicts in db/structure.sql
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend(Gitlab::Database::PostgresqlAdapter::SchemaVersionsCopyMixin)
......@@ -14,7 +14,7 @@ tasks such as:
To request access to Chatops on GitLab.com:
1. Log into <https://ops.gitlab.net/users/sign_in> **using the same username** as for GitLab.com (you may have to rename it).
1. Ask in the [#production](https://gitlab.slack.com/messages/production) channel to add you by running `/chatops run member add <username> gitlab-com/chatops --ops`.
1. Ask in the [#production](https://gitlab.slack.com/messages/production) channel for an existing member to add you to the `chatops` project in Ops. They can do it by running the `/chatops run member add <username> gitlab-com/chatops --ops` command in that channel.
NOTE: **Note:** If you had to change your username for GitLab.com on the first step, make sure [to reflect this information](https://gitlab.com/gitlab-com/www-gitlab-com#adding-yourself-to-the-team-page) on [the team page](https://about.gitlab.com/company/team/).
......
......@@ -150,6 +150,10 @@ module API
authorize! :download_code, release
end
def authorize_create_evidence!
# This is a separate method so that EE can extend its behaviour
end
def release
@release ||= user_project.releases.find_by_tag(params[:tag])
end
......
# frozen_string_literal: true
module Gitlab
module Database
module PostgresqlAdapter
module SchemaVersionsCopyMixin
extend ActiveSupport::Concern
def dump_schema_information # :nodoc:
versions = schema_migration.all_versions
copy_versions_sql(versions) if versions.any?
end
private
def copy_versions_sql(versions)
sm_table = quote_table_name(schema_migration.table_name)
sql = +"COPY #{sm_table} (version) FROM STDIN;\n"
sql << versions.map { |v| Integer(v) }.sort.join("\n")
sql << "\n\\.\n"
sql
end
end
end
end
end
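To illustrate what the mixin emits instead of individual `INSERT` statements, a sketch with made-up migration versions:

```ruby
# Hypothetical versions; schema_migration.all_versions supplies the real ones.
versions = %w[20200305151736 20200214085940]

# copy_versions_sql(versions) then renders, roughly:
#
#   COPY "schema_migrations" (version) FROM STDIN;
#   20200214085940
#   20200305151736
#   \.
#
# One sorted version per line plus the COPY end-of-data marker, which keeps the
# schema_migrations section of db/structure.sql append-only and less merge-conflict prone.
```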
......@@ -168,9 +168,9 @@ module Gitlab
# rubocop: enable CodeReuse/ActiveRecord
def self.print_by_total_time(result, options = {})
default_options = { sort_method: :total_time }
default_options = { sort_method: :total_time, filter_by: :total_time }
Gitlab::Profiler::TotalTimeFlatPrinter.new(result).print(STDOUT, default_options.merge(options))
RubyProf::FlatPrinter.new(result).print(STDOUT, default_options.merge(options))
end
end
end
# frozen_string_literal: true
module Gitlab
module Profiler
class TotalTimeFlatPrinter < RubyProf::FlatPrinter
def max_percent
@options[:max_percent] || 100
end
# Copied from:
# <https://github.com/ruby-prof/ruby-prof/blob/master/lib/ruby-prof/printers/flat_printer.rb>
#
# The changes are just to filter by total time, not self time, and add a
# max_percent option as well.
def print_methods(thread)
total_time = thread.total_time
methods = thread.methods.sort_by(&sort_method).reverse
sum = 0
methods.each do |method|
total_percent = (method.total_time / total_time) * 100
next if total_percent < min_percent
next if total_percent > max_percent
sum += method.self_time
@output << "%6.2f %9.3f %9.3f %9.3f %9.3f %8d %s%-30s %s\n" % [
method.self_time / total_time * 100, # %self
method.total_time, # total
method.self_time, # self
method.wait_time, # wait
method.children_time, # children
method.called, # calls
method.recursive? ? "*" : " ", # cycle
method.full_name, # method_name
method_location(method) # location
]
end
end
end
end
end
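A hedged sketch of how `print_by_total_time` is typically driven; the profiled block is arbitrary:

```ruby
require 'digest'
require 'securerandom'
require 'ruby-prof'

result = RubyProf.profile do
  100.times { Digest::SHA256.hexdigest(SecureRandom.hex) }
end

# Sorted and filtered by total time rather than self time; min_percent hides
# methods below the given share of total runtime.
Gitlab::Profiler.print_by_total_time(result, min_percent: 1)
```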
......@@ -4,75 +4,110 @@ require 'toml-rb'
module Gitlab
module SetupHelper
class << self
# We cannot create config.toml files for all possible Gitaly configurations.
# For instance, if Gitaly is running on another machine then it makes no
# sense to write a config.toml file on the current machine. This method will
# only generate a configuration for the most common and simplest case: when
# we have exactly one Gitaly process and we are sure it is running locally
# because it uses a Unix socket.
# For development and testing purposes, an extra storage is added to gitaly,
# which is not known to Rails, but must be explicitly stubbed.
def gitaly_configuration_toml(gitaly_dir, storage_paths, gitaly_ruby: true)
storages = []
address = nil
Gitlab.config.repositories.storages.each do |key, val|
if address
if address != val['gitaly_address']
raise ArgumentError, "Your gitlab.yml contains more than one gitaly_address."
def create_configuration(dir, storage_paths, force: false)
generate_configuration(
configuration_toml(dir, storage_paths),
get_config_path(dir),
force: force
)
end
# rubocop:disable Rails/Output
def generate_configuration(toml_data, config_path, force: false)
FileUtils.rm_f(config_path) if force
File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
f.puts toml_data
end
rescue Errno::EEXIST
puts 'Skipping config.toml generation:'
puts 'A configuration file already exists.'
rescue ArgumentError => e
puts 'Skipping config.toml generation:'
puts e.message
end
# rubocop:enable Rails/Output
module Gitaly
extend Gitlab::SetupHelper
class << self
# We cannot create config.toml files for all possible Gitaly configurations.
# For instance, if Gitaly is running on another machine then it makes no
# sense to write a config.toml file on the current machine. This method will
# only generate a configuration for the most common and simplest case: when
# we have exactly one Gitaly process and we are sure it is running locally
# because it uses a Unix socket.
# For development and testing purposes, an extra storage is added to gitaly,
# which is not known to Rails, but must be explicitly stubbed.
def configuration_toml(gitaly_dir, storage_paths, gitaly_ruby: true)
storages = []
address = nil
Gitlab.config.repositories.storages.each do |key, val|
if address
if address != val['gitaly_address']
raise ArgumentError, "Your gitlab.yml contains more than one gitaly_address."
end
elsif URI(val['gitaly_address']).scheme != 'unix'
raise ArgumentError, "Automatic config.toml generation only supports 'unix:' addresses."
else
address = val['gitaly_address']
end
elsif URI(val['gitaly_address']).scheme != 'unix'
raise ArgumentError, "Automatic config.toml generation only supports 'unix:' addresses."
else
address = val['gitaly_address']
storages << { name: key, path: storage_paths[key] }
end
storages << { name: key, path: storage_paths[key] }
end
config = { socket_path: address.sub(/\Aunix:/, '') }
if Rails.env.test?
storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s
storages << { name: 'test_second_storage', path: storage_path }
end
if Rails.env.test?
storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s
storages << { name: 'test_second_storage', path: storage_path }
config[:auth] = { token: 'secret' }
# Compared to production, tests run in constrained environments. This
# number is meant to grow with the number of concurrent rails requests /
# sidekiq jobs, and concurrency will be low anyway in test.
config[:git] = { catfile_cache_size: 5 }
end
config = { socket_path: address.sub(/\Aunix:/, ''), storage: storages }
config[:auth] = { token: 'secret' } if Rails.env.test?
config[:storage] = storages
internal_socket_dir = File.join(gitaly_dir, 'internal_sockets')
FileUtils.mkdir(internal_socket_dir) unless File.exist?(internal_socket_dir)
config[:internal_socket_dir] = internal_socket_dir
internal_socket_dir = File.join(gitaly_dir, 'internal_sockets')
FileUtils.mkdir(internal_socket_dir) unless File.exist?(internal_socket_dir)
config[:internal_socket_dir] = internal_socket_dir
config[:'gitaly-ruby'] = { dir: File.join(gitaly_dir, 'ruby') } if gitaly_ruby
config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path }
config[:bin_dir] = Gitlab.config.gitaly.client_path
config[:'gitaly-ruby'] = { dir: File.join(gitaly_dir, 'ruby') } if gitaly_ruby
config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path }
config[:bin_dir] = Gitlab.config.gitaly.client_path
if Rails.env.test?
# Compared to production, tests run in constrained environments. This
# number is meant to grow with the number of concurrent rails requests /
# sidekiq jobs, and concurrency will be low anyway in test.
config[:git] = { catfile_cache_size: 5 }
TomlRB.dump(config)
end
TomlRB.dump(config)
private
def get_config_path(dir)
File.join(dir, 'config.toml')
end
end
end
module Praefect
extend Gitlab::SetupHelper
class << self
def configuration_toml(gitaly_dir, storage_paths)
nodes = [{ storage: 'default', address: "unix:#{gitaly_dir}/gitaly.socket", primary: true, token: 'secret' }]
config = { socket_path: "#{gitaly_dir}/praefect.socket", virtual_storage_name: 'default', token: 'secret', node: nodes }
config[:token] = 'secret' if Rails.env.test?
TomlRB.dump(config)
end
# rubocop:disable Rails/Output
def create_gitaly_configuration(dir, storage_paths, force: false)
config_path = File.join(dir, 'config.toml')
FileUtils.rm_f(config_path) if force
private
File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
f.puts gitaly_configuration_toml(dir, storage_paths)
def get_config_path(dir)
File.join(dir, 'praefect.config.toml')
end
rescue Errno::EEXIST
puts "Skipping config.toml generation:"
puts "A configuration file already exists."
rescue ArgumentError => e
puts "Skipping config.toml generation:"
puts e.message
end
# rubocop:enable Rails/Output
end
end
end
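After the refactor, each service module shares the same `create_configuration` entry point. A sketch with made-up paths:

```ruby
storage_paths = { 'default' => '/home/git/repositories' }
gitaly_dir    = '/home/git/gitaly'

# Writes <gitaly_dir>/config.toml; an existing file is left alone unless force: true.
Gitlab::SetupHelper::Gitaly.create_configuration(gitaly_dir, storage_paths)

# Writes <gitaly_dir>/praefect.config.toml next to it, pointing Praefect at the
# single local Gitaly node over its Unix socket.
Gitlab::SetupHelper::Praefect.create_configuration(gitaly_dir, storage_paths, force: true)
```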
......@@ -27,7 +27,7 @@ Usage: rake "gitlab:gitaly:install[/installation/dir,/storage/path]")
end
storage_paths = { 'default' => args.storage_path }
Gitlab::SetupHelper.create_gitaly_configuration(args.dir, storage_paths)
Gitlab::SetupHelper::Gitaly.create_configuration(args.dir, storage_paths)
Dir.chdir(args.dir) do
# In CI we run scripts/gitaly-test-build instead of this command
unless ENV['CI'].present?
......
......@@ -48,7 +48,7 @@ class UploadedFile
return if path.blank? && remote_id.blank?
file_path = nil
if path
if path.present?
file_path = File.realpath(path)
paths = Array(upload_paths) << Dir.tmpdir
......
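The `path.present?` guard matters because a blank-but-non-nil path is truthy in Ruby; a tiny illustration with made-up values:

```ruby
path = ''

File.realpath(path) if path           # old check: '' is truthy, so this raises (no such file)
File.realpath(path) if path.present?  # new check: the blank string short-circuits, nothing raised
```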
# frozen_string_literal: true
module QA
context 'Create', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/issues/36817', type: :bug } do
context 'Create', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/30226', type: :bug } do
describe 'Merge request rebasing' do
it 'user rebases source branch of merge request' do
Flow::Login.sign_in
......
......@@ -17,13 +17,16 @@ class GitalyTestBuild
check_gitaly_config!
# Starting gitaly further validates its configuration
pid = start_gitaly
Process.kill('TERM', pid)
gitaly_pid = start_gitaly
praefect_pid = start_praefect
Process.kill('TERM', gitaly_pid)
Process.kill('TERM', praefect_pid)
# Make the 'gitaly' executable look newer than 'GITALY_SERVER_VERSION'.
# Without this a gitaly executable created in the setup-test-env job
# will look stale compared to GITALY_SERVER_VERSION.
FileUtils.touch(File.join(tmp_tests_gitaly_dir, 'gitaly'), mtime: Time.now + (1 << 24))
FileUtils.touch(File.join(tmp_tests_gitaly_dir, 'praefect'), mtime: Time.now + (1 << 24))
end
end
......
......@@ -13,10 +13,9 @@ class GitalyTestSpawn
# # Uncomment line below to see all gitaly logs merged into CI trace
# spawn('sleep 1; tail -f log/gitaly-test.log')
pid = start_gitaly
# In local development this pid file is used by rspec.
IO.write(File.expand_path('../tmp/tests/gitaly.pid', __dir__), pid)
IO.write(File.expand_path('../tmp/tests/gitaly.pid', __dir__), start_gitaly)
IO.write(File.expand_path('../tmp/tests/praefect.pid', __dir__), start_praefect)
end
end
......
......@@ -37,16 +37,31 @@ module GitalyTest
env_hash
end
def config_path
File.join(tmp_tests_gitaly_dir, 'config.toml')
def config_path(service)
case service
when :gitaly
File.join(tmp_tests_gitaly_dir, 'config.toml')
when :praefect
File.join(tmp_tests_gitaly_dir, 'praefect.config.toml')
end
end
def start_gitaly
args = %W[#{tmp_tests_gitaly_dir}/gitaly #{config_path}]
pid = spawn(env, *args, [:out, :err] => 'log/gitaly-test.log')
start(:gitaly)
end
def start_praefect
start(:praefect)
end
def start(service)
args = ["#{tmp_tests_gitaly_dir}/#{service}"]
args.push("-config") if service == :praefect
args.push(config_path(service))
pid = spawn(env, *args, [:out, :err] => "log/#{service}-test.log")
begin
try_connect!
try_connect!(service)
rescue
Process.kill('TERM', pid)
raise
......@@ -68,11 +83,11 @@ module GitalyTest
abort 'bundle check failed' unless system(env, 'bundle', 'check', chdir: File.dirname(gemfile))
end
def read_socket_path
def read_socket_path(service)
# This code needs to work in an environment where we cannot use bundler,
# so we cannot easily use the toml-rb gem. This ad-hoc parser should be
# good enough.
config_text = IO.read(config_path)
config_text = IO.read(config_path(service))
config_text.lines.each do |line|
match_data = line.match(/^\s*socket_path\s*=\s*"([^"]*)"$/)
......@@ -80,14 +95,14 @@ module GitalyTest
return match_data[1] if match_data
end
raise "failed to find socket_path in #{config_path}"
raise "failed to find socket_path in #{config_path(service)}"
end
def try_connect!
print "Trying to connect to gitaly: "
def try_connect!(service)
print "Trying to connect to #{service}: "
timeout = 20
delay = 0.1
socket = read_socket_path
socket = read_socket_path(service)
Integer(timeout / delay).times do
UNIXSocket.new(socket)
......
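A hedged sketch of how the reworked helpers fit together in the test scripts (return values follow `GitalyTestBuild` above; the exact usage is an assumption):

```ruby
include GitalyTest

gitaly_pid   = start(:gitaly)    # spawns tmp/tests/gitaly/gitaly with config.toml and waits for its socket
praefect_pid = start(:praefect)  # spawns .../praefect with "-config praefect.config.toml"

# ... run whatever needs both services ...

Process.kill('TERM', gitaly_pid)
Process.kill('TERM', praefect_pid)
```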
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Database::PostgresqlAdapter::SchemaVersionsCopyMixin do
let(:schema_migration) { double('schema_migration', table_name: table_name, all_versions: versions) }
let(:versions) { %w(5 2 1000 200 4 93 2) }
let(:table_name) { "schema_migrations" }
let(:instance) do
Object.new.extend(described_class)
end
before do
allow(instance).to receive(:schema_migration).and_return(schema_migration)
allow(instance).to receive(:quote_table_name).with(table_name).and_return("\"#{table_name}\"")