......@@ -155,7 +155,7 @@ export default class MilestoneSelect {
const { $el, e } = clickEvent;
let selected = clickEvent.selectedObj;
-    let data, boardsStore;
+    let data, modalStoreFilter;
if (!selected) return;
if (options.handleClick) {
......@@ -179,11 +179,11 @@ export default class MilestoneSelect {
}
if ($dropdown.closest('.add-issues-modal').length) {
-      boardsStore = ModalStore.store.filter;
+      modalStoreFilter = ModalStore.store.filter;
}
-    if (boardsStore) {
-      boardsStore[$dropdown.data('fieldName')] = selected.name;
+    if (modalStoreFilter) {
+      modalStoreFilter[$dropdown.data('fieldName')] = selected.name;
e.preventDefault();
} else if ($dropdown.hasClass('js-filter-submit') && (isIssueIndex || isMRIndex)) {
return Issuable.filterResults($dropdown.closest('form'));
......
......@@ -247,15 +247,19 @@ Please check your network connection and try again.`;
} else {
this.reopenIssue()
.then(() => this.enableButton())
-      .catch(() => {
+      .catch(({ data }) => {
        this.enableButton();
        this.toggleStateButtonLoading(false);
-        Flash(
-          sprintf(
-            __('Something went wrong while reopening the %{issuable}. Please try again later'),
-            { issuable: this.noteableDisplayName },
-          ),
-        );
+        let errorMessage = sprintf(
+          __('Something went wrong while reopening the %{issuable}. Please try again later'),
+          { issuable: this.noteableDisplayName },
+        );
+        if (data) {
+          errorMessage = Object.values(data).join('\n');
+        }
+        Flash(errorMessage);
});
}
},
......
......@@ -44,6 +44,7 @@ export default {
class="sidebar-collapsed-icon"
data-placement="left"
data-container="body"
+    data-boundary="viewport"
@click="handleClick"
>
<i aria-hidden="true" data-hidden="true" class="fa fa-tags"> </i>
......
......@@ -8,7 +8,6 @@ class ApplicationController < ActionController::Base
include GitlabRoutingHelper
include PageLayoutHelper
include SafeParamsHelper
-  include SentryHelper
include WorkhorseHelper
include EnforcesTwoFactorAuthentication
include WithPerformanceBar
......@@ -129,6 +128,7 @@ class ApplicationController < ActionController::Base
payload[:ua] = request.env["HTTP_USER_AGENT"]
payload[:remote_ip] = request.remote_ip
+    payload[Gitlab::CorrelationId::LOG_KEY] = Gitlab::CorrelationId.current_id
logged_user = auth_user
......@@ -155,7 +155,7 @@ class ApplicationController < ActionController::Base
end
def log_exception(exception)
-    Raven.capture_exception(exception) if sentry_enabled?
+    Gitlab::Sentry.track_acceptable_exception(exception)
backtrace_cleaner = Gitlab.rails5? ? request.env["action_dispatch.backtrace_cleaner"] : env
application_trace = ActionDispatch::ExceptionWrapper.new(backtrace_cleaner, exception).application_trace
......@@ -487,4 +487,8 @@ class ApplicationController < ActionController::Base
def impersonator
@impersonator ||= User.find(session[:impersonator_id]) if session[:impersonator_id]
end
+  def sentry_context
+    Gitlab::Sentry.context(current_user)
+  end
end
......@@ -102,7 +102,7 @@ module IssuableCollections
elsif @group
options[:group_id] = @group.id
options[:include_subgroups] = true
-      options[:use_cte_for_search] = true
+      options[:attempt_group_search_optimizations] = true
end
params.permit(finder_type.valid_params).merge(options)
......
......@@ -122,17 +122,21 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
respond_to do |format|
format.html do
-        if @merge_request.valid?
-          redirect_to([@merge_request.target_project.namespace.becomes(Namespace), @merge_request.target_project, @merge_request])
-        else
+        if @merge_request.errors.present?
           define_edit_vars
           render :edit
+        else
+          redirect_to project_merge_request_path(@merge_request.target_project, @merge_request)
         end
       end
       format.json do
-        render json: serializer.represent(@merge_request, serializer: 'basic')
+        if merge_request.errors.present?
+          render json: @merge_request.errors, status: :bad_request
+        else
+          render json: serializer.represent(@merge_request, serializer: 'basic')
+        end
end
end
rescue ActiveRecord::StaleObjectError
......
......@@ -27,12 +27,13 @@
# created_before: datetime
# updated_after: datetime
# updated_before: datetime
-#     use_cte_for_search: boolean
+#     attempt_group_search_optimizations: boolean
#
class IssuableFinder
prepend FinderWithCrossProjectAccess
include FinderMethods
include CreatedAtFilter
+  include Gitlab::Utils::StrongMemoize
requires_cross_project_access unless: -> { project? }
......@@ -75,8 +76,9 @@ class IssuableFinder
items = init_collection
items = filter_items(items)
-    # This has to be last as we may use a CTE as an optimization fence by
-    # passing the use_cte_for_search param
+    # This has to be last as we may use a CTE as an optimization fence
+    # by passing the attempt_group_search_optimizations param and
+    # enabling the use_cte_for_group_issues_search feature flag
# https://www.postgresql.org/docs/current/static/queries-with.html
items = by_search(items)
......@@ -85,6 +87,8 @@ class IssuableFinder
def filter_items(items)
items = by_project(items)
items = by_group(items)
+    items = by_subquery(items)
items = by_scope(items)
items = by_created_at(items)
items = by_updated_at(items)
......@@ -282,12 +286,31 @@ class IssuableFinder
end
# rubocop: enable CodeReuse/ActiveRecord
+  def use_subquery_for_search?
+    strong_memoize(:use_subquery_for_search) do
+      attempt_group_search_optimizations? &&
+        Feature.enabled?(:use_subquery_for_group_issues_search, default_enabled: false)
+    end
+  end
+
+  def use_cte_for_search?
+    strong_memoize(:use_cte_for_search) do
+      attempt_group_search_optimizations? &&
+        !use_subquery_for_search? &&
+        Feature.enabled?(:use_cte_for_group_issues_search, default_enabled: true)
+    end
+  end
private
def init_collection
klass.all
end
+  def attempt_group_search_optimizations?
+    search && Gitlab::Database.postgresql? && params[:attempt_group_search_optimizations]
+  end
def count_key(value)
Array(value).last.to_sym
end
......@@ -351,12 +374,13 @@ class IssuableFinder
end
# rubocop: enable CodeReuse/ActiveRecord
-  def use_cte_for_search?
-    return false unless search
-    return false unless Gitlab::Database.postgresql?
-    return false unless Feature.enabled?(:use_cte_for_group_issues_search, default_enabled: true)
-
-    params[:use_cte_for_search]
+  # Wrap projects and groups in a subquery if the conditions are met.
+  def by_subquery(items)
+    if use_subquery_for_search?
+      klass.where(id: items.select(:id)) # rubocop: disable CodeReuse/ActiveRecord
+    else
+      items
+    end
  end
# rubocop: disable CodeReuse/ActiveRecord
......
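For orientation, here is a hedged sketch of what the two search strategies above amount to. The scope names are illustrative stand-ins, not the finder's actual internals:

```ruby
# Illustrative only: both strategies constrain an expensive LIKE-based
# search to an already-filtered set of issues.
filtered = Issue.where(project_id: group_project_ids) # stand-in for filter_items

# Subquery strategy (use_subquery_for_search?): wrap the filtered scope so
# the search runs against `WHERE id IN (SELECT id FROM issues WHERE ...)`.
Issue.where(id: filtered.select(:id)).full_search('term')

# CTE strategy (use_cte_for_search?): on PostgreSQL a CTE acts as an
# optimization fence, so the filter is materialized before the search:
#   WITH issues AS (SELECT * FROM issues WHERE project_id IN (...))
#   SELECT * FROM issues WHERE (title ILIKE '%term%' OR description ILIKE '%term%')
```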
......@@ -42,7 +42,7 @@ module IconsHelper
end
def sprite_icon(icon_name, size: nil, css_class: nil)
-    if Gitlab::Sentry.should_raise?
+    if Gitlab::Sentry.should_raise_for_dev?
unless known_sprites.include?(icon_name)
exception = ArgumentError.new("#{icon_name} is not a known icon in @gitlab-org/gitlab-svg")
raise exception
......
# frozen_string_literal: true
module SentryHelper
def sentry_enabled?
Gitlab::Sentry.enabled?
end
def sentry_context
Gitlab::Sentry.context(current_user)
end
end
......@@ -120,7 +120,7 @@ module Ci
acts_as_taggable
-  add_authentication_token_field :token
+  add_authentication_token_field :token, encrypted: true, fallback: true
before_save :update_artifacts_size, if: :artifacts_file_changed?
before_save :ensure_token
......
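The `fallback: true` option is presumably what keeps existing plaintext tokens readable while new records only write the encrypted column; a rough sketch of the assumed read path (the real logic lives in the token-authenticatable strategy, and `Gitlab::CryptoHelper` comes from this same MR):

```ruby
# Assumed semantics of fallback: true, simplified:
def token
  if token_encrypted.present?
    Gitlab::CryptoHelper.aes256_gcm_decrypt(token_encrypted)
  else
    read_attribute(:token) # legacy plaintext fallback for old rows
  end
end
```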
......@@ -70,13 +70,14 @@ module FastDestroyAll
module Helpers
extend ActiveSupport::Concern
+    include AfterCommitQueue
class_methods do
##
      # This method is to be defined on models which have fast-destroyable models as
      # children, and lets us avoid the `dependent: :destroy` hook
-      def use_fast_destroy(relation)
-        before_destroy(prepend: true) do
+      def use_fast_destroy(relation, opts = {})
+        set_callback :destroy, :before, opts.merge(prepend: true) do
perform_fast_destroy(public_send(relation)) # rubocop:disable GitlabSecurity/PublicSend
end
end
......
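A minimal usage sketch of the new `opts` parameter, mirroring how `WithUploads` uses it below (model and relation names assumed):

```ruby
class Project < ActiveRecord::Base
  include FastDestroyAll::Helpers

  has_many :file_uploads, class_name: 'Upload'

  # opts is forwarded to set_callback, so standard callback options such as
  # :if work; the fast path only runs when the predicate returns true.
  use_fast_destroy :file_uploads, if: :fast_destroy_enabled?
end
```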
......@@ -17,6 +17,8 @@
module WithUploads
extend ActiveSupport::Concern
+  include FastDestroyAll::Helpers
+  include FeatureGate
# Currently there is no simple way how to select only not-mounted
# uploads, it should be all FileUploaders so we select them by
......@@ -25,21 +27,40 @@ module WithUploads
included do
has_many :uploads, as: :model
+    has_many :file_uploads, -> { where(uploader: FILE_UPLOADERS) }, class_name: 'Upload', as: :model
-    before_destroy :destroy_file_uploads
+    # TODO: when feature flag is removed, we can use just dependent: destroy
+    # option on :file_uploads
+    before_destroy :remove_file_uploads
+    use_fast_destroy :file_uploads, if: :fast_destroy_enabled?
end
+  def retrieve_upload(_identifier, paths)
+    uploads.find_by(path: paths)
+  end
private
  # mounted uploads are deleted in carrierwave's after_commit hook,
  # but FileUploaders which are not mounted must be deleted explicitly and
  # it can not be done in after_commit, because FileUploader loads the
  # associated model on destroy (and that model is already deleted in after_commit)
-  def destroy_file_uploads
-    self.uploads.where(uploader: FILE_UPLOADERS).find_each do |upload|
+  def remove_file_uploads
+    fast_destroy_enabled? ? delete_uploads : destroy_uploads
+  end
+
+  def delete_uploads
+    file_uploads.delete_all(:delete_all)
+  end
+
+  def destroy_uploads
+    file_uploads.find_each do |upload|
upload.destroy
end
end
-  def retrieve_upload(_identifier, paths)
-    uploads.find_by(path: paths)
-  end
+
+  def fast_destroy_enabled?
+    Feature.enabled?(:fast_destroy_uploads, self)
+  end
end
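Because the concern includes `FeatureGate`, the `fast_destroy_uploads` flag can be toggled per record; a console sketch of the assumed rollout path:

```ruby
# Roll out fast destroy for one project's uploads before enabling globally.
project = Project.find_by_full_path('group/project')
Feature.enable(:fast_destroy_uploads, project)   # gate on a single record
Feature.enabled?(:fast_destroy_uploads, project) # => true
Feature.enable(:fast_destroy_uploads)            # later: enable for everyone
```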
......@@ -539,15 +539,26 @@ class MergeRequest < ActiveRecord::Base
def validate_branches
if target_project == source_project && target_branch == source_branch
-      errors.add :branch_conflict, "You can not use same project/branch for source and target"
+      errors.add :branch_conflict, "You can't use same project/branch for source and target"
return
end
if opened?
-      similar_mrs = self.target_project.merge_requests.where(source_branch: source_branch, target_branch: target_branch, source_project_id: source_project.try(:id)).opened
-      similar_mrs = similar_mrs.where('id not in (?)', self.id) if self.id
-      if similar_mrs.any?
-        errors.add :validate_branches,
-                   "Cannot Create: This merge request already exists: #{similar_mrs.pluck(:title)}"
+      similar_mrs = target_project
+        .merge_requests
+        .where(source_branch: source_branch, target_branch: target_branch)
+        .where(source_project_id: source_project&.id)
+        .opened
+      similar_mrs = similar_mrs.where.not(id: id) if persisted?
+
+      conflict = similar_mrs.first
+      if conflict.present?
+        errors.add(
+          :validate_branches,
+          "Another open merge request already exists for this source branch: #{conflict.to_reference}"
+        )
end
end
end
......
# frozen_string_literal: true
# The PoolRepository model is the database equivalent of an ObjectPool for Gitaly
# That is: PoolRepository is the record in the database, ObjectPool is the
# repository on disk
class PoolRepository < ActiveRecord::Base
include Shardable
include AfterCommitQueue
has_one :source_project, class_name: 'Project'
validates :source_project, presence: true
has_many :member_projects, class_name: 'Project'
after_create :correct_disk_path
state_machine :state, initial: :none do
state :scheduled
state :ready
state :failed
event :schedule do
transition none: :scheduled
end
event :mark_ready do
transition [:scheduled, :failed] => :ready
end
event :mark_failed do
transition all => :failed
end
state all - [:ready] do
def joinable?
false
end
end
state :ready do
def joinable?
true
end
end
after_transition none: :scheduled do |pool, _|
pool.run_after_commit do
::ObjectPool::CreateWorker.perform_async(pool.id)
end
end
after_transition scheduled: :ready do |pool, _|
pool.run_after_commit do
::ObjectPool::ScheduleJoinWorker.perform_async(pool.id)
end
end
end
def create_object_pool
object_pool.create
end
# The members of the pool should have fetched the missing objects to their own
# objects directory. If the caller fails to do so, data loss might occur
def delete_object_pool
object_pool.delete
end
def link_repository(repository)
object_pool.link(repository.raw)
end
  # This RPC can cause data loss, as not all objects are present in the local repository
# No execution path yet, will be added through:
# https://gitlab.com/gitlab-org/gitaly/issues/1415
def delete_repository_alternate(repository)
object_pool.unlink_repository(repository.raw)
end
def object_pool
@object_pool ||= Gitlab::Git::ObjectPool.new(
shard.name,
      disk_path + '.git',
source_project.repository.raw)
end
private
def correct_disk_path
......
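Putting the state machine and its transition hooks together, the expected lifecycle looks roughly like this (a sketch; the worker calls run asynchronously after commit in reality):

```ruby
# Assumed happy path for a new pool repository:
pool = PoolRepository.create!(shard: Shard.by_name('default'),
                              source_project: project)
pool.schedule          # none -> scheduled; after commit, CreateWorker runs
# Inside ObjectPool::CreateWorker:
#   pool.create_object_pool
#   pool.mark_ready      # scheduled -> ready; ScheduleJoinWorker then fans
#                        # out JoinWorker jobs to the member projects
pool.joinable?         # => true only in the :ready state
```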
......@@ -1585,6 +1585,7 @@ class Project < ActiveRecord::Base
import_state.remove_jid
update_project_counter_caches
after_create_default_branch
+    join_pool_repository
refresh_markdown_cache!
end
......@@ -1981,8 +1982,48 @@ class Project < ActiveRecord::Base
Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
end
+  def object_pool_params
+    return {} unless !forked? && git_objects_poolable?
+
+    {
+      repository_storage: repository_storage,
+      pool_repository: pool_repository || create_new_pool_repository
+    }
+  end
+
+  # Git objects are only poolable when the project is or has:
+  # - Hashed storage -> The object pool will have a remote to its members, using relative paths.
+  #   If the repository path changes we would have to update the remote.
+  # - Public -> Users will be able to fetch Git objects that might not exist
+  #   in their own repository.
+  # - Repository -> Otherwise the disk path will be empty, and there's nothing to pool.
+  def git_objects_poolable?
+    hashed_storage?(:repository) &&
+      public? &&
+      repository_exists? &&
+      Gitlab::CurrentSettings.hashed_storage_enabled &&
+      Feature.enabled?(:object_pools, self)
+  end
private
+  def create_new_pool_repository
+    pool = begin
+             create_or_find_pool_repository!(shard: Shard.by_name(repository_storage), source_project: self)
+           rescue ActiveRecord::RecordNotUnique
+             retry
+           end
+
+    pool.schedule
+    pool
+  end
+
+  def join_pool_repository
+    return unless pool_repository
+
+    ObjectPool::JoinWorker.perform_async(pool_repository.id, self.id)
+  end
def use_hashed_storage
if self.new_record? && Gitlab::CurrentSettings.hashed_storage_enabled
self.storage_version = LATEST_STORAGE_VERSION
......
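Read together with the fork-service change further down, the deduplication flow is roughly the following (a sketch under the `object_pools` feature flag; the exact `ForkService` arguments are assumed):

```ruby
# Assumed end-to-end flow when forking a public, hashed-storage project:
params = project.object_pool_params
# => { repository_storage: 'default', pool_repository: #<PoolRepository ...> }
# Projects::ForkService merges these params into the new fork's attributes,
# and after creation the fork enqueues ObjectPool::JoinWorker, which links
# the fork's repository to the pool via pool.link_repository(repository).
fork = Projects::ForkService.new(project, user).execute
fork.pool_repository == project.pool_repository # => true (objects shared)
```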
......@@ -18,7 +18,9 @@ class Shard < ActiveRecord::Base
end
def self.by_name(name)
-    find_or_create_by(name: name)
+    transaction(requires_new: true) do
+      find_or_create_by(name: name)
+    end
rescue ActiveRecord::RecordNotUnique
retry
end
......
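`requires_new: true` gives the `find_or_create_by` its own savepoint, so a unique-index violation doesn't poison a surrounding transaction before the retry; a sketch of the race being handled:

```ruby
# Two processes race to create the same shard row:
#   A: INSERT INTO shards (name) VALUES ('default')  -- wins
#   B: INSERT ... raises ActiveRecord::RecordNotUnique (unique index on name)
# On PostgreSQL an error aborts the whole current transaction; with the
# savepoint, only the inner block rolls back, and the retry re-runs
# find_or_create_by, which now *finds* the row instead of inserting it.
Shard.by_name('default') # safe to call concurrently
```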
......@@ -25,6 +25,25 @@ class Upload < ActiveRecord::Base
Digest::SHA256.file(path).hexdigest
end
+  class << self
+    ##
+    # FastDestroyAll concerns
+    def begin_fast_destroy
+      {
+        Uploads::Local => Uploads::Local.new.keys(with_files_stored_locally),
+        Uploads::Fog => Uploads::Fog.new.keys(with_files_stored_remotely)
+      }
+    end
+
+    ##
+    # FastDestroyAll concerns
+    def finalize_fast_destroy(keys)
+      keys.each do |store_class, paths|
+        store_class.new.delete_keys_async(paths)
+      end
+    end
+  end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
......
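For context, a simplified sketch of how the `FastDestroyAll` contract is expected to drive these two class methods (the real wiring lives in the concern):

```ruby
# Assumed two-phase flow, simplified:
keys = Upload.begin_fast_destroy   # 1. snapshot store => paths while rows exist
# 2. rows are removed quickly (delete_all, no per-record callbacks)
Upload.finalize_fast_destroy(keys) # 3. after commit, enqueue async file deletion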
# frozen_string_literal: true
module Uploads
class Base
BATCH_SIZE = 100
attr_reader :logger
    def initialize(logger: nil)
      # Use the injected logger when given (DeleteStoredFilesWorker passes its
      # Sidekiq logger in); fall back to the Rails logger otherwise.
      @logger = logger || Rails.logger
    end
def delete_keys_async(keys_to_delete)
keys_to_delete.each_slice(BATCH_SIZE) do |batch|
DeleteStoredFilesWorker.perform_async(self.class, batch)
end
end
end
end
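Usage sketch: `delete_keys_async` slices the keys so no single Sidekiq job carries an unbounded payload (the paths below are assumed; `Uploads::Local` is defined further down):

```ruby
# 250 paths => 3 DeleteStoredFilesWorker jobs (100 + 100 + 50).
paths = Upload.with_files_stored_locally.limit(250).map(&:absolute_path)
Uploads::Local.new.delete_keys_async(paths)
```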
# frozen_string_literal: true
module Uploads
class Fog < Base
include ::Gitlab::Utils::StrongMemoize
def available?
object_store.enabled
end
def keys(relation)
return [] unless available?
relation.pluck(:path)
end
def delete_keys(keys)
keys.each do |key|
connection.delete_object(bucket_name, key)
end
end
private
def object_store
Gitlab.config.uploads.object_store
end
def bucket_name
return unless available?
object_store.remote_directory
end
def connection
return unless available?
strong_memoize(:connection) do
::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
end
end
end
end
# frozen_string_literal: true
module Uploads
class Local < Base
def keys(relation)
relation.includes(:model).find_each.map(&:absolute_path)
end
def delete_keys(keys)
keys.each do |path|
delete_file(path)
end
end
private
def delete_file(path)
unless exists?(path)
logger.warn("File '#{path}' doesn't exist, skipping")
return
end
unless in_uploads?(path)
message = "Path '#{path}' is not in uploads dir, skipping"
logger.warn(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message), extra: { uploads_dir: storage_dir })
return
end
FileUtils.rm(path)
delete_dir!(File.dirname(path))
end
def exists?(path)
path.present? && File.exist?(path)
end
def in_uploads?(path)
path.start_with?(storage_dir)
end
def delete_dir!(path)
Dir.rmdir(path)
rescue Errno::ENOENT
# Ignore: path does not exist
rescue Errno::ENOTDIR
# Ignore: path is not a dir
rescue Errno::ENOTEMPTY, Errno::EEXIST
# Ignore: dir is not empty
end
def storage_dir
@storage_dir ||= File.realpath(Gitlab.config.uploads.storage_path)
end
end
end
......@@ -58,13 +58,27 @@ module MergeRequests
.preload(:latest_merge_request_diff)
.where(target_branch: @push.branch_name).to_a
.select(&:diff_head_commit)
+      .select do |merge_request|
+        commit_ids.include?(merge_request.diff_head_sha) &&
+          merge_request.merge_request_diff.state != 'empty'
+      end
+
+    merge_requests = filter_merge_requests(merge_requests)
+
+    return if merge_requests.empty?
+
-    merge_requests = merge_requests.select do |merge_request|
-      commit_ids.include?(merge_request.diff_head_sha) &&
-        merge_request.merge_request_diff.state != 'empty'
-    end
+    commit_analyze_enabled = Feature.enabled?(:branch_push_merge_commit_analyze, @project, default_enabled: true)
+    if commit_analyze_enabled
+      analyzer = Gitlab::BranchPushMergeCommitAnalyzer.new(
+        @commits.reverse,
+        relevant_commit_ids: merge_requests.map(&:diff_head_sha)
+      )
+    end
+
-    filter_merge_requests(merge_requests).each do |merge_request|
+    merge_requests.each do |merge_request|
+      if commit_analyze_enabled
+        merge_request.merge_commit_sha = analyzer.get_merge_commit(merge_request.diff_head_sha)
+      end
MergeRequests::PostMergeService
.new(merge_request.target_project, @current_user)
.execute(merge_request)
......
......@@ -54,6 +54,8 @@ module Projects
new_params[:avatar] = @project.avatar
end
+        new_params.merge!(@project.object_pool_params)
new_project = CreateService.new(current_user, new_params).execute
return new_project unless new_project.persisted?
......
......@@ -5,7 +5,7 @@
- subscribed = params[:subscribed]
- labels_or_filters = @labels.exists? || @prioritized_labels.exists? || search.present? || subscribed.present?
-- if @labels.present? && can_admin_label
+- if labels_or_filters && can_admin_label
- content_for(:header_content) do
.nav-controls
= link_to _('New label'), new_project_label_path(@project), class: "btn btn-success qa-label-create-new"
......
......@@ -10,7 +10,6 @@
- cronjob:prune_old_events
- cronjob:remove_expired_group_links
- cronjob:remove_expired_members
- - cronjob:remove_old_web_hook_logs
- cronjob:remove_unreferenced_lfs_objects
- cronjob:repository_archive_cache
- cronjob:repository_check_dispatch
......@@ -86,6 +85,10 @@
- todos_destroyer:todos_destroyer_project_private
- todos_destroyer:todos_destroyer_private_features
+ - object_pool:object_pool_create
+ - object_pool:object_pool_schedule_join
+ - object_pool:object_pool_join
- default
- mailers # ActionMailer::DeliveryJob.queue_name
......@@ -134,3 +137,4 @@
- delete_diff_files
- detect_repository_languages
- repository_cleanup
+ - delete_stored_files
# frozen_string_literal: true
##
# Concern for setting Sidekiq settings for the various ObjectPool queues
#
module ObjectPoolQueue
extend ActiveSupport::Concern
included do
queue_namespace :object_pool
end
end
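The namespace is what produces the `object_pool:*` queue names registered in `all_queues.yml` above; a quick console check of the expected values:

```ruby
ObjectPool::CreateWorker.queue       # => "object_pool:object_pool_create"
ObjectPool::ScheduleJoinWorker.queue # => "object_pool:object_pool_schedule_join"
ObjectPool::JoinWorker.queue         # => "object_pool:object_pool_join"
```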
# frozen_string_literal: true
class DeleteStoredFilesWorker
include ApplicationWorker
def perform(class_name, keys)
klass = begin
class_name.constantize
rescue NameError
nil
end
unless klass
message = "Unknown class '#{class_name}'"
logger.error(message)
Gitlab::Sentry.track_exception(RuntimeError.new(message))
return
end
klass.new(logger: logger).delete_keys(keys)
end
end
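Since Sidekiq serializes arguments to JSON, the class argument arrives as a string, which is why `perform` guards the `constantize`; a usage sketch (the second call's class name is deliberately bogus):

```ruby
# Enqueued by Uploads::Base#delete_keys_async; the equivalent by hand:
DeleteStoredFilesWorker.perform_async('Uploads::Local', ['/var/uploads/a.png'])

# An unknown class name is logged and reported to Sentry instead of raising,
# so a malformed job doesn't retry forever:
DeleteStoredFilesWorker.new.perform('Uploads::Gone', []) # "Unknown class 'Uploads::Gone'"
```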
......@@ -28,6 +28,8 @@ class GitGarbageCollectWorker
# Refresh the branch cache in case garbage collection caused a ref lookup to fail
flush_ref_caches(project) if task == :gc
project.repository.expire_statistics_caches
+    # In case pack files are deleted, release libgit2 cache and open file
+    # descriptors ASAP instead of waiting for Ruby garbage collection
+    project.cleanup
......
# frozen_string_literal: true
module ObjectPool
class CreateWorker
include ApplicationWorker
include ObjectPoolQueue
include ExclusiveLeaseGuard
attr_reader :pool
def perform(pool_id)
@pool = PoolRepository.find_by_id(pool_id)
return unless pool
try_obtain_lease do
perform_pool_creation
end
end
private
def perform_pool_creation
return unless pool.failed? || pool.scheduled?
# If this is a retry and the previous execution failed, deletion will
# bring the pool back to a pristine state
pool.delete_object_pool if pool.failed?
pool.create_object_pool
pool.mark_ready
rescue => e
pool.mark_failed
raise e
end
def lease_key
"object_pool:create:#{pool.id}"
end
def lease_timeout
1.hour
end
end
end
# frozen_string_literal: true
module ObjectPool
class JoinWorker
include ApplicationWorker
include ObjectPoolQueue
def perform(pool_id, project_id)
pool = PoolRepository.find_by_id(pool_id)
return unless pool&.joinable?
project = Project.find_by_id(project_id)
return unless project
pool.link_repository(project.repository)
Projects::HousekeepingService.new(project).execute
end
end
end
# frozen_string_literal: true
module ObjectPool
class ScheduleJoinWorker
include ApplicationWorker
include ObjectPoolQueue
def perform(pool_id)
pool = PoolRepository.find_by_id(pool_id)
return unless pool&.joinable?
pool.member_projects.find_each do |project|
next if project.forked? && !project.import_finished?
ObjectPool::JoinWorker.perform_async(pool.id, project.id)
end
end
end
end
# frozen_string_literal: true
class RemoveOldWebHookLogsWorker
include ApplicationWorker
include CronjobQueue
WEB_HOOK_LOG_LIFETIME = 2.days
# rubocop: disable DestroyAll
def perform
WebHookLog.destroy_all(['created_at < ?', Time.now - WEB_HOOK_LOG_LIFETIME])
end
# rubocop: enable DestroyAll
end
---
title: Show error message when attempting to reopen an MR and there is an open MR
for the same branch
merge_request: 16447
author: Akos Gyimesi
type: fixed
---
title: Fix "merged with [commit]" info for merge requests being merged automatically
by other actions
merge_request: 22794
author:
type: fixed
---
title: Remove old webhook logs after 90 days, as documented, instead of after 2
  days
merge_request:
author:
type: fixed
---
title: Encrypt CI/CD builds authentication tokens
merge_request: 23436
author:
type: security
---
title: Show primary button when all labels are prioritized
merge_request: 23648
author: George Tsiolis
type: other
---
title: Fix error when searching for group issues with priority or popularity sort
merge_request: 23445
author:
type: fixed
---
title: Log and pass correlation-id between Unicorn, Sidekiq and Gitaly
merge_request:
author:
type: added
---
title: Fix milestone select in issue sidebar of issue boards
merge_request: 23625
author:
type: fixed
---
title: Allow public forks to be deduplicated
merge_request: 23508
author:
type: added
......@@ -302,10 +302,6 @@ Settings.cron_jobs['gitlab_usage_ping_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= Settings.__send__(:cron_for_usage_ping)
Settings.cron_jobs['gitlab_usage_ping_worker']['job_class'] = 'GitlabUsagePingWorker'
-Settings.cron_jobs['remove_old_web_hook_logs_worker'] ||= Settingslogic.new({})
-Settings.cron_jobs['remove_old_web_hook_logs_worker']['cron'] ||= '40 0 * * *'
-Settings.cron_jobs['remove_old_web_hook_logs_worker']['job_class'] = 'RemoveOldWebHookLogsWorker'
Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *'
Settings.cron_jobs['stuck_merge_jobs_worker']['job_class'] = 'StuckMergeJobsWorker'
......
# frozen_string_literal: true
Rails.application.config.middleware.use(Gitlab::Middleware::CorrelationId)
......@@ -29,6 +29,7 @@ unless Sidekiq.server?
gitaly_calls = Gitlab::GitalyClient.get_request_count
payload[:gitaly_calls] = gitaly_calls if gitaly_calls > 0
payload[:response] = event.payload[:response] if event.payload[:response]
+      payload[Gitlab::CorrelationId::LOG_KEY] = Gitlab::CorrelationId.current_id
payload
end
......
......@@ -24,4 +24,4 @@ def configure_sentry
end
end
-configure_sentry if Rails.env.production?
+configure_sentry if Rails.env.production? || Rails.env.development?
......@@ -21,6 +21,7 @@ Sidekiq.configure_server do |config|
chain.add Gitlab::SidekiqMiddleware::Shutdown
chain.add Gitlab::SidekiqMiddleware::RequestStoreMiddleware unless ENV['SIDEKIQ_REQUEST_STORE'] == '0'
chain.add Gitlab::SidekiqMiddleware::BatchLoader
+    chain.add Gitlab::SidekiqMiddleware::CorrelationLogger
chain.add Gitlab::SidekiqStatus::ServerMiddleware
end
......@@ -31,6 +32,7 @@ Sidekiq.configure_server do |config|
config.client_middleware do |chain|
chain.add Gitlab::SidekiqStatus::ClientMiddleware
+    chain.add Gitlab::SidekiqMiddleware::CorrelationInjector
end
config.on :startup do
......@@ -75,6 +77,7 @@ Sidekiq.configure_client do |config|
config.redis = queues_config_hash
config.client_middleware do |chain|
+    chain.add Gitlab::SidekiqMiddleware::CorrelationInjector
chain.add Gitlab::SidekiqStatus::ClientMiddleware
end
end
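The injector/logger pair is what carries one request's correlation id across process boundaries; roughly (the key name in the job hash is assumed):

```ruby
# Unicorn side: CorrelationInjector stamps each enqueued job, e.g.
#   { 'class' => 'SomeWorker', 'args' => [1], 'correlation_id' => 'abc123' }
# Sidekiq side: CorrelationLogger restores it around job execution, so
Gitlab::CorrelationId.current_id
# returns the same value in the worker's logs as in the originating HTTP
# request's lograge payload (see the initializer above).
```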
......@@ -81,4 +81,6 @@
- [delete_diff_files, 1]
- [detect_repository_languages, 1]
- [auto_devops, 2]
+  - [object_pool, 1]
- [repository_cleanup, 1]
+  - [delete_stored_files, 1]
......@@ -24,23 +24,24 @@ The following files require a review from the Documentation team:
* #{docs_paths_to_review.map { |path| "`#{path}`" }.join("\n* ")}
-When your content is ready for review, mention a technical writer in a separate
-comment and explain what needs to be reviewed.
-
-You are welcome to mention them sooner if you have questions about writing or updating
-the documentation. GitLabbers are also welcome to use the [#docs](https://gitlab.slack.com/archives/C16HYA2P5) channel on Slack.
-
-Who to ping [based on DevOps stages](https://about.gitlab.com/handbook/product/categories/#devops-stages):
+When your content is ready for review, assign the MR to a technical writer
+according to the [DevOps stages](https://about.gitlab.com/handbook/product/categories/#devops-stages)
+in the table below. If necessary, mention them in a comment explaining what needs
+to be reviewed.
| Tech writer | Stage(s) |
| ------------ | ------------------------------------------------------------ |
-| `@marcia`    | ~Create ~Release |
+| `@marcia`    | ~Create ~Release + ~"development guidelines" |
| `@axil` | ~Distribution ~Gitaly ~Gitter ~Monitoring ~Packaging ~Secure |
| `@eread` | ~Manage ~Configure ~Geo ~Verify |
| `@mikelewis` | ~Plan |
+You are welcome to mention them sooner if you have questions about writing or
+updating the documentation. GitLabbers are also welcome to use the
+[#docs](https://gitlab.slack.com/archives/C16HYA2P5) channel on Slack.
If you are not sure which category the change falls within, or the change is not
-part of one of these categories, you can mention one of the usernames above.
+part of one of these categories, mention one of the usernames above.
MARKDOWN
unless gitlab.mr_labels.include?('Documentation')
......
# frozen_string_literal: true
class AddStateToPoolRepository < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
  # Given the table is empty, the non-concurrent methods are chosen, so
  # the transactions don't have to be disabled
# rubocop: disable Migration/AddConcurrentForeignKey, Migration/AddIndex
def change
add_column(:pool_repositories, :state, :string, null: true)
add_column :pool_repositories, :source_project_id, :integer
add_index :pool_repositories, :source_project_id, unique: true
add_foreign_key :pool_repositories, :projects, column: :source_project_id, on_delete: :nullify
end
# rubocop: enable Migration/AddConcurrentForeignKey, Migration/AddIndex
end
# frozen_string_literal: true
class AddTokenEncryptedToCiBuilds < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :ci_builds, :token_encrypted, :string
end
end
# frozen_string_literal: true
class AddIndexToCiBuildsTokenEncrypted < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :ci_builds, :token_encrypted, unique: true, where: 'token_encrypted IS NOT NULL'
end
def down
remove_concurrent_index :ci_builds, :token_encrypted
end
end
......@@ -345,6 +345,7 @@ ActiveRecord::Schema.define(version: 20181203002526) do
t.boolean "protected"
t.integer "failure_reason"
t.datetime_with_timezone "scheduled_at"
t.string "token_encrypted"
t.index ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
t.index ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
t.index ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree
......@@ -361,6 +362,7 @@ ActiveRecord::Schema.define(version: 20181203002526) do
t.index ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree
t.index ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree
t.index ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
t.index ["token_encrypted"], name: "index_ci_builds_on_token_encrypted", unique: true, where: "(token_encrypted IS NOT NULL)", using: :btree
t.index ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
t.index ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
end
......@@ -1510,8 +1512,11 @@ ActiveRecord::Schema.define(version: 20181203002526) do
create_table "pool_repositories", id: :bigserial, force: :cascade do |t|
t.integer "shard_id", null: false
t.string "disk_path"
t.string "state"
t.integer "source_project_id"
t.index ["disk_path"], name: "index_pool_repositories_on_disk_path", unique: true, using: :btree
t.index ["shard_id"], name: "index_pool_repositories_on_shard_id", using: :btree
t.index ["source_project_id"], name: "index_pool_repositories_on_source_project_id", unique: true, using: :btree
end
create_table "programming_languages", force: :cascade do |t|
......@@ -2391,6 +2396,7 @@ ActiveRecord::Schema.define(version: 20181203002526) do
add_foreign_key "oauth_openid_requests", "oauth_access_grants", column: "access_grant_id", name: "fk_oauth_openid_requests_oauth_access_grants_access_grant_id"
add_foreign_key "pages_domains", "projects", name: "fk_ea2f6dfc6f", on_delete: :cascade
add_foreign_key "personal_access_tokens", "users"
add_foreign_key "pool_repositories", "projects", column: "source_project_id", on_delete: :nullify
add_foreign_key "pool_repositories", "shards", on_delete: :restrict
add_foreign_key "project_authorizations", "projects", on_delete: :cascade
add_foreign_key "project_authorizations", "users", on_delete: :cascade
......
......@@ -94,6 +94,23 @@ need to be performed on these nodes as well. Database changes will propagate wit
You must make sure the migration event was already processed, otherwise it may
migrate the files back to the hashed state again.
#### Hashed object pools
For deduplication of public forks and their parent repository, objects are pooled
in an object pool. Such an object pool is a third repository, in which the shared
objects are stored.
```ruby
# object pool paths
"@pools/#{hash[0..1]}/#{hash[2..3]}/#{hash}.git"
```
The object pool feature is behind the `object_pools` feature flag, and can be
enabled for individual projects by executing
`Feature.enable(:object_pools, Project.find(<id>))`. Note that the project has to
be on hashed storage, should not be a fork itself, and hashed storage should be
enabled for all new projects.
##### Attachments
To rollback single Attachment migration, rename `aa/bb/abcdef1234567890...` folder back to `namespace/project`.
......
......@@ -974,10 +974,9 @@ curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://git
Merge changes submitted with MR using this API.
If merge request is unable to be accepted (ie: Work in Progress, Closed, Pipeline Pending Completion, or Failed while requiring Success) - you'll get a `405` and the error message 'Method Not Allowed'
-If it has some conflicts and can not be merged - you'll get a `405` and the error message 'Branch cannot be merged'
+If merge request is already merged or closed - you'll get a `406` and the error message 'Method Not Allowed'
+If it has some conflicts and can not be merged - you'll get a `406` and the error message 'Branch cannot be merged'
If the `sha` parameter is passed and does not match the HEAD of the source - you'll get a `409` and the error message 'SHA does not match HEAD of source branch'
......
......@@ -722,16 +722,22 @@ Example response:
### Scope: wiki_blobs
Filters are available for this scope:
- filename
- path
- extension
To use a filter simply include it in your query like: `a query filename:some_name*`.
You may use wildcards (`*`) to use glob matching.
Wiki blobs searches are performed on both filenames and contents. Search
results:
- Found in filenames are displayed before results found in contents.
-- May contain multiple matches for the same blob because the search string
-  might be found in both the filename and content, and matches of the different
-  types are displayed separately.
 - May contain multiple matches for the same blob because the search string
-  might be found if the search string appears multiple times in the content.
+  might be found in both the filename and content, or might appear multiple
+  times in the content.
```bash
curl --request GET --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/projects/6/search?scope=wiki_blobs&search=bye
```
......@@ -788,22 +794,20 @@ Example response:
### Scope: blobs
Filters are available for this scope:
- filename
- path
- extension