Commit d4dcaa0e authored by charlie ablett

Merge branch '239356-fix-Layout/HashAlignment_app-remaining' into 'master'

Resolve Layout/HashAlignment rubocop offenses 8

See merge request !93522
parents 850fae5f 228496b5
Pipeline #602350789 passed with warnings
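
For context, RuboCop's Layout/HashAlignment cop checks how the keys and values of a multi-line hash are aligned. With the `key` enforced style (RuboCop's default, and the style that matches the whitespace-only changes in this diff), every key starts at the same column and is followed by exactly one space, so column-padded values are reported as offenses. A minimal sketch of the kind of change applied throughout this merge request; the hash below is illustrative and not taken from the diff:

# Offense under Layout/HashAlignment (EnforcedColonStyle: key):
# extra spaces pad the values into a column.
options = {
  ttl:     60,
  retries: 10,
  verbose: false
}

# Corrected: keys aligned, a single space after each colon.
options = {
  ttl: 60,
  retries: 10,
  verbose: false
}

The cop also covers implicit hashes passed as trailing method arguments, which is why the logger and service-call hunks below change nothing but alignment padding.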

Showing 65 additions and 91 deletions
@@ -164,32 +164,6 @@ Layout/HashAlignment:
     - 'app/models/user_status.rb'
     - 'app/models/wiki.rb'
     - 'app/models/work_items/type.rb'
-    - 'app/presenters/analytics/cycle_analytics/stage_presenter.rb'
-    - 'app/presenters/project_presenter.rb'
-    - 'app/serializers/rollout_status_entity.rb'
-    - 'app/services/chat_names/authorize_user_service.rb'
-    - 'app/services/ci/archive_trace_service.rb'
-    - 'app/services/ci/job_artifacts/destroy_batch_service.rb'
-    - 'app/services/ci/list_config_variables_service.rb'
-    - 'app/services/ci/parse_dotenv_artifact_service.rb'
-    - 'app/services/ci/stuck_builds/drop_helpers.rb'
-    - 'app/services/groups/import_export/import_service.rb'
-    - 'app/services/issuable/import_csv/base_service.rb'
-    - 'app/services/issues/export_csv_service.rb'
-    - 'app/services/jira/requests/base.rb'
-    - 'app/services/merge_requests/mergeability_check_service.rb'
-    - 'app/services/merge_requests/push_options_handler_service.rb'
-    - 'app/services/merge_requests/toggle_attention_requested_service.rb'
-    - 'app/services/packages/conan/create_package_file_service.rb'
-    - 'app/services/packages/create_package_file_service.rb'
-    - 'app/services/packages/debian/create_package_file_service.rb'
-    - 'app/services/packages/npm/create_package_service.rb'
-    - 'app/services/projects/fork_service.rb'
-    - 'app/services/projects/lfs_pointers/lfs_download_service.rb'
-    - 'app/services/projects/update_remote_mirror_service.rb'
-    - 'app/uploaders/file_uploader.rb'
-    - 'app/workers/emails_on_push_worker.rb'
-    - 'app/workers/x509_issuer_crl_check_worker.rb'
     - 'db/migrate/20210601080039_group_protected_environments_add_index_and_constraint.rb'
     - 'db/migrate/20210804150320_create_base_work_item_types.rb'
     - 'db/migrate/20210831203408_upsert_base_work_item_types.rb'

@@ -28,7 +28,7 @@ def default_stage_attributes
 description: _('Time before an issue gets scheduled')
 },
 plan: {
-title: s_('CycleAnalyticsStage|Plan'),
+title: s_('CycleAnalyticsStage|Plan'),
 description: _('Time before an issue starts implementation')
 },
 code: {

@@ -437,9 +437,9 @@ def add_special_file_path(file_name:, commit_message: nil, branch_name: nil, add
 project_new_blob_path(
 project,
 default_branch_or_main,
-file_name: file_name,
+file_name: file_name,
 commit_message: commit_message,
-branch_name: branch_name,
+branch_name: branch_name,
 **additional_params
 )
 end

@@ -14,5 +14,5 @@ class RolloutStatusEntity < Grape::Entity
 expose :completion, if: -> (rollout_status, _) { rollout_status.found? }
 expose :complete?, as: :is_completed, if: -> (rollout_status, _) { rollout_status.found? }
 expose :canary_ingress, using: RolloutStatuses::IngressEntity, expose_nil: false,
-if: -> (rollout_status, _) { rollout_status.found? && rollout_status.canary_ingress_exists? }
+if: -> (rollout_status, _) { rollout_status.found? && rollout_status.canary_ingress_exists? }
 end

@@ -29,11 +29,11 @@ def chat_name_token
 def chat_name_params
 {
-service_id: @service.id,
-team_id: @params[:team_id],
+service_id: @service.id,
+team_id: @params[:team_id],
 team_domain: @params[:team_domain],
-chat_id: @params[:user_id],
-chat_name: @params[:user_name]
+chat_id: @params[:user_id],
+chat_name: @params[:user_name]
 }
 end
 end

@@ -62,8 +62,8 @@ def archive_error(error, job, worker_name)
 failed_archive_counter.increment
 Sidekiq.logger.warn(class: worker_name,
-message: "Failed to archive trace. message: #{error.message}.",
-job_id: job.id)
+message: "Failed to archive trace. message: #{error.message}.",
+job_id: job.id)
 Gitlab::ErrorTracking
 .track_and_raise_for_dev_exception(error,

@@ -54,7 +54,7 @@ def execute(update_stats: true)
 increment_monitoring_statistics(artifacts_count, artifacts_bytes)
 success(destroyed_artifacts_count: artifacts_count,
-statistics_updates: affected_project_statistics)
+statistics_updates: affected_project_statistics)
 end
 # rubocop: enable CodeReuse/ActiveRecord

@@ -26,8 +26,8 @@ def calculate_reactive_cache(sha)
 return {} unless config
 result = Gitlab::Ci::YamlProcessor.new(config, project: project,
-user: current_user,
-sha: sha).execute
+user: current_user,
+sha: sha).execute
 result.valid? ? result.variables_with_data : {}
 end

@@ -40,7 +40,7 @@ def parse!(artifact)
 key, value = scan_line!(line)
 variables[key] = Ci::JobVariable.new(job_id: artifact.job_id,
-source: :dotenv, key: key, value: value)
+source: :dotenv, key: key, value: value)
 end
 end

@@ -56,12 +56,12 @@ def track_exception_for_build(ex, build)
 def log_dropping_message(type, build, reason)
 Gitlab::AppLogger.info(class: self.class.name,
-message: "Dropping #{type} build",
-build_stuck_type: type,
-build_id: build.id,
-runner_id: build.runner_id,
-build_status: build.status,
-build_failure_reason: reason)
+message: "Dropping #{type} build",
+build_stuck_type: type,
+build_id: build.id,
+runner_id: build.runner_id,
+build_status: build.status,
+build_failure_reason: reason)
 end
 end
 end

@@ -97,17 +97,17 @@ def valid_user_permissions?
 def notify_success
 @logger.info(
-group_id: group.id,
+group_id: group.id,
 group_name: group.name,
-message: 'Group Import/Export: Import succeeded'
+message: 'Group Import/Export: Import succeeded'
 )
 end
 def notify_error
 @logger.error(
-group_id: group.id,
+group_id: group.id,
 group_name: group.name,
-message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details"
+message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details"
 )
 end

@@ -22,9 +22,9 @@ def execute
 def process_csv
 with_csv_lines.each do |row, line_no|
 issuable_attributes = {
-title: row[:title],
+title: row[:title],
 description: row[:description],
-due_date: row[:due_date]
+due_date: row[:due_date]
 }
 if create_issuable(issuable_attributes).persisted?

@@ -25,24 +25,24 @@ def header_to_value_hash
 {
 'Title' => 'title',
 'Description' => 'description',
-'Issue ID' => 'iid',
-'URL' => -> (issue) { issue_url(issue) },
-'State' => -> (issue) { issue.closed? ? 'Closed' : 'Open' },
-'Author' => 'author_name',
-'Author Username' => -> (issue) { issue.author&.username },
-'Assignee' => -> (issue) { issue.assignees.map(&:name).join(', ') },
-'Assignee Username' => -> (issue) { issue.assignees.map(&:username).join(', ') },
-'Confidential' => -> (issue) { issue.confidential? ? 'Yes' : 'No' },
-'Locked' => -> (issue) { issue.discussion_locked? ? 'Yes' : 'No' },
-'Due Date' => -> (issue) { issue.due_date&.to_s(:csv) },
-'Created At (UTC)' => -> (issue) { issue.created_at&.to_s(:csv) },
-'Updated At (UTC)' => -> (issue) { issue.updated_at&.to_s(:csv) },
-'Closed At (UTC)' => -> (issue) { issue.closed_at&.to_s(:csv) },
-'Milestone' => -> (issue) { issue.milestone&.title },
-'Weight' => -> (issue) { issue.weight },
-'Labels' => -> (issue) { issue_labels(issue) },
-'Time Estimate' => ->(issue) { issue.time_estimate.to_s(:csv) },
-'Time Spent' => -> (issue) { issue_time_spent(issue) }
+'Issue ID' => 'iid',
+'URL' => -> (issue) { issue_url(issue) },
+'State' => -> (issue) { issue.closed? ? 'Closed' : 'Open' },
+'Author' => 'author_name',
+'Author Username' => -> (issue) { issue.author&.username },
+'Assignee' => -> (issue) { issue.assignees.map(&:name).join(', ') },
+'Assignee Username' => -> (issue) { issue.assignees.map(&:username).join(', ') },
+'Confidential' => -> (issue) { issue.confidential? ? 'Yes' : 'No' },
+'Locked' => -> (issue) { issue.discussion_locked? ? 'Yes' : 'No' },
+'Due Date' => -> (issue) { issue.due_date&.to_s(:csv) },
+'Created At (UTC)' => -> (issue) { issue.created_at&.to_s(:csv) },
+'Updated At (UTC)' => -> (issue) { issue.updated_at&.to_s(:csv) },
+'Closed At (UTC)' => -> (issue) { issue.closed_at&.to_s(:csv) },
+'Milestone' => -> (issue) { issue.milestone&.title },
+'Weight' => -> (issue) { issue.weight },
+'Labels' => -> (issue) { issue_labels(issue) },
+'Time Estimate' => ->(issue) { issue.time_estimate.to_s(:csv) },
+'Time Spent' => -> (issue) { issue_time_spent(issue) }
 }
 end

@@ -9,10 +9,10 @@ class Base
 ERRORS = {
 connection: [Errno::ECONNRESET, Errno::ECONNREFUSED],
-jira_ruby: JIRA::HTTPError,
-ssl: OpenSSL::SSL::SSLError,
-timeout: [Timeout::Error, Errno::ETIMEDOUT],
-uri: [URI::InvalidURIError, SocketError]
+jira_ruby: JIRA::HTTPError,
+ssl: OpenSSL::SSL::SSLError,
+timeout: [Timeout::Error, Errno::ETIMEDOUT],
+uri: [URI::InvalidURIError, SocketError]
 }.freeze
 ALL_ERRORS = ERRORS.values.flatten.freeze

@@ -78,8 +78,8 @@ def in_write_lock(retry_lease:, &block)
 lease_key = "mergeability_check:#{merge_request.id}"
 lease_opts = {
-ttl: 1.minute,
-retries: retry_lease ? 10 : 0,
+ttl: 1.minute,
+retries: retry_lease ? 10 : 0,
 sleep_sec: retry_lease ? 1.second : 0
 }

@@ -105,7 +105,7 @@ def create!(branch)
 project: project,
 current_user: current_user,
 params: merge_request.attributes.merge(assignees: merge_request.assignees,
-label_ids: merge_request.label_ids)
+label_ids: merge_request.label_ids)
 ).execute
 end

@@ -62,7 +62,7 @@ def reviewer
 def update_state(reviewer_or_assignee)
 reviewer_or_assignee&.update(state: reviewer_or_assignee&.attention_requested? ? :reviewed : :attention_requested,
-updated_state_by: current_user)
+updated_state_by: current_user)
 end
 end
 end

@@ -13,11 +13,11 @@ def initialize(package, file, params)
 def execute
 package_file = package.package_files.build(
-file: file,
-size: params['file.size'],
+file: file,
+size: params['file.size'],
 file_name: params[:file_name],
 file_sha1: params['file.sha1'],
-file_md5: params['file.md5'],
+file_md5: params['file.md5'],
 conan_file_metadatum_attributes: {
 recipe_revision: params[:recipe_revision],
 package_revision: params[:package_revision],

@@ -10,12 +10,12 @@ def initialize(package, params)
 def execute
 package_file = package.package_files.build(
-file: params[:file],
-size: params[:size],
-file_name: params[:file_name],
-file_sha1: params[:file_sha1],
+file: params[:file],
+size: params[:size],
+file_name: params[:file_name],
+file_sha1: params[:file_sha1],
 file_sha256: params[:file_sha256],
-file_md5: params[:file_md5]
+file_md5: params[:file_md5]
 )
 if params[:build].present?

@@ -17,12 +17,12 @@ def execute
 # Debian package file are first uploaded to incoming with empty metadata,
 # and are moved later by Packages::Debian::ProcessChangesService
 package.package_files.create!(
-file: params[:file],
-size: params[:file]&.size,
-file_name: params[:file_name],
-file_sha1: params[:file_sha1],
+file: params[:file],
+size: params[:file]&.size,
+file_name: params[:file_name],
+file_sha1: params[:file_sha1],
 file_sha256: params[:file]&.sha256,
-file_md5: params[:file_md5],
+file_md5: params[:file_md5],
 debian_file_metadatum_attributes: {
 file_type: 'unknown',
 architecture: nil,