Unable to upgrade from 13.8.7 to 13.9.x or 13.10.x
Summary
Cannot update the GitLab instance from 13.8.7 to 13.9.x or 13.10.x: the `db:migrate` step aborts during the `20210115220610 ScheduleArtifactExpiryBackfill` post-migration with `PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE` (full trace in the Logs section below).
The GitLab instance runs from the Omnibus Docker image gitlab/gitlab-ce:13.8.7-ce.0.
Environment
OS
lsb_release -a
No LSB modules are available. Distributor ID: Debian Description: Debian GNU/Linux 9.13 (stretch) Release: 9.13 Codename: stretch
Docker
docker -v
Docker version 19.03.15, build 99e3ed8919
docker info
Client: Debug Mode: falseServer: Containers: 20 Running: 16 Paused: 0 Stopped: 4 Images: 24 Server Version: 19.03.15 Storage Driver: overlay2 Backing Filesystem: extfs Supports d_type: true Native Overlay Diff: true Logging Driver: json-file Cgroup Driver: cgroupfs Plugins: Volume: local Network: bridge host ipvlan macvlan null overlay Log: awslogs fluentd gcplogs gelf journald json-file local logentries splunk syslog Swarm: inactive Runtimes: runc Default Runtime: runc Init Binary: docker-init containerd version: 269548fa27e0089a8b8278fc4fc781d7f65a939b runc version: ff819c7e9184c13b7c2607fe6c30ae19403a7aff init version: fec3683 Security Options: seccomp Profile: default Kernel Version: 4.9.0-8-amd64 Operating System: Debian GNU/Linux 9 (stretch) OSType: linux Architecture: x86_64 CPUs: 6 Total Memory: 15.68GiB Name: myserver.domain.ext ID: F6FS:DL6I:4D4W:JVXJ:JTPE:ZOOX:CDYZ:ECXP:ALKQ:U2N5:CPBE:TC7Y Docker Root Dir: /var/lib/docker Debug Mode: false Registry: https://index.docker.io/v1/ Labels: Experimental: false Insecure Registries: 127.0.0.0/8 Live Restore Enabled: false
WARNING: No swap limit support
Logs
Logs during the upgrade
-> 0.0001s -- quote_table_name("check_c34e505c24") -> 0.0000s -- quote_table_name(:dast_profiles) -> 0.0000s -- execute("ALTER TABLE \"dast_profiles\"\nADD CONSTRAINT \"check_5fcf73bf61\" CHECK (char_length(\"name\") <= 255),\nADD CONSTRAINT \"check_c34e505c24\" CHECK (char_length(\"description\") <= 255)\n") -> 0.0012s == 20210111051045 CreateDastProfiles: migrated (0.0867s) ====================== == 20210111053308 AddProjectFkForDastProfile: migrating ======================= -- transaction_open?() -> 0.0000s -- foreign_keys(:dast_profiles) -> 0.0068s -- execute("ALTER TABLE dast_profiles\nADD CONSTRAINT fk_aa76ef30e9\nFOREIGN KEY (project_id)\nREFERENCES projects (id)\nON DELETE CASCADE\nNOT VALID;\n") -> 0.0029s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE dast_profiles VALIDATE CONSTRAINT fk_aa76ef30e9;") -> 0.0026s -- execute("RESET ALL") -> 0.0004s == 20210111053308 AddProjectFkForDastProfile: migrated (0.0173s) ============== == 20210111075104 AddTemporaryIndexOnSecurityFindingsScanId: migrating ======== -- transaction_open?() -> 0.0000s -- index_exists?(:security_findings, :scan_id, {:where=>"uuid is null", :name=>"tmp_index_on_security_findings_scan_id", :algorithm=>:concurrently}) -> 0.0052s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:security_findings, :scan_id, {:where=>"uuid is null", :name=>"tmp_index_on_security_findings_scan_id", :algorithm=>:concurrently}) -> 0.0052s -- execute("RESET ALL") -> 0.0003s == 20210111075104 AddTemporaryIndexOnSecurityFindingsScanId: migrated (0.0116s) == 20210111075105 ScheduleUuidPopulationForSecurityFindings: migrating ======== == 20210111075105 ScheduleUuidPopulationForSecurityFindings: migrated (0.0000s) == 20210111075206 ScheduleUuidPopulationForSecurityFindings2: migrating ======= == 20210111075206 ScheduleUuidPopulationForSecurityFindings2: migrated (0.0028s) == 20210112202949 CreateComposerCacheFile: migrating ========================== -- 
create_table(:packages_composer_cache_files, {}) -- quote_column_name(:file) -> 0.0000s -> 0.0159s -- quote_table_name("check_84f5ba81f5") -> 0.0001s -- quote_table_name(:packages_composer_cache_files) -> 0.0000s -- execute("ALTER TABLE \"packages_composer_cache_files\"\nADD CONSTRAINT \"check_84f5ba81f5\" CHECK (char_length(\"file\") <= 255)\n") -> 0.0008s == 20210112202949 CreateComposerCacheFile: migrated (0.0192s) ================= == 20210113224909 AddPipelineConfigurationFullPathToCompliancePipeline: migrating -- add_column(:compliance_management_frameworks, :pipeline_configuration_full_path, :text) -> 0.0011s == 20210113224909 AddPipelineConfigurationFullPathToCompliancePipeline: migrated (0.0012s) == 20210113231532 AddConvertedAtToExperimentSubjects: migrating =============== -- add_column(:experiment_subjects, :converted_at, :datetime_with_timezone) -> 0.0014s == 20210113231532 AddConvertedAtToExperimentSubjects: migrated (0.0015s) ====== == 20210113231546 AddContextToExperimentSubjects: migrating =================== -- add_column(:experiment_subjects, :context, :jsonb, {:default=>{}, :null=>false}) -> 0.0048s == 20210113231546 AddContextToExperimentSubjects: migrated (0.0049s) ========== == 20210114142443 AddIndexesToOnboardingProgresses: migrating ================= -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, :created_at, {:where=>"git_write_at IS NULL", :name=>"index_onboarding_progresses_for_create_track", :algorithm=>:concurrently}) -> 0.0025s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:onboarding_progresses, :created_at, {:where=>"git_write_at IS NULL", :name=>"index_onboarding_progresses_for_create_track", :algorithm=>:concurrently})rake aborted! 
StandardError: An error has occurred, all later migrations canceled: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Caused by: ActiveRecord::StatementInvalid: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Caused by: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' 
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Tasks: TOP => db:migrate (See full trace by running task with --trace) -> 0.0053s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, :git_write_at, {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NULL", :name=>"index_onboarding_progresses_for_verify_track", :algorithm=>:concurrently}) -> 0.0031s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:onboarding_progresses, :git_write_at, {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NULL", :name=>"index_onboarding_progresses_for_verify_track", :algorithm=>:concurrently}) -> 0.0043s -- execute("RESET ALL") -> 0.0002s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NULL", :name=>"index_onboarding_progresses_for_trial_track", :algorithm=>:concurrently}) -> 0.0033s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NULL", :name=>"index_onboarding_progresses_for_trial_track", :algorithm=>:concurrently}) -> 0.0046s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at, trial_started_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT 
NULL AND trial_started_at IS NOT NULL AND user_added_at IS NULL", :name=>"index_onboarding_progresses_for_team_track", :algorithm=>:concurrently}) -> 0.0032s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at, trial_started_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NOT NULL AND user_added_at IS NULL", :name=>"index_onboarding_progresses_for_team_track", :algorithm=>:concurrently}) -> 0.0046s -- execute("RESET ALL") -> 0.0002s == 20210114142443 AddIndexesToOnboardingProgresses: migrated (0.0358s) ======== == 20210115090452 CreateGroupRepositoryStorageMove: migrating ================= -- table_exists?(:group_repository_storage_moves) -> 0.0009s -- create_table(:group_repository_storage_moves) -> 0.0133s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0003s -- execute("ALTER TABLE group_repository_storage_moves\nADD CONSTRAINT group_repository_storage_moves_source_storage_name\nCHECK ( char_length(source_storage_name) <= 255 )\nNOT VALID;\n") -> 0.0008s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE group_repository_storage_moves VALIDATE CONSTRAINT group_repository_storage_moves_source_storage_name;") -> 0.0009s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0002s -- execute("ALTER TABLE group_repository_storage_moves\nADD CONSTRAINT group_repository_storage_moves_destination_storage_name\nCHECK ( char_length(destination_storage_name) <= 255 )\nNOT VALID;\n") -> 0.0006s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0002s -- execute("ALTER TABLE group_repository_storage_moves VALIDATE CONSTRAINT group_repository_storage_moves_destination_storage_name;") -> 0.0008s -- execute("RESET ALL") -> 0.0002s == 20210115090452 CreateGroupRepositoryStorageMove: migrated (0.0306s) ======== == 
20210115220610 ScheduleArtifactExpiryBackfill: migrating =================== -- transaction_open?() -> 0.0000s -- indexes(:ci_job_artifacts) -> 0.0061s -- execute("SET statement_timeout TO 0") -> 0.0002s -- remove_index(:ci_job_artifacts, {:algorithm=>:concurrently, :name=>"expired_artifacts_temp_index"}) -> 0.0021s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:ci_job_artifacts, [:id, :created_at], {:where=>"expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date", :name=>"expired_artifacts_temp_index", :algorithm=>:concurrently}) -> 0.0059s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:ci_job_artifacts, [:id, :created_at], {:where=>"expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date", :name=>"expired_artifacts_temp_index", :algorithm=>:concurrently}) -- execute("RESET ALL") -> 0.0002s ================================================================================ Error executing action `run` on resource 'bash[migrate gitlab-rails database]' ================================================================================ Mixlib::ShellOut::ShellCommandFailed ------------------------------------ Expected process to exit with [0], but received '1' ---- Begin output of "bash" "/tmp/chef-script20210406-30-tjljeq" ---- STDOUT: == 20200816133024 AddCveIdRequestProjectSetting: migrating ==================== -- add_column(:project_settings, :cve_id_request_enabled, :boolean, {:default=>true, :null=>false}) -> 0.0734s == 20200816133024 AddCveIdRequestProjectSetting: migrated (0.0735s) =========== == 20201027101010 CreateUserFollowUsers: migrating ============================ -- execute("CREATE TABLE user_follow_users (\n follower_id integer not null references users (id) on delete cascade,\n followee_id integer not null references users (id) on delete cascade,\n PRIMARY KEY (follower_id, followee_id)\n);\nCREATE INDEX ON user_follow_users (followee_id);\n") -> 
0.0358s == 20201027101010 CreateUserFollowUsers: migrated (0.0423s) =================== == 20201108134919 AddFindingFingerprintTable: migrating ======================= -- create_table(:vulnerability_finding_fingerprints) -> 0.0283s == 20201108134919 AddFindingFingerprintTable: migrated (0.0312s) ============== == 20201112130710 ScheduleRemoveDuplicateVulnerabilitiesFindings: migrating === -- transaction_open?() -> 0.0000s -- index_exists?(:vulnerability_occurrences, [:project_id, :report_type, :location_fingerprint, :primary_identifier_id, :id], {:name=>"tmp_idx_deduplicate_vulnerability_occurrences", :algorithm=>:concurrently}) -> 0.0070s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:vulnerability_occurrences, [:project_id, :report_type, :location_fingerprint, :primary_identifier_id, :id], {:name=>"tmp_idx_deduplicate_vulnerability_occurrences", :algorithm=>:concurrently}) -> 0.0064s -- execute("RESET ALL") -> 0.0004s -- Scheduling RemoveDuplicateVulnerabilitiesFindings jobs -- Scheduled 0 RemoveDuplicateVulnerabilitiesFindings jobs with a maximum of 5000 records per batch and an interval of 120 seconds. The migration is expected to take at least 0 seconds. Expect all jobs to have completed after 2021-04-06 23:13:03 UTC." == 20201112130710 ScheduleRemoveDuplicateVulnerabilitiesFindings: migrated (0.0555s) == 20201112130715 ScheduleRecalculateUuidOnVulnerabilitiesOccurrences: migrating -- Scheduling RecalculateVulnerabilitiesOccurrencesUuid jobs -- Scheduled 0 RecalculateVulnerabilitiesOccurrencesUuid jobs with a maximum of 2500 records per batch and an interval of 120 seconds. The migration is expected to take at least 0 seconds. Expect all jobs to have completed after 2021-04-06 23:13:03 UTC." 
== 20201112130715 ScheduleRecalculateUuidOnVulnerabilitiesOccurrences: migrated (0.0040s) == 20201120092000 AddKrokiFormatsToApplicationSettingsTable: migrating ======== -- change_table(:application_settings) -> 0.0051s == 20201120092000 AddKrokiFormatsToApplicationSettingsTable: migrated (0.0052s) == 20201120144823 CreateTokensWithIv: migrating =============================== -- create_table(:token_with_ivs) -> 0.0229s == 20201120144823 CreateTokensWithIv: migrated (0.0230s) ====================== == 20201127104228 AddIndexToSecurityScansOnCreatedAtAndId: migrating ========== -- transaction_open?() -> 0.0000s -- index_exists?(:security_scans, "date(timezone('UTC', created_at)), id", {:name=>"index_security_scans_on_date_created_at_and_id", :algorithm=>:concurrently}) -> 0.0033s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:security_scans, "date(timezone('UTC', created_at)), id", {:name=>"index_security_scans_on_date_created_at_and_id", :algorithm=>:concurrently}) -> 0.0071s -- execute("RESET ALL") -> 0.0004s == 20201127104228 AddIndexToSecurityScansOnCreatedAtAndId: migrated (0.0117s) = == 20201128210000 AddServiceDeskReplyToIsNotNullIndexOnIssuesFix: migrating === -- transaction_open?() -> 0.0000s -- index_exists?(:issues, [:id], {:name=>"idx_on_issues_where_service_desk_reply_to_is_not_null", :where=>"service_desk_reply_to IS NOT NULL", :algorithm=>:concurrently}) -> 0.0154s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:issues, [:id], {:name=>"idx_on_issues_where_service_desk_reply_to_is_not_null", :where=>"service_desk_reply_to IS NOT NULL", :algorithm=>:concurrently}) -> 0.0092s -- execute("RESET ALL") -> 0.0004s == 20201128210000 AddServiceDeskReplyToIsNotNullIndexOnIssuesFix: migrated (0.0261s) == 20201128210234 SchedulePopulateIssueEmailParticipants: migrating =========== -- Scheduled 0 PopulateIssueEmailParticipants jobs with a maximum of 1000 records per batch and an interval of 120 seconds. 
The migration is expected to take at least 0 seconds. Expect all jobs to have completed after 2021-04-06 23:13:03 UTC." == 20201128210234 SchedulePopulateIssueEmailParticipants: migrated (0.0477s) == == 20201204111200 CreatePackagesDebianProjectComponents: migrating ============ -- table_exists?(:packages_debian_project_components) -> 0.0010s -- create_table(:packages_debian_project_components, {}) -> 0.0702s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0003s -- execute("ALTER TABLE packages_debian_project_components\nADD CONSTRAINT check_517559f298\nCHECK ( char_length(name) <= 255 )\nNOT VALID;\n") -> 0.0011s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0002s -- execute("ALTER TABLE packages_debian_project_components VALIDATE CONSTRAINT check_517559f298;") -> 0.0007s -- execute("RESET ALL") -> 0.0003s == 20201204111200 CreatePackagesDebianProjectComponents: migrated (0.0808s) === == 20201204111300 CreatePackagesDebianGroupComponents: migrating ============== -- table_exists?(:packages_debian_group_components) -> 0.0010s -- create_table(:packages_debian_group_components, {}) -> 0.0142s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0003s -- execute("ALTER TABLE packages_debian_group_components\nADD CONSTRAINT check_a9bc7d85be\nCHECK ( char_length(name) <= 255 )\nNOT VALID;\n") -> 0.0007s -- current_schema() -> 0.0003s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE packages_debian_group_components VALIDATE CONSTRAINT check_a9bc7d85be;") -> 0.0008s -- execute("RESET ALL") -> 0.0003s == 20201204111300 CreatePackagesDebianGroupComponents: migrated (0.0232s) ===== == 20201204111400 CreatePackagesDebianProjectComponentFiles: migrating ======== -- table_exists?(:packages_debian_project_component_files) -> 0.0008s -- create_table(:packages_debian_project_component_files, {}) -> 0.0205s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0005s -- execute("ALTER TABLE 
packages_debian_project_component_files\nADD CONSTRAINT check_e5af03fa2d\nCHECK ( char_length(file) <= 255 )\nNOT VALID;\n") -> 0.0008s -- current_schema() -> 0.0003s -- execute("SET statement_timeout TO 0") -> 0.0004s -- execute("ALTER TABLE packages_debian_project_component_files VALIDATE CONSTRAINT check_e5af03fa2d;") -> 0.0008s -- execute("RESET ALL") -> 0.0002s == 20201204111400 CreatePackagesDebianProjectComponentFiles: migrated (0.0339s) == 20201204111500 CreatePackagesDebianGroupComponentFiles: migrating ========== -- table_exists?(:packages_debian_group_component_files) -> 0.0008s -- create_table(:packages_debian_group_component_files, {}) -> 0.0192s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0003s -- execute("ALTER TABLE packages_debian_group_component_files\nADD CONSTRAINT check_839e1685bc\nCHECK ( char_length(file) <= 255 )\nNOT VALID;\n") -> 0.0009s -- current_schema() -> 0.0004s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE packages_debian_group_component_files VALIDATE CONSTRAINT check_839e1685bc;") -> 0.0010s -- execute("RESET ALL") -> 0.0003s == 20201204111500 CreatePackagesDebianGroupComponentFiles: migrated (0.0397s) = == 20201204111600 CreatePackagesDebianPublications: migrating ================= -- create_table(:packages_debian_publications, {}) -> 0.0187s == 20201204111600 CreatePackagesDebianPublications: migrated (0.0188s) ======== == 20201209163113 RecreateIndexIssueEmailParticipantsOnIssueIdAndEmail: migrating -- transaction_open?() -> 0.0000s -- index_exists?(:issue_email_participants, "issue_id, lower(email)", {:unique=>true, :name=>"index_issue_email_participants_on_issue_id_and_lower_email", :algorithm=>:concurrently})NOTICE: trigger "trigger_9a96291bb38d" for relation "application_settings" does not exist, skipping NOTICE: trigger "trigger_22a39c5c25f3" for relation "application_settings" does not exist, skipping NOTICE: trigger "trigger_1572cbc9a15f" for relation "application_settings" 
does not exist, skipping -> 0.0026s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:issue_email_participants, "issue_id, lower(email)", {:unique=>true, :name=>"index_issue_email_participants_on_issue_id_and_lower_email", :algorithm=>:concurrently}) -> 0.0102s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- indexes(:issue_email_participants) -> 0.0024s -- execute("SET statement_timeout TO 0") -> 0.0002s -- remove_index(:issue_email_participants, {:algorithm=>:concurrently, :name=>"index_issue_email_participants_on_issue_id_and_email"}) -> 0.0038s -- execute("RESET ALL") -> 0.0004s == 20201209163113 RecreateIndexIssueEmailParticipantsOnIssueIdAndEmail: migrated (0.0219s) == 20201221225303 AddServiceDeskReplyToIsNotNullIndexOnIssues: migrating ====== == 20201221225303 AddServiceDeskReplyToIsNotNullIndexOnIssues: migrated (0.0000s) == 20201228110136 CreateIterationsCadence: migrating ========================== -- create_table(:iterations_cadences, {}) -- quote_column_name(:title) -> 0.0000s -> 0.0159s -- quote_table_name("check_fedff82d3b") -> 0.0000s -- quote_table_name(:iterations_cadences) -> 0.0000s -- execute("ALTER TABLE \"iterations_cadences\"\nADD CONSTRAINT \"check_fedff82d3b\" CHECK (char_length(\"title\") <= 255)\n") -> 0.0007s == 20201228110136 CreateIterationsCadence: migrated (0.0192s) ================= == 20201228110238 AddIterationsCadenceToSprints: migrating ==================== -- column_exists?(:sprints, :iterations_cadence_id) -> 0.0022s -- add_column(:sprints, :iterations_cadence_id, :integer) -> 0.0016s -- transaction_open?() -> 0.0000s -- index_exists?(:sprints, :iterations_cadence_id, {:name=>"index_sprints_iterations_cadence_id", :algorithm=>:concurrently}) -> 0.0059s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:sprints, :iterations_cadence_id, {:name=>"index_sprints_iterations_cadence_id", :algorithm=>:concurrently}) -> 0.0055s -- execute("RESET ALL") -> 0.0003s -- 
transaction_open?() -> 0.0000s -- foreign_keys(:sprints) -> 0.0080s -- execute("ALTER TABLE sprints\nADD CONSTRAINT fk_365d1db505\nFOREIGN KEY (iterations_cadence_id)\nREFERENCES iterations_cadences (id)\nON DELETE CASCADE\nNOT VALID;\n") -> 0.0016s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE sprints VALIDATE CONSTRAINT fk_365d1db505;") -> 0.0041s -- execute("RESET ALL") -> 0.0003s == 20201228110238 AddIterationsCadenceToSprints: migrated (0.0342s) =========== == 20201231133921 ScheduleSetDefaultIterationCadences: migrating ============== == 20201231133921 ScheduleSetDefaultIterationCadences: migrated (0.0000s) ===== == 20210105030125 CleanupProjectsWithBadHasExternalWikiData: migrating ======== -- transaction_open?() -> 0.0000s -- index_exists?(:projects, :id, {:where=>"(\n \"projects\".\"has_external_wiki\" = TRUE\n)\nAND \"projects\".\"pending_delete\" = FALSE\nAND \"projects\".\"archived\" = FALSE\n", :name=>"tmp_index_projects_on_id_where_has_external_wiki_is_true", :algorithm=>:concurrently}) -> 0.0454s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:projects, :id, {:where=>"(\n \"projects\".\"has_external_wiki\" = TRUE\n)\nAND \"projects\".\"pending_delete\" = FALSE\nAND \"projects\".\"archived\" = FALSE\n", :name=>"tmp_index_projects_on_id_where_has_external_wiki_is_true", :algorithm=>:concurrently}) -> 0.0340s -- execute("RESET ALL") -> 0.0004s -- transaction_open?() -> 0.0000s -- indexes(:projects) -> 0.0245s -- execute("SET statement_timeout TO 0") -> 0.0003s -- remove_index(:projects, {:algorithm=>:concurrently, :name=>"tmp_index_projects_on_id_where_has_external_wiki_is_true"}) -> 0.0023s -- execute("RESET ALL") -> 0.0003s == 20210105030125 CleanupProjectsWithBadHasExternalWikiData: migrated (0.1869s) == 20210105052034 RenameAssetProxyWhitelistOnApplicationSettings: migrating === -- transaction_open?() -> 0.0000s -- columns(:application_settings) -> 0.1096s -- column_exists?(:application_settings, :id) 
-> 0.0891s -- column_exists?(:application_settings, :asset_proxy_allowlist) -> 0.0836s -- columns(:application_settings) -> 0.0832s -- add_column(:application_settings, :asset_proxy_allowlist, :text, {:limit=>nil, :precision=>nil, :scale=>nil}) -> 0.0025s -- transaction_open?() -> 0.0000s -- exec_query("SELECT COUNT(*) AS count FROM \"application_settings\"") -> 0.0016s -- exec_query("SELECT \"application_settings\".\"id\" FROM \"application_settings\" ORDER BY \"application_settings\".\"id\" ASC LIMIT 1") -> 0.0005s -- exec_query("SELECT \"application_settings\".\"id\" FROM \"application_settings\" WHERE \"application_settings\".\"id\" >= 5 ORDER BY \"application_settings\".\"id\" ASC LIMIT 1 OFFSET 1") -> 0.0007s -- execute("UPDATE \"application_settings\" SET \"asset_proxy_allowlist\" = \"application_settings\".\"asset_proxy_whitelist\" WHERE \"application_settings\".\"id\" >= 5") -> 0.0026s -- indexes(:application_settings) -> 0.0063s -- foreign_keys(:application_settings) -> 0.0079s -- transaction_open?() -> 0.0000s -- column_exists?(:application_settings, :asset_proxy_whitelist) -> 0.0746s -- column_exists?(:application_settings, :asset_proxy_allowlist) -> 0.0811s -- current_schema() -> 0.0006s -- quote_table_name(:application_settings) -> 0.0000s -- quote_column_name(:asset_proxy_whitelist) -> 0.0000s -- quote_column_name(:asset_proxy_allowlist) -> 0.0000s -- execute("CREATE OR REPLACE FUNCTION function_for_trigger_9a96291bb38d()\nRETURNS trigger\nLANGUAGE plpgsql\nAS $$\nBEGIN\n IF NEW.\"asset_proxy_whitelist\" IS NULL AND NEW.\"asset_proxy_allowlist\" IS NOT NULL THEN\n NEW.\"asset_proxy_whitelist\" = NEW.\"asset_proxy_allowlist\";\n END IF;\n\n IF NEW.\"asset_proxy_allowlist\" IS NULL AND NEW.\"asset_proxy_whitelist\" IS NOT NULL THEN\n NEW.\"asset_proxy_allowlist\" = NEW.\"asset_proxy_whitelist\";\n END IF;\n\n RETURN NEW;\nEND\n$$;\n\nDROP TRIGGER IF EXISTS trigger_9a96291bb38d\nON \"application_settings\";\n\nCREATE TRIGGER trigger_9a96291bb38d\nBEFORE 
INSERT ON \"application_settings\"\nFOR EACH ROW EXECUTE FUNCTION function_for_trigger_9a96291bb38d();\n") -> 0.0087s -- execute("CREATE OR REPLACE FUNCTION function_for_trigger_22a39c5c25f3()\nRETURNS trigger\nLANGUAGE plpgsql\nAS $$\nBEGIN\n NEW.\"asset_proxy_whitelist\" := NEW.\"asset_proxy_allowlist\";\n RETURN NEW;\nEND\n$$;\n\nDROP TRIGGER IF EXISTS trigger_22a39c5c25f3\nON \"application_settings\";\n\nCREATE TRIGGER trigger_22a39c5c25f3\nBEFORE UPDATE OF \"asset_proxy_allowlist\" ON \"application_settings\"\nFOR EACH ROW EXECUTE FUNCTION function_for_trigger_22a39c5c25f3();\n") -> 0.0013s -- execute("CREATE OR REPLACE FUNCTION function_for_trigger_1572cbc9a15f()\nRETURNS trigger\nLANGUAGE plpgsql\nAS $$\nBEGIN\n NEW.\"asset_proxy_allowlist\" := NEW.\"asset_proxy_whitelist\";\n RETURN NEW;\nEND\n$$;\n\nDROP TRIGGER IF EXISTS trigger_1572cbc9a15f\nON \"application_settings\";\n\nCREATE TRIGGER trigger_1572cbc9a15f\nBEFORE UPDATE OF \"asset_proxy_whitelist\" ON \"application_settings\"\nFOR EACH ROW EXECUTE FUNCTION function_for_trigger_1572cbc9a15f();\n") -> 0.0009s == 20210105052034 RenameAssetProxyWhitelistOnApplicationSettings: migrated (0.6032s) == 20210105052229 CleanUpAssetProxyWhitelistRenameOnApplicationSettings: migrating == 20210105052229 CleanUpAssetProxyWhitelistRenameOnApplicationSettings: migrated (0.0000s) == 20210105153342 AddEntityColumnsToVulnerabilityOccurrences: migrating ======= -- add_column(:vulnerability_occurrences, :description, :text) -> 0.0016s -- add_column(:vulnerability_occurrences, :message, :text) -> 0.0017s -- add_column(:vulnerability_occurrences, :solution, :text) -> 0.0006s -- add_column(:vulnerability_occurrences, :cve, :text) -> 0.0006s -- add_column(:vulnerability_occurrences, :location, :jsonb) -> 0.0006s == 20210105153342 AddEntityColumnsToVulnerabilityOccurrences: migrated (0.0055s) == 20210105154321 AddTextLimitToVulnerabilityOccurrencesEntityColumns: migrating -- transaction_open?() -> 0.0000s -- current_schema() -> 
0.0004s -- execute("ALTER TABLE vulnerability_occurrences\nADD CONSTRAINT check_ade261da6b\nCHECK ( char_length(description) <= 15000 )\nNOT VALID;\n") -> 0.0010s -- current_schema() -> 0.0003s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE vulnerability_occurrences VALIDATE CONSTRAINT check_ade261da6b;") -> 0.0010s -- execute("RESET ALL") -> 0.0002s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0002s -- execute("ALTER TABLE vulnerability_occurrences\nADD CONSTRAINT check_df6dd20219\nCHECK ( char_length(message) <= 3000 )\nNOT VALID;\n") -> 0.0006s -- current_schema() -> 0.0003s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE vulnerability_occurrences VALIDATE CONSTRAINT check_df6dd20219;") -> 0.0008s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0002s -- execute("ALTER TABLE vulnerability_occurrences\nADD CONSTRAINT check_4a3a60f2ba\nCHECK ( char_length(solution) <= 7000 )\nNOT VALID;\n") -> 0.0006s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE vulnerability_occurrences VALIDATE CONSTRAINT check_4a3a60f2ba;") -> 0.0008s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0002s -- execute("ALTER TABLE vulnerability_occurrences\nADD CONSTRAINT check_f602da68dd\nCHECK ( char_length(cve) <= 48400 )\nNOT VALID;\n") -> 0.0007s -- current_schema() -> 0.0003s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE vulnerability_occurrences VALIDATE CONSTRAINT check_f602da68dd;") -> 0.0009s -- execute("RESET ALL") -> 0.0002s == 20210105154321 AddTextLimitToVulnerabilityOccurrencesEntityColumns: migrated (0.0303s) == 20210106155209 AddMergeRequestDiffCommitTrailers: migrating ================ -- add_column(:merge_request_diff_commits, :trailers, :jsonb, {:default=>{}, :null=>false}) -> 0.0020s == 20210106155209 
AddMergeRequestDiffCommitTrailers: migrated (0.0041s) ======= == 20210106191305 RenameIndexesOnGitLabCom: migrating ========================= -- indexes(:ldap_group_links) -> 0.0018s -- current_schema() -> 0.0002s -- indexes(:emails) -> 0.0035s -- current_schema() -> 0.0003s -- indexes(:users) -> 0.0129s -- current_schema() -> 0.0003s -- indexes(:users) -> 0.0115s -- current_schema() -> 0.0002s -- indexes(:users) -> 0.0107s -- current_schema() -> 0.0003s -- indexes(:schema_migrations) -> 0.0026s -- current_schema() -> 0.0002s == 20210106191305 RenameIndexesOnGitLabCom: migrated (0.0573s) ================ == 20210106225424 AddKeepLatestArtifactsToApplicationSettings: migrating ====== -- add_column(:application_settings, :keep_latest_artifact, :boolean, {:default=>true, :null=>false}) -> 0.0027s == 20210106225424 AddKeepLatestArtifactsToApplicationSettings: migrated (0.0028s) == 20210107105306 AddDiffTypeToMergeRequestDiffs: migrating =================== -- column_exists?(:merge_request_diffs, :diff_type) -> 0.0039s -- add_column(:merge_request_diffs, :diff_type, :integer, {:null=>false, :limit=>2, :default=>1}) -> 0.0020s -- transaction_open?() -> 0.0000s -- index_exists?(:merge_request_diffs, :merge_request_id, {:unique=>true, :where=>"diff_type = 2", :name=>"index_merge_request_diffs_on_unique_merge_request_id", :algorithm=>:concurrently}) -> 0.0049s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:merge_request_diffs, :merge_request_id, {:unique=>true, :where=>"diff_type = 2", :name=>"index_merge_request_diffs_on_unique_merge_request_id", :algorithm=>:concurrently}) -> 0.0261s -- execute("RESET ALL") -> 0.0003s == 20210107105306 AddDiffTypeToMergeRequestDiffs: migrated (0.0417s) ========== == 20210107154615 AddMergeRequestContextCommitTrailers: migrating ============= -- add_column(:merge_request_context_commits, :trailers, :jsonb, {:default=>{}, :null=>false}) -> 0.0022s == 20210107154615 AddMergeRequestContextCommitTrailers: migrated (0.0023s) 
==== == 20210111051045 CreateDastProfiles: migrating =============================== -- create_table(:dast_profiles, {:comment=>"{\"owner\":\"group::dynamic analysis\",\"description\":\"Profile used to run a DAST on-demand scan\"}"}) -- quote_column_name(:name) -> 0.0000s -- quote_column_name(:description) -> 0.0000s -> 0.0828s -- quote_table_name("check_5fcf73bf61") -> 0.0001s -- quote_table_name("check_c34e505c24") -> 0.0000s -- quote_table_name(:dast_profiles) -> 0.0000s -- execute("ALTER TABLE \"dast_profiles\"\nADD CONSTRAINT \"check_5fcf73bf61\" CHECK (char_length(\"name\") <= 255),\nADD CONSTRAINT \"check_c34e505c24\" CHECK (char_length(\"description\") <= 255)\n") -> 0.0012s == 20210111051045 CreateDastProfiles: migrated (0.0867s) ====================== == 20210111053308 AddProjectFkForDastProfile: migrating ======================= -- transaction_open?() -> 0.0000s -- foreign_keys(:dast_profiles) -> 0.0068s -- execute("ALTER TABLE dast_profiles\nADD CONSTRAINT fk_aa76ef30e9\nFOREIGN KEY (project_id)\nREFERENCES projects (id)\nON DELETE CASCADE\nNOT VALID;\n") -> 0.0029s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE dast_profiles VALIDATE CONSTRAINT fk_aa76ef30e9;") -> 0.0026s -- execute("RESET ALL") -> 0.0004s == 20210111053308 AddProjectFkForDastProfile: migrated (0.0173s) ============== == 20210111075104 AddTemporaryIndexOnSecurityFindingsScanId: migrating ======== -- transaction_open?() -> 0.0000s -- index_exists?(:security_findings, :scan_id, {:where=>"uuid is null", :name=>"tmp_index_on_security_findings_scan_id", :algorithm=>:concurrently}) -> 0.0052s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:security_findings, :scan_id, {:where=>"uuid is null", :name=>"tmp_index_on_security_findings_scan_id", :algorithm=>:concurrently}) -> 0.0052s -- execute("RESET ALL") -> 0.0003s == 20210111075104 AddTemporaryIndexOnSecurityFindingsScanId: migrated (0.0116s) == 20210111075105 
ScheduleUuidPopulationForSecurityFindings: migrating ======== == 20210111075105 ScheduleUuidPopulationForSecurityFindings: migrated (0.0000s) == 20210111075206 ScheduleUuidPopulationForSecurityFindings2: migrating ======= == 20210111075206 ScheduleUuidPopulationForSecurityFindings2: migrated (0.0028s) == 20210112202949 CreateComposerCacheFile: migrating ========================== -- create_table(:packages_composer_cache_files, {}) -- quote_column_name(:file) -> 0.0000s -> 0.0159s -- quote_table_name("check_84f5ba81f5") -> 0.0001s -- quote_table_name(:packages_composer_cache_files) -> 0.0000s -- execute("ALTER TABLE \"packages_composer_cache_files\"\nADD CONSTRAINT \"check_84f5ba81f5\" CHECK (char_length(\"file\") <= 255)\n") -> 0.0008s == 20210112202949 CreateComposerCacheFile: migrated (0.0192s) ================= == 20210113224909 AddPipelineConfigurationFullPathToCompliancePipeline: migrating -- add_column(:compliance_management_frameworks, :pipeline_configuration_full_path, :text) -> 0.0011s == 20210113224909 AddPipelineConfigurationFullPathToCompliancePipeline: migrated (0.0012s) == 20210113231532 AddConvertedAtToExperimentSubjects: migrating =============== -- add_column(:experiment_subjects, :converted_at, :datetime_with_timezone) -> 0.0014s == 20210113231532 AddConvertedAtToExperimentSubjects: migrated (0.0015s) ====== == 20210113231546 AddContextToExperimentSubjects: migrating =================== -- add_column(:experiment_subjects, :context, :jsonb, {:default=>{}, :null=>false}) -> 0.0048s == 20210113231546 AddContextToExperimentSubjects: migrated (0.0049s) ========== == 20210114142443 AddIndexesToOnboardingProgresses: migrating ================= -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, :created_at, {:where=>"git_write_at IS NULL", :name=>"index_onboarding_progresses_for_create_track", :algorithm=>:concurrently}) -> 0.0025s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:onboarding_progresses, :created_at, 
{:where=>"git_write_at IS NULL", :name=>"index_onboarding_progresses_for_create_track", :algorithm=>:concurrently})rake aborted! StandardError: An error has occurred, all later migrations canceled: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Caused by: ActiveRecord::StatementInvalid: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Caused by: PG::InvalidObjectDefinition: ERROR: functions in index predicate must be marked IMMUTABLE /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:151:in `block in add_concurrent_index' 
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:337:in `disable_statement_timeout' /opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/database/migration_helpers.rb:150:in `add_concurrent_index' /opt/gitlab/embedded/service/gitlab-rails/db/post_migrate/20210115220610_schedule_artifact_expiry_backfill.rb:28:in `up' /opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:61:in `block (3 levels) in <top (required)>' /opt/gitlab/embedded/bin/bundle:23:in `load' /opt/gitlab/embedded/bin/bundle:23:in `<main>' Tasks: TOP => db:migrate (See full trace by running task with --trace) -> 0.0053s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, :git_write_at, {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NULL", :name=>"index_onboarding_progresses_for_verify_track", :algorithm=>:concurrently}) -> 0.0031s -- execute("SET statement_timeout TO 0") -> 0.0002s -- add_index(:onboarding_progresses, :git_write_at, {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NULL", :name=>"index_onboarding_progresses_for_verify_track", :algorithm=>:concurrently}) -> 0.0043s -- execute("RESET ALL") -> 0.0002s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NULL", :name=>"index_onboarding_progresses_for_trial_track", :algorithm=>:concurrently}) -> 0.0033s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NULL", :name=>"index_onboarding_progresses_for_trial_track", :algorithm=>:concurrently}) -> 0.0046s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:onboarding_progresses, 
"GREATEST(git_write_at, pipeline_created_at, trial_started_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NOT NULL AND user_added_at IS NULL", :name=>"index_onboarding_progresses_for_team_track", :algorithm=>:concurrently}) -> 0.0032s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:onboarding_progresses, "GREATEST(git_write_at, pipeline_created_at, trial_started_at)", {:where=>"git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NOT NULL AND user_added_at IS NULL", :name=>"index_onboarding_progresses_for_team_track", :algorithm=>:concurrently}) -> 0.0046s -- execute("RESET ALL") -> 0.0002s == 20210114142443 AddIndexesToOnboardingProgresses: migrated (0.0358s) ======== == 20210115090452 CreateGroupRepositoryStorageMove: migrating ================= -- table_exists?(:group_repository_storage_moves) -> 0.0009s -- create_table(:group_repository_storage_moves) -> 0.0133s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0003s -- execute("ALTER TABLE group_repository_storage_moves\nADD CONSTRAINT group_repository_storage_moves_source_storage_name\nCHECK ( char_length(source_storage_name) <= 255 )\nNOT VALID;\n") -> 0.0008s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0003s -- execute("ALTER TABLE group_repository_storage_moves VALIDATE CONSTRAINT group_repository_storage_moves_source_storage_name;") -> 0.0009s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- current_schema() -> 0.0002s -- execute("ALTER TABLE group_repository_storage_moves\nADD CONSTRAINT group_repository_storage_moves_destination_storage_name\nCHECK ( char_length(destination_storage_name) <= 255 )\nNOT VALID;\n") -> 0.0006s -- current_schema() -> 0.0002s -- execute("SET statement_timeout TO 0") -> 0.0002s -- execute("ALTER TABLE group_repository_storage_moves VALIDATE CONSTRAINT group_repository_storage_moves_destination_storage_name;") 
-> 0.0008s -- execute("RESET ALL") -> 0.0002s == 20210115090452 CreateGroupRepositoryStorageMove: migrated (0.0306s) ======== == 20210115220610 ScheduleArtifactExpiryBackfill: migrating =================== -- transaction_open?() -> 0.0000s -- indexes(:ci_job_artifacts) -> 0.0061s -- execute("SET statement_timeout TO 0") -> 0.0002s -- remove_index(:ci_job_artifacts, {:algorithm=>:concurrently, :name=>"expired_artifacts_temp_index"}) -> 0.0021s -- execute("RESET ALL") -> 0.0003s -- transaction_open?() -> 0.0000s -- index_exists?(:ci_job_artifacts, [:id, :created_at], {:where=>"expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date", :name=>"expired_artifacts_temp_index", :algorithm=>:concurrently}) -> 0.0059s -- execute("SET statement_timeout TO 0") -> 0.0003s -- add_index(:ci_job_artifacts, [:id, :created_at], {:where=>"expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date", :name=>"expired_artifacts_temp_index", :algorithm=>:concurrently}) -- execute("RESET ALL") -> 0.0002s STDERR: ---- End output of "bash" "/tmp/chef-script20210406-30-tjljeq" ---- Ran "bash" "/tmp/chef-script20210406-30-tjljeq" returned 1 Resource Declaration: --------------------- # In /opt/gitlab/embedded/cookbooks/cache/cookbooks/gitlab/recipes/database_migrations.rb 70: bash "migrate gitlab-rails database" do 71: code <<-EOH 72: set -e 73: log_file="#{node['gitlab']['gitlab-rails']['log_directory']}/gitlab-rails-db-migrate-$(date +%Y-%m-%d-%H-%M-%S).log" 74: umask 077 75: /opt/gitlab/bin/gitlab-rake gitlab:db:configure 2>& 1 | tee ${log_file} 76: STATUS=${PIPESTATUS[0]} 77: chown #{account_helper.gitlab_user}:#{account_helper.gitlab_group} ${log_file} 78: echo $STATUS > #{db_migrate_status_file} 79: exit $STATUS 80: EOH 81: environment env_variables unless env_variables.empty? 
82: notifies :run, "execute[clear the gitlab-rails cache]", :immediately 83: notifies :run, "ruby_block[check remote PG version]", :immediately 84: dependent_services.each do |svc| 85: notifies :restart, svc, :immediately 86: end 87: not_if "(test -f #{db_migrate_status_file}) && (cat #{db_migrate_status_file} | grep -Fx 0)" 88: only_if { node['gitlab']['gitlab-rails']['auto_migrate'] } 89: end Compiled Resource: ------------------ # Declared in /opt/gitlab/embedded/cookbooks/cache/cookbooks/gitlab/recipes/database_migrations.rb:70:in `from_file' bash("migrate gitlab-rails database") do action [:run] default_guard_interpreter :default command nil backup 5 interpreter "bash" declared_type :bash cookbook_name "gitlab" recipe_name "database_migrations" code " set -e\n log_file=\"/var/log/gitlab/gitlab-rails/gitlab-rails-db-migrate-$(date +%Y-%m-%d-%H-%M-%S).log\"\n umask 077\n /opt/gitlab/bin/gitlab-rake gitlab:db:configure 2>& 1 | tee ${log_file}\n STATUS=${PIPESTATUS[0]}\n chown git:git ${log_file}\n echo $STATUS > /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-7efd19e3716\n exit $STATUS\n" domain nil user nil not_if "(test -f /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-7efd19e3716) && (cat /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-7efd19e3716 | grep -Fx 0)" only_if { #code block } end System Info: ------------ chef_version=15.14.0 platform=ubuntu platform_version=20.04 ruby=ruby 2.7.2p137 (2020-10-01 revision 5445e04352) [x86_64-linux] program_name=/opt/gitlab/embedded/bin/chef-client executable=/opt/gitlab/embedded/bin/chef-client
Recipe: gitlab::mailroom
-
runit_service[mailroom] action restart
================================================================================
Error executing action
restart
on resource 'runit_service[mailroom]'================================================================================
Mixlib::ShellOut::ShellCommandFailed
Expected process to exit with [0], but received '1'
---- Begin output of /opt/gitlab/embedded/bin/sv restart /opt/gitlab/service/mailroom ----
STDOUT: fail: /opt/gitlab/service/mailroom: unable to change to service directory: file does not exist
STDERR:
---- End output of /opt/gitlab/embedded/bin/sv restart /opt/gitlab/service/mailroom ----
Ran /opt/gitlab/embedded/bin/sv restart /opt/gitlab/service/mailroom returned 1
Cookbook Trace:
/opt/gitlab/embedded/cookbooks/cache/cookbooks/runit/libraries/helpers.rb:136:in `tap'
/opt/gitlab/embedded/cookbooks/cache/cookbooks/runit/libraries/helpers.rb:136:in `safe_sv_shellout!'
/opt/gitlab/embedded/cookbooks/cache/cookbooks/runit/libraries/helpers.rb:164:in `restart_service'
/opt/gitlab/embedded/cookbooks/cache/cookbooks/runit/libraries/provider_runit_service.rb:357:in `block in class:RunitService'
Resource Declaration:
33: runit_service 'mailroom' do
34: finish true
35: options({
36: user: user,
37: groupname: group,
38: log_directory: mailroom_log_dir,
39: mail_room_config: mail_room_config,
40: exit_log_format: exit_log_format
41: }.merge(params))
42: log_options node['gitlab']['logging'].to_hash.merge(node['gitlab']['mailroom'].to_hash)
43: end
44:
Compiled Resource:
runit_service("mailroom") do
action [:enable]
default_guard_interpreter :default
declared_type :runit_service
cookbook_name "gitlab"
recipe_name "mailroom"
service_name "mailroom"
options {:env_dir=>"/opt/gitlab/sv/mailroom/env", :user=>"git", :groupname=>"git", :log_directory=>"/var/log/gitlab/mailroom", :mail_room_config=>"/opt/gitlab/embedded/service/gitlab-rails/config/mail_room.yml", :exit_log_format=>"plain"}
log_options {"svlogd_size"=>209715200, "svlogd_num"=>30, "svlogd_timeout"=>86400, "svlogd_filter"=>"gzip", "svlogd_udp"=>nil, "svlogd_prefix"=>nil, "udp_log_shipping_host"=>nil, "udp_log_shipping_hostname"=>nil, "udp_log_shipping_port"=>514, "logrotate_frequency"=>"daily", "logrotate_maxsize"=>nil, "logrotate_size"=>nil, "logrotate_rotate"=>30, "logrotate_compress"=>"compress", "logrotate_method"=>"copytruncate", "logrotate_postrotate"=>nil, "logrotate_dateformat"=>nil, "enable"=>true, "ha"=>false, "log_directory"=>"/var/log/gitlab/mailroom", "exit_log_format"=>"plain"}
run_template_name "mailroom"
finish true
log_template_name "mailroom"
check_script_template_name "mailroom"
finish_script_template_name "mailroom"
control_template_names {}
end
System Info:
chef_version=15.14.0
platform=ubuntu
platform_version=20.04
ruby=ruby 2.7.2p137 (2020-10-01 revision 5445e04352) [x86_64-linux]
program_name=/opt/gitlab/embedded/bin/chef-client
executable=/opt/gitlab/embedded/bin/chef-client
Recipe: gitlab::gitlab-rails
Thank you in advance for your help.