PG::DuplicateTable: ERROR: relation "features" already exists
Just attempted to update to 9.3.0-ce.0 and received the following error. Now my GitLab instance isn't accessible from the web, and I'm not sure what to do to fix it.
== 20170511083824 RenameServicesBuildEventsToJobEvents: migrating =============
-- transaction_open?()
-> 0.0000s
-- columns(:services)
-> 0.0034s
-- add_column(:services, :job_events, :boolean, {:limit=>nil, :precision=>nil, :scale=>nil})
-> 0.0016s
-- change_column_default(:services, :job_events, "false")
-> 0.0053s
-- quote_table_name(:services)
-> 0.0001s
-- quote_column_name(:build_events)
-> 0.0000s
-- quote_column_name(:job_events)
-> 0.0000s
-- execute("CREATE OR REPLACE FUNCTION trigger_b54960bfe339()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"job_events\" := NEW.\"build_events\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
-> 0.0011s
-- execute("CREATE TRIGGER trigger_b54960bfe339\nBEFORE INSERT OR UPDATE\nON \"services\"\nFOR EACH ROW\nEXECUTE PROCEDURE trigger_b54960bfe339()\n")
-> 0.0009s
-- exec_query("SELECT COUNT(*) AS count FROM \"services\"")
-> 0.0027s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" ORDER BY \"services\".\"id\" ASC LIMIT 1")
-> 0.0017s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 2 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 2 AND \"services\".\"id\" < 7")
-> 0.0011s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 7 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0006s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 7 AND \"services\".\"id\" < 12")
-> 0.0008s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 12 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0006s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 12 AND \"services\".\"id\" < 17")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 17 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0004s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 17 AND \"services\".\"id\" < 21")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 21 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 21 AND \"services\".\"id\" < 26")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 26 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 26 AND \"services\".\"id\" < 30")
-> 0.0012s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 30 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0006s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 30 AND \"services\".\"id\" < 35")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 35 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 35 AND \"services\".\"id\" < 39")
-> 0.0006s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 39 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0004s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 39 AND \"services\".\"id\" < 44")
-> 0.0008s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 44 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0010s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 44 AND \"services\".\"id\" < 48")
-> 0.0008s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 48 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 48 AND \"services\".\"id\" < 52")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 52 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0004s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 52 AND \"services\".\"id\" < 57")
-> 0.0006s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 57 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0004s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 57 AND \"services\".\"id\" < 61")
-> 0.0006s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 61 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 61 AND \"services\".\"id\" < 90")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 90 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0006s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 90 AND \"services\".\"id\" < 652")
-> 0.0007s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 652 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 652 AND \"services\".\"id\" < 1200")
-> 0.0009s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 1200 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 1200 AND \"services\".\"id\" < 1204")
-> 0.0006s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 1204 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0012s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 1204 AND \"services\".\"id\" < 1208")
-> 0.0008s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 1208 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0006s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 1208 AND \"services\".\"id\" < 1212")
-> 0.0006s
-- exec_query("SELECT \"services\".\"id\" FROM \"services\" WHERE \"services\".\"id\" >= 1212 ORDER BY \"services\".\"id\" ASC LIMIT 1 OFFSET 4")
-> 0.0005s
-- execute("UPDATE \"services\" SET \"job_events\" = \"services\".\"build_events\" WHERE \"services\".\"id\" >= 1212")
-> 0.0007s
-- change_column_null(:services, :job_events, false)
-> 0.0007s
-- indexes(:services)
-> 0.0023s
-- foreign_keys(:services)
-> 0.0031s
== 20170511083824 RenameServicesBuildEventsToJobEvents: migrated (0.0684s) ====
== 20170511100900 CleanupRenameWebHooksBuildEventsToJobEvents: migrating ======
-- execute("DROP TRIGGER trigger_688beaaec90d ON web_hooks")
-> 0.0008s
rake aborted!
StandardError: An error has occurred, this and all later migrations canceled:
PG::DuplicateTable: ERROR: relation "features" already exists
: CREATE TABLE "features" ("id" serial primary key, "key" character varying NOT NULL, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL) /opt/gitlab/embedded/service/gitlab-rails/db/migrate/20170525174156_create_feature_tables.rb:7:in `up'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:50:in `block (3 levels) in <main>'
/opt/gitlab/embedded/bin/bundle:22:in `load'
/opt/gitlab/embedded/bin/bundle:22:in `<main>'
ActiveRecord::StatementInvalid: PG::DuplicateTable: ERROR: relation "features" already exists
: CREATE TABLE "features" ("id" serial primary key, "key" character varying NOT NULL, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL)
/opt/gitlab/embedded/service/gitlab-rails/db/migrate/20170525174156_create_feature_tables.rb:7:in `up'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:50:in `block (3 levels) in <main>'
/opt/gitlab/embedded/bin/bundle:22:in `load'
/opt/gitlab/embedded/bin/bundle:22:in `<main>'
PG::DuplicateTable: ERROR: relation "features" already exists
/opt/gitlab/embedded/service/gitlab-rails/db/migrate/20170525174156_create_feature_tables.rb:7:in `up'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:50:in `block (3 levels) in <main>'
/opt/gitlab/embedded/bin/bundle:22:in `load'
/opt/gitlab/embedded/bin/bundle:22:in `<main>'
Tasks: TOP => db:migrate
(See full trace by running task with --trace)
-- execute("DROP FUNCTION trigger_688beaaec90d()")
-> 0.0046s
-- remove_column(:web_hooks, :build_events)
-> 0.0016s
== 20170511100900 CleanupRenameWebHooksBuildEventsToJobEvents: migrated (0.0073s)
== 20170511101000 CleanupRenameServicesBuildEventsToJobEvents: migrating ======
-- execute("DROP TRIGGER trigger_b54960bfe339 ON services")
-> 0.0011s
-- execute("DROP FUNCTION trigger_b54960bfe339()")
-> 0.0007s
-- remove_column(:services, :build_events)
-> 0.0011s
== 20170511101000 CleanupRenameServicesBuildEventsToJobEvents: migrated (0.0032s)
== 20170519102115 AddPrometheusSettingsToMetricsSettings: migrating ===========
-- transaction_open?()
-> 0.0001s
-- execute("SET statement_timeout TO 0")
-> 0.0005s
-- transaction()
-- add_column(:application_settings, :prometheus_metrics_enabled, :boolean, {:default=>nil})
-> 0.0014s
-- change_column_default(:application_settings, :prometheus_metrics_enabled, false)
-> 0.0074s
-> 0.0100s
-- exec_query("SELECT COUNT(*) AS count FROM \"application_settings\"")
-> 0.0008s
-- exec_query("SELECT \"application_settings\".\"id\" FROM \"application_settings\" ORDER BY \"application_settings\".\"id\" ASC LIMIT 1")
-> 0.0005s
-- exec_query("SELECT \"application_settings\".\"id\" FROM \"application_settings\" WHERE \"application_settings\".\"id\" >= 1 ORDER BY \"application_settings\".\"id\" ASC LIMIT 1 OFFSET 1")
-> 0.0011s
-- execute("UPDATE \"application_settings\" SET \"prometheus_metrics_enabled\" = 'f' WHERE \"application_settings\".\"id\" >= 1")
-> 0.0009s
-- change_column_null(:application_settings, :prometheus_metrics_enabled, false)
-> 0.0008s
== 20170519102115 AddPrometheusSettingsToMetricsSettings: migrated (0.0233s) ==
== 20170521184006 AddChangePositionToNotes: migrating =========================
-- add_column(:notes, :change_position, :text)
-> 0.0015s
== 20170521184006 AddChangePositionToNotes: migrated (0.0016s) ================
== 20170523083112 MigrateOldArtifacts: migrating ==============================
== 20170523083112 MigrateOldArtifacts: migrated (0.0477s) =====================
== 20170523091700 AddRssTokenToUsers: migrating ===============================
-- add_column(:users, :rss_token, :string)
-> 0.0020s
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0003s
-- add_index(:users, :rss_token, {:algorithm=>:concurrently})
-> 0.0055s
== 20170523091700 AddRssTokenToUsers: migrated (0.0082s) ======================
== 20170523121229 CreateConversationalDevelopmentIndexMetrics: migrating ======
-- create_table(:conversational_development_index_metrics)
-> 0.0057s
== 20170523121229 CreateConversationalDevelopmentIndexMetrics: migrated (0.0058s)
== 20170524125940 AddSourceToCiPipeline: migrating ============================
-- add_column(:ci_pipelines, :source, :integer)
-> 0.0012s
== 20170524125940 AddSourceToCiPipeline: migrated (0.0013s) ===================
== 20170524161101 AddProtectedToCiVariables: migrating ========================
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0006s
-- transaction()
-- add_column(:ci_variables, :protected, :boolean, {:default=>nil})
-> 0.0020s
-- change_column_default(:ci_variables, :protected, false)
-> 0.0025s
-> 0.0057s
-- exec_query("SELECT COUNT(*) AS count FROM \"ci_variables\"")
-> 0.0009s
-- exec_query("SELECT \"ci_variables\".\"id\" FROM \"ci_variables\" ORDER BY \"ci_variables\".\"id\" ASC LIMIT 1")
-> 0.0007s
-- exec_query("SELECT \"ci_variables\".\"id\" FROM \"ci_variables\" WHERE \"ci_variables\".\"id\" >= 3 ORDER BY \"ci_variables\".\"id\" ASC LIMIT 1 OFFSET 1")
-> 0.0006s
-- execute("UPDATE \"ci_variables\" SET \"protected\" = 'f' WHERE \"ci_variables\".\"id\" >= 3 AND \"ci_variables\".\"id\" < 4")
-> 0.0008s
-- exec_query("SELECT \"ci_variables\".\"id\" FROM \"ci_variables\" WHERE \"ci_variables\".\"id\" >= 4 ORDER BY \"ci_variables\".\"id\" ASC LIMIT 1 OFFSET 1")
-> 0.0004s
-- execute("UPDATE \"ci_variables\" SET \"protected\" = 'f' WHERE \"ci_variables\".\"id\" >= 4 AND \"ci_variables\".\"id\" < 5")
-> 0.0013s
-- exec_query("SELECT \"ci_variables\".\"id\" FROM \"ci_variables\" WHERE \"ci_variables\".\"id\" >= 5 ORDER BY \"ci_variables\".\"id\" ASC LIMIT 1 OFFSET 1")
-> 0.0004s
-- execute("UPDATE \"ci_variables\" SET \"protected\" = 'f' WHERE \"ci_variables\".\"id\" >= 5")
-> 0.0011s
-- change_column_null(:ci_variables, :protected, false)
-> 0.0007s
== 20170524161101 AddProtectedToCiVariables: migrated (0.0195s) ===============
== 20170525132202 CreatePipelineStages: migrating =============================
-- create_table(:ci_stages)
-> 0.0048s
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0003s
-- execute("ALTER TABLE ci_stages\nADD CONSTRAINT fk_2360681d1d\nFOREIGN KEY (project_id)\nREFERENCES projects (id)\nON DELETE cascade\nNOT VALID;\n")
-> 0.0011s
-- execute("ALTER TABLE ci_stages VALIDATE CONSTRAINT fk_2360681d1d;")
-> 0.0018s
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0002s
-- execute("ALTER TABLE ci_stages\nADD CONSTRAINT fk_fb57e6cc56\nFOREIGN KEY (pipeline_id)\nREFERENCES ci_pipelines (id)\nON DELETE cascade\nNOT VALID;\n")
-> 0.0011s
-- execute("ALTER TABLE ci_stages VALIDATE CONSTRAINT fk_fb57e6cc56;")
-> 0.0009s
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0002s
-- add_index(:ci_stages, :project_id, {:algorithm=>:concurrently})
-> 0.0034s
-- transaction_open?()
-> 0.0000s
-- execute("SET statement_timeout TO 0")
-> 0.0007s
-- add_index(:ci_stages, :pipeline_id, {:algorithm=>:concurrently})
-> 0.0033s
== 20170525132202 CreatePipelineStages: migrated (0.0189s) ====================
== 20170525174156 CreateFeatureTables: migrating ==============================
-- create_table(:features)
STDERR:
---- End output of "bash" "/tmp/chef-script20170624-9857-1vpg8m3" ----
Ran "bash" "/tmp/chef-script20170624-9857-1vpg8m3" returned 1
Resource Declaration:
---------------------
# In /opt/gitlab/embedded/cookbooks/cache/cookbooks/gitlab/recipes/database_migrations.rb
51: bash "migrate gitlab-rails database" do
52: code <<-EOH
53: set -e
54: log_file="#{node['gitlab']['gitlab-rails']['log_directory']}/gitlab-rails-db-migrate-$(date +%Y-%m-%d-%H-%M-%S).log"
55: umask 077
56: /opt/gitlab/bin/gitlab-rake gitlab:db:configure 2>& 1 | tee ${log_file}
57: STATUS=${PIPESTATUS[0]}
58: echo $STATUS > #{db_migrate_status_file}
59: exit $STATUS
60: EOH
61: environment env_variables unless env_variables.empty?
62: notifies :run, 'execute[enable pg_trgm extension]', :before if omnibus_helper.service_enabled?('postgresql')
63: notifies :run, "execute[clear the gitlab-rails cache]", :immediately
64: dependent_services.each do |svc|
65: notifies :restart, svc, :immediately
66: end
67: not_if "(test -f #{db_migrate_status_file}) && (cat #{db_migrate_status_file} | grep -Fx 0)"
68: only_if { node['gitlab']['gitlab-rails']['auto_migrate'] }
69: end
Compiled Resource:
------------------
# Declared in /opt/gitlab/embedded/cookbooks/cache/cookbooks/gitlab/recipes/database_migrations.rb:51:in `from_file'
bash("migrate gitlab-rails database") do
action [:run]
updated true
retries 0
retry_delay 2
default_guard_interpreter :default
command "migrate gitlab-rails database"
backup 5
returns 0
code " set -e\n log_file=\"/var/log/gitlab/gitlab-rails/gitlab-rails-db-migrate-$(date +%Y-%m-%d-%H-%M-%S).log\"\n umask 077\n /opt/gitlab/bin/gitlab-rake gitlab:db:configure 2>& 1 | tee ${log_file}\n STATUS=${PIPESTATUS[0]}\n echo $STATUS > /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-acb7f25\n exit $STATUS\n"
interpreter "bash"
declared_type :bash
cookbook_name "gitlab"
recipe_name "database_migrations"
not_if "(test -f /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-acb7f25) && (cat /var/opt/gitlab/gitlab-rails/upgrade-status/db-migrate-873248b1f0d3a7a5535771a3a1635803-acb7f25 | grep -Fx 0)"
only_if { #code block }
end
Platform:
---------
x86_64-linux
Recipe: gitlab::gitlab-rails
* execute[clear the gitlab-rails cache] action run
- execute /opt/gitlab/bin/gitlab-rake cache:clear
Running handlers:
Running handlers complete
Chef Client failed. 20 resources updated in 27 seconds
dpkg: error processing package gitlab-ce (--configure):
subprocess installed post-installation script returned error exit status 1
Errors were encountered while processing:
gitlab-ce
E: Sub-process /usr/bin/dpkg returned an error code (1)
Edited by Brian Gilbert