From 52c3b8f31264230814d2ffa79d0987c1491676b3 Mon Sep 17 00:00:00 2001
From: Grzegorz Bizon <grzegorz@gitlab.com>
Date: Thu, 8 Jun 2017 05:29:35 +0000
Subject: [PATCH 01/14] Merge branch 'zj-object-store-artifacts' into 'master'

Object store for artifacts
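
The storage backend is now selected per record via the new
`artifacts_file_store` / `artifacts_metadata_store` columns, and callers no
longer read `artifacts_file.path` directly. A minimal sketch of the new
access pattern, using names from this diff (the rubyzip consumer is only an
assumed example, not part of this change):

```ruby
require 'zip'

# `use_file` yields a local filesystem path for both backends: the real
# path when the file is stored locally, or a temporary cached copy for
# remote storage that is deleted once the block returns.
build.artifacts_file.use_file do |path|
  Zip::File.open(path) do |zip|
    zip.each { |entry| puts entry.name }
  end
end
```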

Closes gitlab-ce#29203

See merge request !1762
---
 app/models/ci/build.rb                        |  20 +-
 app/services/projects/update_pages_service.rb |  30 ++-
 app/uploaders/artifact_uploader.rb            |  18 +-
 app/uploaders/object_store_uploader.rb        | 139 +++++++++++
 .../projects/artifacts/_tree_file.html.haml   |   9 +-
 app/views/projects/jobs/_sidebar.html.haml    |   4 +-
 ...w-to-store-artifacts-on-object-storage.yml |   4 +
 config/gitlab.yml.example                     |   8 +
 config/initializers/1_settings.rb             |   6 +
 ...1163708_add_artifacts_store_to_ci_build.rb |  17 ++
 db/schema.rb                                  |   2 +
 doc/administration/job_artifacts.md           |  37 ++-
 lib/api/helpers.rb                            |   2 +-
 lib/ci/api/builds.rb                          |   2 +-
 lib/tasks/gitlab/artifacts.rake               |  19 ++
 spec/factories/ci/builds.rb                   |   5 +
 .../import_export/safe_model_attributes.yml   |   2 +
 spec/models/ci/build_spec.rb                  |  44 ++++
 spec/requests/api/jobs_spec.rb                |  68 ++++--
 spec/requests/api/runner_spec.rb              |  29 ++-
 spec/requests/api/v3/builds_spec.rb           |  61 +++--
 spec/requests/ci/api/builds_spec.rb           |  29 ++-
 spec/services/ci/retry_build_service_spec.rb  |   3 +-
 spec/support/stub_artifacts.rb                |  26 ++
 spec/uploaders/artifact_uploader_spec.rb      |  27 +-
 spec/uploaders/object_store_uploader_spec.rb  | 231 ++++++++++++++++++
 26 files changed, 740 insertions(+), 102 deletions(-)
 create mode 100644 app/uploaders/object_store_uploader.rb
 create mode 100644 changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
 create mode 100644 db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
 create mode 100644 lib/tasks/gitlab/artifacts.rake
 create mode 100644 spec/support/stub_artifacts.rb
 create mode 100644 spec/uploaders/object_store_uploader_spec.rb
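
Note on the storage dispatch added here, as a hedged sketch (constants and
accessors are from this diff; `build` is assumed to be a `Ci::Build` with
artifacts):

```ruby
uploader = build.artifacts_file  # => ArtifactUploader < ObjectStoreUploader
uploader.object_store            # reads build.artifacts_file_store

# New uploads default to LOCAL_STORE (1), matching the
# add_column_with_default migration; REMOTE_STORE (2) is only set once
# migrate! has moved the file and saved the record.
uploader.migrate!(ObjectStoreUploader::REMOTE_STORE)
```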

diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 432f3f242eb4..39d0647b182f 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -295,17 +295,27 @@ def artifacts?
       !artifacts_expired? && artifacts_file.exists?
     end
 
+    def browsable_artifacts?
+      artifacts_metadata?
+    end
+
+    def downloadable_single_artifacts_file?
+      artifacts_metadata? && artifacts_file.file_storage?
+    end
+
     def artifacts_metadata?
       artifacts? && artifacts_metadata.exists?
     end
 
     def artifacts_metadata_entry(path, **options)
-      metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
-        artifacts_metadata.path,
-        path,
-        **options)
+      artifacts_metadata.use_file do |metadata_path|
+        metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+          metadata_path,
+          path,
+          **options)
 
-      metadata.to_entry
+        metadata.to_entry
+      end
     end
 
     def erase_artifacts!
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index e60b854f916c..3c7c9d421bcb 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -65,9 +65,9 @@ def create_status
     end
 
     def extract_archive!(temp_path)
-      if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
+      if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
         extract_tar_archive!(temp_path)
-      elsif artifacts.ends_with?('.zip')
+      elsif artifacts_filename.ends_with?('.zip')
         extract_zip_archive!(temp_path)
       else
         raise 'unsupported artifacts format'
@@ -75,11 +75,13 @@ def extract_archive!(temp_path)
     end
 
     def extract_tar_archive!(temp_path)
-      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
-                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
-                               %W(tar -x -C #{temp_path} #{SITE_PATH}),
-                               err: '/dev/null')
-      raise 'pages failed to extract' unless results.compact.all?(&:success?)
+      build.artifacts_file.use_file do |artifacts_path|
+        results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
+                                %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+                                %W(tar -x -C #{temp_path} #{SITE_PATH}),
+                                err: '/dev/null')
+        raise 'pages failed to extract' unless results.compact.all?(&:success?)
+      end
     end
 
     def extract_zip_archive!(temp_path)
@@ -97,8 +99,10 @@ def extract_zip_archive!(temp_path)
       # -n  never overwrite existing files
       # We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
       site_path = File.join(SITE_PATH, '*')
-      unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
-        raise 'pages failed to extract'
+      build.artifacts_file.use_file do |artifacts_path|
+        unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+          raise 'pages failed to extract'
+        end
       end
     end
 
@@ -129,6 +133,10 @@ def blocks
       1 + max_size / BLOCK_SIZE
     end
 
+    def artifacts_filename
+      build.artifacts_file.filename
+    end
+
     def max_size
       current_application_settings.max_pages_size.megabytes || MAX_SIZE
     end
@@ -153,10 +161,6 @@ def ref
       build.ref
     end
 
-    def artifacts
-      build.artifacts_file.path
-    end
-
     def latest_sha
       project.commit(build.ref).try(:sha).to_s
     end
diff --git a/app/uploaders/artifact_uploader.rb b/app/uploaders/artifact_uploader.rb
index 14addb6cf143..0bd2bd4f422c 100644
--- a/app/uploaders/artifact_uploader.rb
+++ b/app/uploaders/artifact_uploader.rb
@@ -1,7 +1,5 @@
-class ArtifactUploader < GitlabUploader
-  storage :file
-
-  attr_reader :job, :field
+class ArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts
 
   def self.local_artifacts_store
     Gitlab.config.artifacts.path
@@ -11,12 +9,12 @@ def self.artifacts_upload_path
     File.join(self.local_artifacts_store, 'tmp/uploads/')
   end
 
-  def initialize(job, field)
-    @job, @field = job, field
-  end
-
   def store_dir
-    default_local_path
+    if file_storage?
+      default_local_path
+    else
+      default_path
+    end
   end
 
   def cache_dir
@@ -34,6 +32,6 @@ def default_local_path
   end
 
   def default_path
-    File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+    File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
   end
 end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
new file mode 100644
index 000000000000..32d4d31b37c0
--- /dev/null
+++ b/app/uploaders/object_store_uploader.rb
@@ -0,0 +1,139 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+class ObjectStoreUploader < GitlabUploader
+  before :store, :set_default_local_store
+  before :store, :verify_license!
+
+  LOCAL_STORE = 1
+  REMOTE_STORE = 2
+
+  class << self
+    def storage_options(options)
+      @storage_options = options
+    end
+
+    def object_store_options
+      @storage_options&.object_store
+    end
+
+    def object_store_enabled?
+      object_store_options&.enabled
+    end
+  end
+
+  attr_reader :subject, :field
+
+  def initialize(subject, field)
+    @subject = subject
+    @field = field
+  end
+
+  def object_store
+    subject.public_send(:"#{field}_store")
+  end
+
+  def object_store=(value)
+    @storage = nil
+    subject.public_send(:"#{field}_store=", value)
+  end
+
+  def use_file
+    if file_storage?
+      return yield path
+    end
+
+    begin
+      cache_stored_file!
+      yield cache_path
+    ensure
+      cache_storage.delete_dir!(cache_path(nil))
+    end
+  end
+
+  def filename
+    super || file&.filename
+  end
+
+  def migrate!(new_store)
+    raise 'Undefined new store' unless new_store
+
+    return if object_store == new_store
+    return unless file
+
+    old_file = file
+    old_store = object_store
+
+    # to move a remote file, we first need to store it locally
+    cache_stored_file! unless file_storage?
+
+    # change storage
+    self.object_store = new_store
+
+    storage.store!(file).tap do |new_file|
+      # since we changed the storage, persist the new store value;
+      # in case of failure, delete the new file
+      begin
+        subject.save!
+      rescue => e
+        new_file.delete
+        self.object_store = old_store
+        raise e
+      end
+
+      old_file.delete
+    end
+  end
+
+  def fog_directory
+    self.class.object_store_options.remote_directory
+  end
+
+  def fog_credentials
+    self.class.object_store_options.connection
+  end
+
+  def fog_public
+    false
+  end
+
+  def move_to_store
+    file.try(:storage) == storage
+  end
+
+  def move_to_cache
+    file.try(:storage) == cache_storage
+  end
+
+  # We block storing artifacts on Object Storage, but not receiving them
+  def verify_license!(new_file)
+    return if file_storage?
+
+    raise 'Object Storage feature is missing' unless subject.project.feature_available?(:object_storage)
+  end
+
+  private
+
+  def set_default_local_store(new_file)
+    self.object_store = LOCAL_STORE unless self.object_store
+  end
+
+  def storage
+    @storage ||=
+      if object_store == REMOTE_STORE
+        remote_storage
+      else
+        local_storage
+      end
+  end
+
+  def remote_storage
+    raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+    CarrierWave::Storage::Fog.new(self)
+  end
+
+  def local_storage
+    CarrierWave::Storage::File.new(self)
+  end
+end
diff --git a/app/views/projects/artifacts/_tree_file.html.haml b/app/views/projects/artifacts/_tree_file.html.haml
index 8edb9be049a1..28f7a52df250 100644
--- a/app/views/projects/artifacts/_tree_file.html.haml
+++ b/app/views/projects/artifacts/_tree_file.html.haml
@@ -1,10 +1,13 @@
-- path_to_file = file_project_job_artifacts_path(@project, @build, path: file.path)
+- path_to_file = file_namespace_project_job_artifacts_path(@project.namespace, @project, @build, path: file.path) if @build.downloadable_single_artifacts_file?
 
 %tr.tree-item{ 'data-link' => path_to_file }
   - blob = file.blob
   %td.tree-item-file-name
     = tree_icon('file', blob.mode, blob.name)
-    = link_to path_to_file do
-      %span.str-truncated= blob.name
+    %span.str-truncated
+      - if path_to_file
+        = link_to file.name, path_to_file
+      - else
+        = file.name
   %td
     = number_to_human_size(blob.size, precision: 2)
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index bddb587ddc63..408c1511683b 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -32,8 +32,8 @@
             = link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
               Download
 
-            - if @build.artifacts_metadata?
-              = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
+            - if @build.browsable_artifacts?
+              = link_to browse_namespace_project_job_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
                 Browse
 
     - if @build.trigger_request
diff --git a/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
new file mode 100644
index 000000000000..3f1499a3ffe9
--- /dev/null
+++ b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
@@ -0,0 +1,4 @@
+---
+title: Allow storing artifacts on object storage
+merge_request:
+author:
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 221e3d6e03bc..28e9a5f420ad 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -138,6 +138,14 @@ production: &base
     enabled: true
     # The location where build artifacts are stored (default: shared/artifacts).
     # path: shared/artifacts
+    # object_store:
+    #   enabled: false
+    #   remote_directory: artifacts
+    #   connection:
+    #     provider: AWS # Only AWS supported at the moment
+    #     aws_access_key_id: AWS_ACCESS_KEY_ID
+    #     aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+    #     region: eu-central-1
 
   ## Git LFS
   lfs:
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index fa33e602e936..319af2e0b667 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -268,6 +268,12 @@ def cron_random_weekly_time
 Settings.artifacts['path']         = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
 Settings.artifacts['max_size']   ||= 100 # in megabytes
 
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+# Convert upload connection settings to use symbol keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_symbolize_keys!
+
 #
 # Registry
 #
diff --git a/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
new file mode 100644
index 000000000000..deba890a478a
--- /dev/null
+++ b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
@@ -0,0 +1,17 @@
+class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
+    add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
+  end
+
+  def down
+    remove_column(:ci_builds, :artifacts_file_store)
+    remove_column(:ci_builds, :artifacts_metadata_store)
+  end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 3dbe52c9c809..69e2a8cfd703 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -238,6 +238,8 @@
     t.integer "auto_canceled_by_id"
     t.boolean "retried"
     t.integer "stage_id"
+    t.integer "artifacts_file_store", default: 1, null: false
+    t.integer "artifacts_metadata_store", default: 1, null: false
   end
 
   add_index "ci_builds", ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 3587696225cd..582f3c67b0dd 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -85,12 +85,41 @@ _The artifacts are stored by default in
 
 1. Save the file and [restart GitLab][] for the changes to take effect.
 
-### Using object storage
+---
+
+**Using Object Store**
+
+The methods mentioned previously use the local disk to store artifacts.
+Alternatively, you can use an object store such as AWS S3. To do so,
+configure the `object_store` section in your `gitlab.yml`. This requires
+valid AWS credentials to already be configured.
 
-In [GitLab Enterprise Edition Premium][eep] you can use an object storage like
-AWS S3 to store the artifacts.
+```yaml
+artifacts:
+  enabled: true
+  path: /mnt/storage/artifacts
+  object_store:
+    enabled: true
+    remote_directory: my-bucket-name
+    connection:
+      provider: AWS
+      aws_access_key_id: S3_KEY_ID
+      aws_secret_access_key: S3_SECRET_KEY
+      region: eu-central-1
+```
+
+This allows you to migrate existing artifacts to the object store,
+but all new artifacts are still stored on the local disk.
+A future release will add an option to choose the default storage backend
+for new artifacts. For now, the migration has to be executed manually:
+
+```bash
+gitlab-rake gitlab:artifacts:migrate
+```
 
-[Learn how to use the object storage option.][ee-os]
+Please note that with this feature enabled, artifacts are _not_ browsable
+through the web interface anymore. This limitation will be removed in one
+of the upcoming releases.
 
 ## Expiring artifacts
 
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 0f4791841d26..7003390113b2 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -318,7 +318,7 @@ def present_artifacts!(artifacts_file)
       if artifacts_file.file_storage?
         present_file!(artifacts_file.path, artifacts_file.filename)
       else
-        redirect_to(artifacts_file.url)
+        redirect(artifacts_file.url)
       end
     end
 
diff --git a/lib/ci/api/builds.rb b/lib/ci/api/builds.rb
index e2e91ce99cdd..ecbdfb3e4b2f 100644
--- a/lib/ci/api/builds.rb
+++ b/lib/ci/api/builds.rb
@@ -192,7 +192,7 @@ class Builds < Grape::API
           end
 
           unless artifacts_file.file_storage?
-            return redirect_to build.artifacts_file.url
+            return redirect(build.artifacts_file.url)
           end
 
           present_file!(artifacts_file.path, artifacts_file.filename)
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
new file mode 100644
index 000000000000..5676456b2a0d
--- /dev/null
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -0,0 +1,19 @@
+desc "GitLab | Migrate files for artifacts to comply with new storage format"
+namespace :gitlab do
+  namespace :artifacts do
+    task migrate: :environment do
+      puts 'Artifacts'.color(:yellow)
+      Ci::Build.joins(:project).with_artifacts
+        .where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
+        .find_each(batch_size: 100) do |build|
+        begin
+          build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
+          build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+          print '.'
+        rescue
+          print 'F'
+        end
+      end
+    end
+  end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index a77f01ecb002..796997248756 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -163,6 +163,11 @@
       end
     end
 
+    trait :remote_store do
+      artifacts_file_store ArtifactUploader::REMOTE_STORE
+      artifacts_metadata_store ArtifactUploader::REMOTE_STORE
+    end
+
     trait :artifacts_expired do
       after(:create) do |build, _|
         build.artifacts_file =
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 4ef3db3721f7..ac648301d18a 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -252,7 +252,9 @@ CommitStatus:
 - target_url
 - description
 - artifacts_file
+- artifacts_file_store
 - artifacts_metadata
+- artifacts_metadata_store
 - erased_by_id
 - erased_at
 - artifacts_expire_at
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 154b6759f46a..64aedb29816d 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -124,6 +124,50 @@
     end
   end
 
+  describe '#browsable_artifacts?' do
+    subject { build.browsable_artifacts? }
+
+    context 'artifacts metadata does not exist' do
+      before do
+        build.update_attributes(artifacts_metadata: nil)
+      end
+
+      it { is_expected.to be_falsy }
+    end
+
+    context 'artifacts metadata does exist' do
+      let(:build) { create(:ci_build, :artifacts) }
+
+      it { is_expected.to be_truthy }
+    end
+  end
+
+  describe '#downloadable_single_artifacts_file?' do
+    let(:build) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+
+    subject { build.downloadable_single_artifacts_file? }
+
+    before do
+      expect_any_instance_of(Ci::Build).to receive(:artifacts_metadata?).and_call_original
+    end
+
+    context 'artifacts are stored locally' do
+      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+      it { is_expected.to be_truthy }
+    end
+
+    context 'artifacts are stored remotely' do
+      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to be_falsey }
+    end
+  end
+
   describe '#artifacts_expired?' do
     subject { build.artifacts_expired? }
 
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 8d647eb1c7e5..548b612b5b0f 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,17 +1,17 @@
 require 'spec_helper'
 
 describe API::Jobs, :api do
-  let!(:project) do
+  let(:project) do
     create(:project, :repository, public_builds: false)
   end
 
-  let!(:pipeline) do
+  let(:pipeline) do
     create(:ci_empty_pipeline, project: project,
                                sha: project.commit.id,
                                ref: project.default_branch)
   end
 
-  let!(:job) { create(:ci_build, pipeline: pipeline) }
+  let(:job) { create(:ci_build, pipeline: pipeline) }
 
   let(:user) { create(:user) }
   let(:api_user) { user }
@@ -26,6 +26,7 @@
     let(:query) { Hash.new }
 
     before do
+      job
       get api("/projects/#{project.id}/jobs", api_user), query
     end
 
@@ -89,6 +90,7 @@
     let(:query) { Hash.new }
 
     before do
+      job
       get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
     end
 
@@ -190,30 +192,41 @@
 
   describe 'GET /projects/:id/jobs/:job_id/artifacts' do
     before do
+      stub_artifacts_object_storage
       get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
     end
 
     context 'job with artifacts' do
-      let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+      context 'when artifacts are stored locally' do
+        let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
 
-      context 'authorized user' do
-        let(:download_headers) do
-          { 'Content-Transfer-Encoding' => 'binary',
-            'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+        context 'authorized user' do
+          let(:download_headers) do
+            { 'Content-Transfer-Encoding' => 'binary',
+              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+          end
+
+          it 'returns specific job artifacts' do
+            expect(response).to have_http_status(200)
+            expect(response.headers).to include(download_headers)
+            expect(response.body).to match_file(job.artifacts_file.file.file)
+          end
         end
 
-        it 'returns specific job artifacts' do
-          expect(response).to have_http_status(200)
-          expect(response.headers).to include(download_headers)
-          expect(response.body).to match_file(job.artifacts_file.file.file)
+        context 'unauthorized user' do
+          let(:api_user) { nil }
+
+          it 'does not return specific job artifacts' do
+            expect(response).to have_http_status(401)
+          end
         end
       end
 
-      context 'unauthorized user' do
-        let(:api_user) { nil }
+      context 'when artifacts are stored remotely' do
+        let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
 
-        it 'does not return specific job artifacts' do
-          expect(response).to have_http_status(401)
+        it 'returns location redirect' do
+          expect(response).to have_http_status(302)
         end
       end
     end
@@ -228,6 +241,7 @@
     let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
 
     before do
+      stub_artifacts_object_storage
       job.success
     end
 
@@ -283,14 +297,24 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
 
     context 'find proper job' do
       shared_examples 'a valid file' do
-        let(:download_headers) do
-          { 'Content-Transfer-Encoding' => 'binary',
-            'Content-Disposition' =>
-              "attachment; filename=#{job.artifacts_file.filename}" }
+        context 'when artifacts are stored locally' do
+          let(:download_headers) do
+            { 'Content-Transfer-Encoding' => 'binary',
+              'Content-Disposition' =>
+                "attachment; filename=#{job.artifacts_file.filename}" }
+          end
+
+          it { expect(response).to have_http_status(200) }
+          it { expect(response.headers).to include(download_headers) }
         end
 
-        it { expect(response).to have_http_status(200) }
-        it { expect(response.headers).to include(download_headers) }
+        context 'when artifacts are stored remotely' do
+          let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+          it 'returns location redirect' do
+            expect(response).to have_http_status(302)
+          end
+        end
       end
 
       context 'with regular branch' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index ca5d98c78ef8..358178f918da 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -185,7 +185,7 @@
     let(:project) { create(:empty_project, shared_runners_enabled: false) }
     let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
     let(:runner) { create(:ci_runner) }
-    let!(:job) do
+    let(:job) do
       create(:ci_build, :artifacts, :extended_options,
              pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
     end
@@ -200,6 +200,7 @@
       let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
 
       before do
+        job
         stub_container_registry_config(enabled: false)
       end
 
@@ -815,6 +816,7 @@ def force_patch_the_trace
       let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
 
       before do
+        stub_artifacts_object_storage
         job.run!
       end
 
@@ -1116,15 +1118,26 @@ def upload_artifacts(file, headers = {}, accelerated = true)
 
         context 'when job has artifacts' do
           let(:job) { create(:ci_build, :artifacts) }
-          let(:download_headers) do
-            { 'Content-Transfer-Encoding' => 'binary',
-              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
-          end
 
           context 'when using job token' do
-            it 'download artifacts' do
-              expect(response).to have_http_status(200)
-              expect(response.headers).to include download_headers
+            context 'when artifacts are stored locally' do
+              let(:download_headers) do
+                { 'Content-Transfer-Encoding' => 'binary',
+                  'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+              end
+
+              it 'downloads artifacts' do
+                expect(response).to have_http_status(200)
+                expect(response.headers).to include download_headers
+              end
+            end
+
+            context 'when artifacts are stored remotely' do
+              let(:job) { create(:ci_build, :artifacts, :remote_store) }
+
+              it 'returns location redirect' do
+                expect(response).to have_http_status(302)
+              end
             end
           end
 
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index dc95599546c8..37710aedbb1a 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -7,13 +7,14 @@
   let!(:developer) { create(:project_member, :developer, user: user, project: project) }
   let(:reporter) { create(:project_member, :reporter, project: project) }
   let(:guest) { create(:project_member, :guest, project: project) }
-  let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
-  let!(:build) { create(:ci_build, pipeline: pipeline) }
+  let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+  let(:build) { create(:ci_build, pipeline: pipeline) }
 
   describe 'GET /projects/:id/builds ' do
     let(:query) { '' }
 
     before do
+      build
       create(:ci_build, :skipped, pipeline: pipeline)
 
       get v3_api("/projects/#{project.id}/builds?#{query}", api_user)
@@ -87,6 +88,10 @@
   end
 
   describe 'GET /projects/:id/repository/commits/:sha/builds' do
+    before do
+      build
+    end
+
     context 'when commit does not exist in repository' do
       before do
         get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -187,22 +192,33 @@
 
   describe 'GET /projects/:id/builds/:build_id/artifacts' do
     before do
+      stub_artifacts_object_storage
       get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
     end
 
     context 'job with artifacts' do
-      let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+      context 'when artifacts are stored locally' do
+        let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+        context 'authorized user' do
+          let(:download_headers) do
+            { 'Content-Transfer-Encoding' => 'binary',
+              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+          end
 
-      context 'authorized user' do
-        let(:download_headers) do
-          { 'Content-Transfer-Encoding' => 'binary',
-            'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+          it 'returns specific job artifacts' do
+            expect(response).to have_http_status(200)
+            expect(response.headers).to include(download_headers)
+            expect(response.body).to match_file(build.artifacts_file.file.file)
+          end
         end
+      end
 
-        it 'returns specific job artifacts' do
-          expect(response).to have_http_status(200)
-          expect(response.headers).to include(download_headers)
-          expect(response.body).to match_file(build.artifacts_file.file.file)
+      context 'when artifacts are stored remotely' do
+        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+        it 'returns location redirect' do
+          expect(response).to have_http_status(302)
         end
       end
 
@@ -225,6 +241,7 @@
     let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
 
     before do
+      stub_artifacts_object_storage
       build.success
     end
 
@@ -280,14 +297,24 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
 
     context 'find proper job' do
       shared_examples 'a valid file' do
-        let(:download_headers) do
-          { 'Content-Transfer-Encoding' => 'binary',
-            'Content-Disposition' =>
-              "attachment; filename=#{build.artifacts_file.filename}" }
+        context 'when artifacts are stored locally' do
+          let(:download_headers) do
+            { 'Content-Transfer-Encoding' => 'binary',
+              'Content-Disposition' =>
+                "attachment; filename=#{build.artifacts_file.filename}" }
+          end
+
+          it { expect(response).to have_http_status(200) }
+          it { expect(response.headers).to include(download_headers) }
         end
 
-        it { expect(response).to have_http_status(200) }
-        it { expect(response.headers).to include(download_headers) }
+        context 'when artifacts are stored remotely' do
+          let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+          it 'returns location redirect' do
+            expect(response).to have_http_status(302)
+          end
+        end
       end
 
       context 'with regular branch' do
diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb
index c969d08d0ddd..76a9cf11ad67 100644
--- a/spec/requests/ci/api/builds_spec.rb
+++ b/spec/requests/ci/api/builds_spec.rb
@@ -470,6 +470,7 @@ def force_patch_the_trace
       let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) }
 
       before do
+        stub_artifacts_object_storage
         build.run!
       end
 
@@ -807,16 +808,26 @@ def upload_artifacts(file, headers = {}, accelerated = true)
         end
 
         context 'build has artifacts' do
-          let(:build) { create(:ci_build, :artifacts) }
-          let(:download_headers) do
-            { 'Content-Transfer-Encoding' => 'binary',
-              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
-          end
-
           shared_examples 'having downloadable artifacts' do
-            it 'download artifacts' do
-              expect(response).to have_http_status(200)
-              expect(response.headers).to include download_headers
+            context 'when stored locally' do
+              let(:build) { create(:ci_build, :artifacts) }
+              let(:download_headers) do
+                { 'Content-Transfer-Encoding' => 'binary',
+                  'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+              end
+
+              it 'downloads artifacts' do
+                expect(response).to have_http_status(200)
+                expect(response.headers).to include download_headers
+              end
+            end
+
+            context 'when stored remotely' do
+              let(:build) { create(:ci_build, :artifacts, :remote_store) }
+
+              it 'redirects to artifacts file' do
+                expect(response).to have_http_status(302)
+              end
             end
           end
 
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index ef9927c59693..365ecad3a394 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,7 +22,8 @@
     %i[type lock_version target_url base_tags
        commit_id deployments erased_by_id last_deployment project_id
        runner_id tag_taggings taggings tags trigger_request_id
-       user_id auto_canceled_by_id retried].freeze
+       user_id auto_canceled_by_id retried
+       artifacts_file_store artifacts_metadata_store].freeze
 
   shared_examples 'build duplication' do
     let(:stage) do
diff --git a/spec/support/stub_artifacts.rb b/spec/support/stub_artifacts.rb
new file mode 100644
index 000000000000..d531be5b8e7f
--- /dev/null
+++ b/spec/support/stub_artifacts.rb
@@ -0,0 +1,26 @@
+module StubConfiguration
+  def stub_artifacts_object_storage(enabled: true)
+    Fog.mock!
+    allow(Gitlab.config.artifacts.object_store).to receive_messages(
+      enabled: enabled,
+      remote_directory: 'artifacts',
+      connection: {
+        provider: 'AWS',
+        aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+        aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
+        region: 'eu-central-1'
+      }
+    )
+
+    allow_any_instance_of(ArtifactUploader).to receive(:verify_license!) { true }
+
+    return unless enabled
+
+    ::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
+      begin
+        connection.directories.create(key: 'artifacts')
+      rescue Excon::Error::Conflict
+      end
+    end
+  end
+end
diff --git a/spec/uploaders/artifact_uploader_spec.rb b/spec/uploaders/artifact_uploader_spec.rb
index 2a3bd0e3bb25..f4ba4a8207fe 100644
--- a/spec/uploaders/artifact_uploader_spec.rb
+++ b/spec/uploaders/artifact_uploader_spec.rb
@@ -1,9 +1,10 @@
 require 'rails_helper'
 
 describe ArtifactUploader do
-  let(:job) { create(:ci_build) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job) { create(:ci_build, artifacts_file_store: store) }
   let(:uploader) { described_class.new(job, :artifacts_file) }
-  let(:path) { Gitlab.config.artifacts.path }
+  let(:local_path) { Gitlab.config.artifacts.path }
 
   describe '.local_artifacts_store' do
     subject { described_class.local_artifacts_store }
@@ -17,16 +18,30 @@
 
   describe '.artifacts_upload_path' do
     subject { described_class.artifacts_upload_path }
-
-    it { is_expected.to start_with(path) }
+
+    it { is_expected.to start_with(local_path) }
     it { is_expected.to end_with('tmp/uploads/') }
   end
 
   describe '#store_dir' do
     subject { uploader.store_dir }
 
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
+    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
+
+    context 'when using local storage' do
+      it { is_expected.to start_with(local_path) }
+      it { is_expected.to end_with(path) }
+    end
+
+    context 'when using remote storage' do
+      let(:store) { described_class::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to eq(path) }
+    end
   end
 
   describe '#cache_dir' do
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
new file mode 100644
index 000000000000..c6c7d47e703e
--- /dev/null
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -0,0 +1,231 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+describe ObjectStoreUploader do
+  let(:uploader_class) { Class.new(described_class) }
+  let(:object) { double }
+  let(:uploader) { uploader_class.new(object, :artifacts_file) }
+
+  describe '#object_store' do
+    it "calls artifacts_file_store on object" do
+      expect(object).to receive(:artifacts_file_store)
+
+      uploader.object_store
+    end
+  end
+
+  describe '#object_store=' do
+    it "calls artifacts_file_store= on object" do
+      expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+
+      uploader.object_store = described_class::REMOTE_STORE
+    end
+  end
+
+  context 'when using ArtifactsUploader' do
+    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+    let(:uploader) { job.artifacts_file }
+
+    context 'checking described_class' do
+      let(:store) { described_class::LOCAL_STORE }
+
+      it "uploader is of a described_class" do
+        expect(uploader).to be_a(described_class)
+      end
+    end
+
+    describe '#use_file' do
+      context 'when file is stored locally' do
+        let(:store) { described_class::LOCAL_STORE }
+
+        it "calls a regular path" do
+          expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
+        end
+      end
+
+      context 'when file is stored remotely' do
+        let(:store) { described_class::REMOTE_STORE }
+
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it "calls a cache path" do
+          expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
+        end
+      end
+    end
+
+    describe '#migrate!' do
+      let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+      let(:uploader) { job.artifacts_file }
+      let(:store) { described_class::LOCAL_STORE }
+
+      subject { uploader.migrate!(new_store) }
+
+      context 'when using the same storage' do
+        let(:new_store) { store }
+
+        it "to not migrate the storage" do
+          subject
+
+          expect(uploader.object_store).to eq(store)
+        end
+      end
+
+      context 'when migrating to local storage' do
+        let(:store) { described_class::REMOTE_STORE }
+        let(:new_store) { described_class::LOCAL_STORE }
+
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it "local file does not exist" do
+          expect(File.exist?(uploader.path)).to eq(false)
+        end
+
+        it "does migrate the file" do
+          subject
+
+          expect(uploader.object_store).to eq(new_store)
+          expect(File.exist?(uploader.path)).to eq(true)
+        end
+      end
+
+      context 'when migrating to remote storage' do
+        let(:new_store) { described_class::REMOTE_STORE }
+        let!(:current_path) { uploader.path }
+
+        it "file does exist" do
+          expect(File.exist?(current_path)).to eq(true)
+        end
+
+        context 'when storage is disabled' do
+          before do
+            stub_artifacts_object_storage(enabled: false)
+          end
+
+          it "to raise an error" do
+            expect { subject }.to raise_error(/Object Storage is not enabled/)
+          end
+        end
+
+        context 'when credentials are set' do
+          before do
+            stub_artifacts_object_storage
+          end
+
+          it "does migrate the file" do
+            subject
+
+            expect(uploader.object_store).to eq(new_store)
+            expect(File.exist?(current_path)).to eq(false)
+          end
+
+          it "does delete original file" do
+            subject
+
+            expect(File.exist?(current_path)).to eq(false)
+          end
+
+          context 'when subject save fails' do
+            before do
+              expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+            end
+
+            it "does catch an error" do
+              expect { subject }.to raise_error(/exception/)
+            end
+
+            it "original file is not removed" do
+              begin
+                subject
+              rescue
+              end
+
+              expect(File.exist?(current_path)).to eq(true)
+            end
+          end
+        end
+      end
+    end
+  end
+
+  describe '#fog_directory' do
+    let(:remote_directory) { 'directory' }
+
+    before do
+      uploader_class.storage_options double(
+        object_store: double(remote_directory: remote_directory))
+    end
+
+    subject { uploader.fog_directory }
+
+    it { is_expected.to eq(remote_directory) }
+  end
+
+  describe '#fog_credentials' do
+    let(:connection) { 'connection' }
+
+    before do
+      uploader_class.storage_options double(
+        object_store: double(connection: connection))
+    end
+
+    subject { uploader.fog_credentials }
+
+    it { is_expected.to eq(connection) }
+  end
+
+  describe '#fog_public' do
+    subject { uploader.fog_public }
+
+    it { is_expected.to eq(false) }
+  end
+
+  describe '#verify_license!' do
+    subject { uploader.verify_license!(nil) }
+
+    context 'when using local storage' do
+      before do
+        expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+      end
+
+      it "does not raise an error" do
+        expect { subject }.not_to raise_error
+      end
+    end
+
+    context 'when using remote storage' do
+      let(:project) { double }
+
+      before do
+        uploader_class.storage_options double(
+          object_store: double(enabled: true))
+        expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+        expect(object).to receive(:project) { project }
+      end
+
+      context 'feature is not available' do
+        before do
+          expect(project).to receive(:feature_available?).with(:object_storage) { false }
+        end
+
+        it "does raise an error" do
+          expect { subject }.to raise_error(/Object Storage feature is missing/)
+        end
+      end
+
+      context 'feature is available' do
+        before do
+          expect(project).to receive(:feature_available?).with(:object_storage) { true }
+        end
+
+        it "does not raise an error" do
+          expect { subject }.not_to raise_error
+        end
+      end
+    end
+  end
+end
-- 
GitLab


From bc76062774f01208403685965f4d780da4e03ebb Mon Sep 17 00:00:00 2001
From: Douwe Maan <douwe@gitlab.com>
Date: Thu, 7 Sep 2017 21:27:04 +0000
Subject: [PATCH 02/14] Merge branch 'jej/lfs-object-storage' into 'master'

Can migrate LFS objects to S3 style object storage
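
Stored files can now also be moved to object storage out of band. A hedged
sketch of the flow this patch adds (names are from the diff; the hook fires
after a local store when object storage and background upload are enabled):

```ruby
# The `after :store, :schedule_migration_to_object_storage` hook enqueues:
ObjectStorageUploadWorker.perform_async(
  'LfsObjectUploader', # uploader class name
  'LfsObject',         # subject class name
  :file,               # mounted uploader field
  lfs_object.id
)
# The worker re-checks object_store_enabled?, background_upload_enabled?
# and the license, then calls file.migrate!(LfsObjectUploader::REMOTE_STORE).
```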

Closes #2841

See merge request !2760
---
 app/controllers/concerns/send_file_upload.rb  | 14 +++
 .../projects/artifacts_controller.rb          |  7 +-
 .../projects/lfs_storage_controller.rb        |  3 +-
 app/controllers/projects/raw_controller.rb    |  3 +-
 app/models/ci/build.rb                        |  1 +
 app/models/lfs_object.rb                      |  2 +
 app/uploaders/artifact_uploader.rb            | 24 +-----
 app/uploaders/lfs_object_uploader.rb          | 21 +++--
 app/uploaders/object_store_uploader.rb        | 60 ++++++++++++-
 app/workers/object_storage_upload_worker.rb   | 19 +++++
 .../unreleased-ee/jej-lfs-object-storage.yml  |  5 ++
 config/gitlab.yml.example                     | 38 ++++++++-
 config/initializers/1_settings.rb             | 12 ++-
 config/sidekiq_queues.yml                     |  1 +
 ...825015534_add_file_store_to_lfs_objects.rb | 35 ++++++++
 db/schema.rb                                  |  1 +
 lib/backup/artifacts.rb                       |  2 +-
 lib/tasks/gitlab/artifacts.rake               | 10 ++-
 lib/tasks/gitlab/lfs.rake                     | 22 +++++
 .../projects/artifacts_controller_spec.rb     | 53 +++++++++---
 .../projects/raw_controller_spec.rb           | 56 +++++++-----
 spec/requests/api/jobs_spec.rb                |  6 +-
 spec/requests/lfs_http_spec.rb                | 68 ++++++++++++++-
 spec/support/stub_artifacts.rb                | 26 ------
 spec/support/stub_object_storage.rb           | 32 +++++++
 spec/tasks/gitlab/lfs_rake_spec.rb            | 37 ++++++++
 spec/uploaders/artifact_uploader_spec.rb      |  4 +-
 spec/uploaders/lfs_object_uploader_spec.rb    | 71 +++++++++++++++-
 spec/uploaders/object_store_uploader_spec.rb  |  7 +-
 .../object_storage_upload_worker_spec.rb      | 85 +++++++++++++++++++
 30 files changed, 610 insertions(+), 115 deletions(-)
 create mode 100644 app/controllers/concerns/send_file_upload.rb
 create mode 100644 app/workers/object_storage_upload_worker.rb
 create mode 100644 changelogs/unreleased-ee/jej-lfs-object-storage.yml
 create mode 100644 db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
 create mode 100644 lib/tasks/gitlab/lfs.rake
 delete mode 100644 spec/support/stub_artifacts.rb
 create mode 100644 spec/support/stub_object_storage.rb
 create mode 100644 spec/tasks/gitlab/lfs_rake_spec.rb
 create mode 100644 spec/workers/object_storage_upload_worker_spec.rb
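
Existing local files can also be migrated in bulk; a hedged sketch using the
scope and API added in this patch (the new rake task in
lib/tasks/gitlab/lfs.rake wraps roughly this loop):

```ruby
LfsObject.with_files_stored_locally.find_each(batch_size: 10) do |lfs_object|
  lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
end
```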

diff --git a/app/controllers/concerns/send_file_upload.rb b/app/controllers/concerns/send_file_upload.rb
new file mode 100644
index 000000000000..d4de4cf1fda6
--- /dev/null
+++ b/app/controllers/concerns/send_file_upload.rb
@@ -0,0 +1,14 @@
+module SendFileUpload
+  def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil)
+    if attachment
+      redirect_params[:query] = { "response-content-disposition" => "attachment;filename=#{attachment.inspect}" }
+      send_params.merge!(filename: attachment, disposition: 'attachment')
+    end
+
+    if file_upload.file_storage?
+      send_file file_upload.path, send_params
+    else
+      redirect_to file_upload.url(**redirect_params)
+    end
+  end
+end
diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index eb010923466d..a93b72a646b1 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -1,6 +1,7 @@
 class Projects::ArtifactsController < Projects::ApplicationController
   include ExtractsPath
   include RendersBlob
+  include SendFileUpload
 
   layout 'project'
   before_action :authorize_read_build!
@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
   before_action :entry, only: [:file]
 
   def download
-    if artifacts_file.file_storage?
-      send_file artifacts_file.path, disposition: 'attachment'
-    else
-      redirect_to artifacts_file.url
-    end
+    send_upload(artifacts_file, attachment: artifacts_file.filename)
   end
 
   def browse
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 32759672b6c0..134892b5d7b4 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -1,6 +1,7 @@
 class Projects::LfsStorageController < Projects::GitHttpClientController
   include LfsRequest
   include WorkhorseRequest
+  include SendFileUpload
 
   skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
 
@@ -11,7 +12,7 @@ def download
       return
     end
 
-    send_file lfs_object.file.path, content_type: "application/octet-stream"
+    send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
   end
 
   def upload_authorize
diff --git a/app/controllers/projects/raw_controller.rb b/app/controllers/projects/raw_controller.rb
index a02cc477e081..9bc774b76366 100644
--- a/app/controllers/projects/raw_controller.rb
+++ b/app/controllers/projects/raw_controller.rb
@@ -2,6 +2,7 @@
 class Projects::RawController < Projects::ApplicationController
   include ExtractsPath
   include BlobHelper
+  include SendFileUpload
 
   before_action :require_non_empty_project
   before_action :assign_ref_vars
@@ -31,7 +32,7 @@ def send_lfs_object
     lfs_object = find_lfs_object
 
     if lfs_object && lfs_object.project_allowed_access?(@project)
-      send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
+      send_upload(lfs_object.file, attachment: @blob.name)
     else
       render_404
     end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index c41355f5afff..1c8364b00da7 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -33,6 +33,7 @@ def persisted_environment
     scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
+    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index b7cf96abe832..0056b0f80f46 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -4,6 +4,8 @@ class LfsObject < ActiveRecord::Base
 
   validates :oid, presence: true, uniqueness: true
 
+  scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
+
   mount_uploader :file, LfsObjectUploader
 
   def storage_project(project)
diff --git a/app/uploaders/artifact_uploader.rb b/app/uploaders/artifact_uploader.rb
index 0bd2bd4f422c..f6e32aae2fdd 100644
--- a/app/uploaders/artifact_uploader.rb
+++ b/app/uploaders/artifact_uploader.rb
@@ -1,36 +1,16 @@
 class ArtifactUploader < ObjectStoreUploader
   storage_options Gitlab.config.artifacts
 
-  def self.local_artifacts_store
+  def self.local_store_path
     Gitlab.config.artifacts.path
   end
 
   def self.artifacts_upload_path
-    File.join(self.local_artifacts_store, 'tmp/uploads/')
-  end
-
-  def store_dir
-    if file_storage?
-      default_local_path
-    else
-      default_path
-    end
-  end
-
-  def cache_dir
-    File.join(self.class.local_artifacts_store, 'tmp/cache')
-  end
-
-  def work_dir
-    File.join(self.class.local_artifacts_store, 'tmp/work')
+    File.join(self.local_store_path, 'tmp/uploads/')
   end
 
   private
 
-  def default_local_path
-    File.join(self.class.local_artifacts_store, default_path)
-  end
-
   def default_path
     File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
   end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index d11ebf0f9ca4..8a5f599c1d34 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,19 +1,18 @@
-class LfsObjectUploader < GitlabUploader
-  storage :file
+class LfsObjectUploader < ObjectStoreUploader
+  storage_options Gitlab.config.lfs
+  after :store, :schedule_migration_to_object_storage
 
-  def store_dir
-    "#{Gitlab.config.lfs.storage_path}/#{model.oid[0, 2]}/#{model.oid[2, 2]}"
-  end
-
-  def cache_dir
-    "#{Gitlab.config.lfs.storage_path}/tmp/cache"
+  def self.local_store_path
+    Gitlab.config.lfs.storage_path
   end
 
   def filename
-    model.oid[4..-1]
+    subject.oid[4..-1]
   end
 
-  def work_dir
-    File.join(Gitlab.config.lfs.storage_path, 'tmp', 'work')
+  private
+
+  def default_path
+    "#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
   end
 end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
index 32d4d31b37c0..3a742d4f715e 100644
--- a/app/uploaders/object_store_uploader.rb
+++ b/app/uploaders/object_store_uploader.rb
@@ -20,6 +20,22 @@ def object_store_options
     def object_store_enabled?
       object_store_options&.enabled
     end
+
+    def background_upload_enabled?
+      object_store_options&.background_upload
+    end
+
+    def object_store_credentials
+      @object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
+    end
+
+    def object_store_directory
+      object_store_options&.remote_directory
+    end
+
+    def local_store_path
+      raise NotImplementedError
+    end
   end
 
   attr_reader :subject, :field
@@ -38,6 +54,14 @@ def object_store=(value)
     subject.public_send(:"#{field}_store=", value)
   end
 
+  def store_dir
+    if file_storage?
+      default_local_path
+    else
+      default_path
+    end
+  end
+
   def use_file
     if file_storage?
       return yield path
@@ -85,6 +109,12 @@ def migrate!(new_store)
     end
   end
 
+  def schedule_migration_to_object_storage(new_file)
+    if self.class.object_store_enabled? && licensed? && file_storage?
+      ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
+    end
+  end
+
   def fog_directory
     self.class.object_store_options.remote_directory
   end
@@ -109,7 +139,27 @@ def move_to_cache
   def verify_license!(new_file)
     return if file_storage?
 
-    raise 'Object Storage feature is missing' unless subject.project.feature_available?(:object_storage)
+    raise 'Object Storage feature is missing' unless licensed?
+  end
+
+  def exists?
+    file.try(:exists?)
+  end
+
+  def cache_dir
+    File.join(self.class.local_store_path, 'tmp/cache')
+  end
+
+  # Override this if you don't want to save local files by default to the Rails.root directory
+  def work_dir
+    # Default path set by CarrierWave:
+    # https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
+    # CarrierWave.tmp_path
+    File.join(self.class.local_store_path, 'tmp/work')
+  end
+
+  def licensed?
+    License.feature_available?(:object_storage)
   end
 
   private
@@ -118,6 +168,14 @@ def set_default_local_store(new_file)
     self.object_store = LOCAL_STORE unless self.object_store
   end
 
+  def default_local_path
+    File.join(self.class.local_store_path, default_path)
+  end
+
+  def default_path
+    raise NotImplementedError
+  end
+
   def storage
     @storage ||=
       if object_store == REMOTE_STORE
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
new file mode 100644
index 000000000000..0a374c4323f1
--- /dev/null
+++ b/app/workers/object_storage_upload_worker.rb
@@ -0,0 +1,19 @@
+class ObjectStorageUploadWorker
+  include Sidekiq::Worker
+  include DedicatedSidekiqQueue
+
+  def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+    uploader_class = uploader_class_name.constantize
+    subject_class = subject_class_name.constantize
+
+    return unless uploader_class.object_store_enabled?
+    return unless uploader_class.background_upload_enabled?
+
+    subject = subject_class.find(subject_id)
+    file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+
+    return unless file.licensed?
+
+    file.migrate!(uploader_class::REMOTE_STORE)
+  end
+end
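Sidekiq arguments must be JSON-serializable, which is why the worker receives class names and an id rather than live objects and reconstitutes them with constantize/find. The call site in schedule_migration_to_object_storage above reduces to something like:

    # Enqueue a background migration of a single LFS object's file.
    ObjectStorageUploadWorker.perform_async('LfsObjectUploader', 'LfsObject', :file, lfs_object.id)
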
diff --git a/changelogs/unreleased-ee/jej-lfs-object-storage.yml b/changelogs/unreleased-ee/jej-lfs-object-storage.yml
new file mode 100644
index 000000000000..c7a6388afb1e
--- /dev/null
+++ b/changelogs/unreleased-ee/jej-lfs-object-storage.yml
@@ -0,0 +1,5 @@
+---
+title: LFS files can be stored in remote object storage such as S3
+merge_request: 2760
+author:
+type: added
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 793ac0fdba9a..ec0d990aa0a1 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -147,7 +147,8 @@ production: &base
     # path: shared/artifacts
     # object_store:
     #   enabled: false
-    #   remote_directory: artifacts
+    #   remote_directory: artifacts # The bucket name
+    #   background_upload: false # Temporary option to disable automatic background upload (Default: true)
     #   connection:
     #     provider: AWS # Only AWS supported at the moment
     #     aws_access_key_id: AWS_ACCESS_KEY_ID
@@ -159,6 +160,19 @@ production: &base
     enabled: true
     # The location where LFS objects are stored (default: shared/lfs-objects).
     # storage_path: shared/lfs-objects
+    object_store:
+      enabled: false
+      remote_directory: lfs-objects # Bucket name
+      # background_upload: false # Temporary option to disable automatic background upload (Default: true)
+      connection:
+        provider: AWS
+        aws_access_key_id: AWS_ACCESS_KEY_ID
+        aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+        region: eu-central-1
+        # Use the following options to configure an AWS compatible host
+        # host: 'localhost' # default: s3.amazonaws.com
+        # endpoint: 'http://127.0.0.1:9000' # default: nil
+        # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
 
   ## GitLab Pages
   pages:
@@ -655,6 +669,28 @@ test:
     enabled: true
   lfs:
     enabled: false
+    # The location where LFS objects are stored (default: shared/lfs-objects).
+    # storage_path: shared/lfs-objects
+    object_store:
+      enabled: false
+      remote_directory: lfs-objects # The bucket name
+      connection:
+        provider: AWS # Only AWS supported at the moment
+        aws_access_key_id: AWS_ACCESS_KEY_ID
+        aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+        region: eu-central-1
+  artifacts:
+    enabled: true
+    # The location where build artifacts are stored (default: shared/artifacts).
+    # path: shared/artifacts
+    object_store:
+      enabled: false
+      remote_directory: artifacts # The bucket name
+      connection:
+        provider: AWS # Only AWS supported at the moment
+        aws_access_key_id: AWS_ACCESS_KEY_ID
+        aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+        region: eu-central-1
   gitlab:
     host: localhost
     port: 80
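The connection hash configured here is handed to Fog essentially as-is (see object_store_credentials in ObjectStoreUploader). For the AWS example above, the resulting storage connection is roughly this sketch:

    # Sketch of the Fog connection derived from the settings above.
    connection = ::Fog::Storage.new(
      provider:              'AWS',
      aws_access_key_id:     'AWS_ACCESS_KEY_ID',
      aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
      region:                'eu-central-1'
    )
    connection.directories.get('artifacts') # the configured remote_directory (bucket)
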
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 5729206774eb..e9893c0d4d69 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -302,8 +302,9 @@ def cron_random_weekly_time
 Settings.artifacts['object_store'] ||= Settingslogic.new({})
 Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
 Settings.artifacts['object_store']['remote_directory'] ||= nil
-# Convert upload connection settings to use symbol keys, to make Fog happy
-Settings.artifacts['object_store']['connection']&.deep_symbolize_keys!
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
 
 #
 # Registry
@@ -339,6 +340,13 @@ def cron_random_weekly_time
 Settings.lfs['enabled']      = true if Settings.lfs['enabled'].nil?
 Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
 
+Settings.lfs['object_store'] ||= Settingslogic.new({})
+Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.lfs['object_store']['connection']&.deep_stringify_keys!
+
 #
 # Mattermost
 #
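With these defaults applied at boot, the effective values can be checked from a Rails console, e.g.:

    Gitlab.config.lfs.object_store.enabled           # => false unless enabled in gitlab.yml
    Gitlab.config.lfs.object_store.background_upload # => true by default
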
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 24c001362c61..883ffdcba4b7 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -63,3 +63,4 @@
   - [update_user_activity, 1]
   - [propagate_service_template, 1]
   - [background_migration, 1]
+  - [object_storage_upload, 1]
diff --git a/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
new file mode 100644
index 000000000000..4d459ccab2c2
--- /dev/null
+++ b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
@@ -0,0 +1,35 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddFileStoreToLfsObjects < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  # Set this constant to true if this migration requires downtime.
+  DOWNTIME = false
+
+  # When a migration requires downtime you **must** uncomment the following
+  # constant and define a short and easy to understand explanation as to why the
+  # migration requires downtime.
+  # DOWNTIME_REASON = ''
+
+  # When using the methods "add_concurrent_index", "remove_concurrent_index" or
+  # "add_column_with_default" you must disable the use of transactions
+  # as these methods can not run in an existing transaction.
+  # When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
+  # that either of them is the _only_ method called in the migration,
+  # any other changes should go in a separate migration.
+  # This ensures that upon failure _only_ the index creation or removing fails
+  # and can be retried or reverted easily.
+  #
+  # To disable transactions uncomment the following line and remove these
+  # comments:
+  disable_ddl_transaction!
+
+  def up
+    add_column(:lfs_objects, :file_store, :integer)
+  end
+
+  def down
+    remove_column(:lfs_objects, :file_store)
+  end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 74634d14ccbb..9f293205a247 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -741,6 +741,7 @@
     t.datetime "created_at"
     t.datetime "updated_at"
     t.string "file"
+    t.integer "file_store"
   end
 
   add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 1f4bda6f588f..d9436e1d5e5a 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@
 module Backup
   class Artifacts < Files
     def initialize
-      super('artifacts', ArtifactUploader.local_artifacts_store)
+      super('artifacts', ArtifactUploader.local_store_path)
     end
 
     def create_files_dir
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index 5676456b2a0d..e079177eb3f8 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -2,10 +2,12 @@ desc "GitLab | Migrate files for artifacts to comply with new storage format"
 namespace :gitlab do
   namespace :artifacts do
     task migrate: :environment do
-      puts 'Artifacts'.color(:yellow)
-      Ci::Build.joins(:project).with_artifacts
-        .where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
-        .find_each(batch_size: 100) do |issue|
+      logger = Logger.new(STDOUT)
+      logger.info('Starting transfer of artifacts')
+
+      Ci::Build.joins(:project)
+        .with_artifacts_stored_locally
+        .find_each(batch_size: 10) do |build|
         begin
           build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
           build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
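Once object storage is configured and enabled, an administrator runs this migration manually:

    bundle exec rake gitlab:artifacts:migrate
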
diff --git a/lib/tasks/gitlab/lfs.rake b/lib/tasks/gitlab/lfs.rake
new file mode 100644
index 000000000000..c17c05f8589a
--- /dev/null
+++ b/lib/tasks/gitlab/lfs.rake
@@ -0,0 +1,22 @@
+require 'logger'
+
+desc "GitLab | Migrate LFS objects to remote storage"
+namespace :gitlab do
+  namespace :lfs do
+    task migrate: :environment do
+      logger = Logger.new(STDOUT)
+      logger.info('Starting transfer of LFS files to object storage')
+
+      LfsObject.with_files_stored_locally
+        .find_each(batch_size: 10) do |lfs_object|
+          begin
+            lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+
+            logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
+          rescue => e
+            logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
+          end
+        end
+    end
+  end
+end
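The LFS task is invoked the same way as the artifacts one:

    bundle exec rake gitlab:lfs:migrate
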
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index caa63e7bd22d..2bd8f8e2bfcc 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -22,7 +22,7 @@
 
   describe 'GET download' do
     it 'sends the artifacts file' do
-      expect(controller).to receive(:send_file).with(job.artifacts_file.path, disposition: 'attachment').and_call_original
+      expect(controller).to receive(:send_file).with(job.artifacts_file.path, hash_including(disposition: 'attachment')).and_call_original
 
       get :download, namespace_id: project.namespace, project_id: project, job_id: job
     end
@@ -66,19 +66,52 @@
 
   describe 'GET raw' do
     context 'when the file exists' do
-      it 'serves the file using workhorse' do
-        get :raw, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
+      let(:path) { 'ci_artifacts.txt' }
+      let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
 
-        send_data = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
+      shared_examples 'a valid file' do
+        it 'serves the file using workhorse' do
+          subject
 
-        expect(send_data).to start_with('artifacts-entry:')
+          expect(send_data).to start_with('artifacts-entry:')
 
-        base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
-        params = JSON.parse(Base64.urlsafe_decode64(base64_params))
+          expect(params.keys).to eq(%w(Archive Entry))
+          expect(params['Archive']).to start_with(archive_path)
+          # On object storage, the URL can end with a query string
+          expect(params['Archive']).to match(/build_artifacts.zip(\?[^?]+)?$/)
+          expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
+        end
+
+        def send_data
+          response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
+        end
 
-        expect(params.keys).to eq(%w(Archive Entry))
-        expect(params['Archive']).to end_with('build_artifacts.zip')
-        expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
+        def params
+          @params ||= begin
+            base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
+            JSON.parse(Base64.urlsafe_decode64(base64_params))
+          end
+        end
+      end
+
+      context 'when using local file storage' do
+        it_behaves_like 'a valid file' do
+          let(:store) { ObjectStoreUploader::LOCAL_STORE }
+          let(:archive_path) { ArtifactUploader.local_store_path }
+        end
+      end
+
+      context 'when using remote file storage' do
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it_behaves_like 'a valid file' do
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+          let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+          let(:store) { ObjectStorage::Store::REMOTE }
+          let(:archive_path) { 'https://' }
+        end
       end
     end
   end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b4eaab29fed5..7e3d6574e8d6 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -8,10 +8,7 @@
       let(:id) { 'master/README.md' }
 
       it 'delivers ASCII file' do
-        get(:show,
-            namespace_id: public_project.namespace.to_param,
-            project_id: public_project,
-            id: id)
+        get_show(public_project, id)
 
         expect(response).to have_http_status(200)
         expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -25,10 +22,7 @@
       let(:id) { 'master/files/images/6049019_460s.jpg' }
 
       it 'sets image content type header' do
-        get(:show,
-            namespace_id: public_project.namespace.to_param,
-            project_id: public_project,
-            id: id)
+        get_show(public_project, id)
 
         expect(response).to have_http_status(200)
         expect(response.header['Content-Type']).to eq('image/jpeg')
@@ -54,21 +48,40 @@
 
           it 'serves the file' do
             expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
-            get(:show,
-                namespace_id: public_project.namespace.to_param,
-                project_id: public_project,
-                id: id)
+            get_show(public_project, id)
 
             expect(response).to have_http_status(200)
           end
+
+          context 'and lfs uses object storage' do
+            before do
+              lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+              lfs_object.save!
+              stub_lfs_object_storage
+              lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+            end
+
+            it 'responds with redirect to file' do
+              get_show(public_project, id)
+
+              expect(response).to have_gitlab_http_status(302)
+              expect(response.location).to include(lfs_object.reload.file.path)
+            end
+
+            it 'sets content disposition' do
+              get_show(public_project, id)
+
+              file_uri = URI.parse(response.location)
+              params = CGI.parse(file_uri.query)
+
+              expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
+            end
+          end
         end
 
         context 'when project does not have access' do
           it 'does not serve the file' do
-            get(:show,
-                namespace_id: public_project.namespace.to_param,
-                project_id: public_project,
-                id: id)
+            get_show(public_project, id)
 
             expect(response).to have_http_status(404)
           end
@@ -81,10 +94,7 @@
         end
 
         it 'delivers ASCII file' do
-          get(:show,
-              namespace_id: public_project.namespace.to_param,
-              project_id: public_project,
-              id: id)
+          get_show(public_project, id)
 
           expect(response).to have_http_status(200)
           expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -95,4 +105,10 @@
       end
     end
   end
+
+  def get_show(project, id)
+    get(:show, namespace_id: project.namespace.to_param,
+               project_id: project,
+               id: id)
+  end
 end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 8f213dbb597e..6be658a3adff 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -17,8 +17,12 @@
   let(:api_user) { user }
   let(:reporter) { create(:project_member, :reporter, project: project).user }
   let(:guest) { create(:project_member, :guest, project: project).user }
+  let(:cross_project_pipeline_enabled) { true }
+  let(:object_storage_enabled) { true }
 
   before do
+    stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled,
+                           object_storage: object_storage_enabled)
     project.add_developer(user)
   end
 
@@ -319,7 +323,7 @@ def get_artifact_file(artifact_path)
     let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
 
     before do
-      stub_artifacts_object_storage
+      stub_artifacts_object_storage(licensed: :skip)
       job.success
     end
 
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 27d09b8202ef..00f45e5f7022 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -190,10 +190,12 @@
   describe 'when fetching lfs object' do
     let(:project) { create(:project) }
     let(:update_permissions) { }
+    let(:before_get) { }
 
     before do
       enable_lfs
       update_permissions
+      before_get
       get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
     end
 
@@ -238,6 +240,21 @@
             end
 
             it_behaves_like 'responds with a file'
+
+            context 'when LFS uses object storage' do
+              let(:before_get) do
+                stub_lfs_object_storage
+                lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+              end
+
+              it 'responds with redirect' do
+                expect(response).to have_gitlab_http_status(302)
+              end
+
+              it 'responds with the file location' do
+                expect(response.location).to include(lfs_object.reload.file.path)
+              end
+            end
           end
         end
 
@@ -944,6 +961,46 @@
             end
           end
 
+          context 'and workhorse requests upload finalize for a new lfs object' do
+            before do
+              allow_any_instance_of(LfsObjectUploader).to receive(:exists?) { false }
+            end
+
+            context 'with object storage disabled' do
+              it "doesn't attempt to migrate file to object storage" do
+                expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+                put_finalize(with_tempfile: true)
+              end
+            end
+
+            context 'with object storage enabled' do
+              before do
+                stub_lfs_object_storage
+              end
+
+              it 'schedules migration of file to object storage' do
+                expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+
+                put_finalize(with_tempfile: true)
+              end
+            end
+          end
+
+          context 'and project has limit enabled but will stay under the limit' do
+            before do
+              allow_any_instance_of(EE::Project).to receive_messages(
+                actual_size_limit: 200,
+                size_limit_enabled?: true)
+
+              put_finalize
+            end
+
+            it 'responds with status 200' do
+              expect(response).to have_gitlab_http_status(200)
+            end
+          end
+
           context 'invalid tempfiles' do
             it 'rejects slashes in the tempfile name (path traversal)' do
               put_finalize('foo/bar')
@@ -1143,7 +1200,9 @@ def put_authorize(verified: true)
       put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
     end
 
-    def put_finalize(lfs_tmp = lfs_tmp_file)
+    def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
+      setup_tempfile(lfs_tmp) if with_tempfile
+
       put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
           headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
     end
@@ -1151,6 +1210,13 @@ def put_finalize(lfs_tmp = lfs_tmp_file)
     def lfs_tmp_file
       "#{sample_oid}012345678"
     end
+
+    def setup_tempfile(lfs_tmp)
+      upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
+
+      FileUtils.mkdir_p(upload_path)
+      FileUtils.touch(File.join(upload_path, lfs_tmp))
+    end
   end
 
   def enable_lfs
diff --git a/spec/support/stub_artifacts.rb b/spec/support/stub_artifacts.rb
deleted file mode 100644
index d531be5b8e7f..000000000000
--- a/spec/support/stub_artifacts.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-module StubConfiguration
-  def stub_artifacts_object_storage(enabled: true)
-    Fog.mock!
-    allow(Gitlab.config.artifacts.object_store).to receive_messages(
-      enabled: enabled,
-      remote_directory: 'artifacts',
-      connection: {
-        provider: 'AWS',
-        aws_access_key_id: 'AWS_ACCESS_KEY_ID',
-        aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
-        region: 'eu-central-1'
-      }
-    )
-
-    allow_any_instance_of(ArtifactUploader).to receive(:verify_license!) { true }
-
-    return unless enabled
-
-    ::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
-      begin
-        connection.directories.create(key: 'artifacts')
-      rescue Excon::Error::Conflict
-      end
-    end
-  end
-end
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
new file mode 100644
index 000000000000..df7e05585d2e
--- /dev/null
+++ b/spec/support/stub_object_storage.rb
@@ -0,0 +1,32 @@
+module StubConfiguration
+  def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true)
+    Fog.mock!
+
+    allow(config).to receive(:enabled) { enabled }
+
+    stub_licensed_features(object_storage: licensed) unless licensed == :skip
+
+    return unless enabled
+
+    ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
+      begin
+        connection.directories.create(key: remote_directory)
+      rescue Excon::Error::Conflict
+      end
+    end
+  end
+
+  def stub_artifacts_object_storage(**params)
+    stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
+                                 uploader: ArtifactUploader,
+                                 remote_directory: 'artifacts',
+                                 **params)
+  end
+
+  def stub_lfs_object_storage(**params)
+    stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
+                                 uploader: LfsObjectUploader,
+                                 remote_directory: 'lfs-objects',
+                                 **params)
+  end
+end
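In specs, these helpers swap real object storage for a Fog mock and pre-create the expected bucket. Typical usage, as seen in the specs below:

    before do
      stub_lfs_object_storage(licensed: false)       # simulate an unlicensed instance
      # or: stub_artifacts_object_storage(enabled: false)
    end
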
diff --git a/spec/tasks/gitlab/lfs_rake_spec.rb b/spec/tasks/gitlab/lfs_rake_spec.rb
new file mode 100644
index 000000000000..faed24f2010c
--- /dev/null
+++ b/spec/tasks/gitlab/lfs_rake_spec.rb
@@ -0,0 +1,37 @@
+require 'rake_helper'
+
+describe 'gitlab:lfs namespace rake task' do
+  before :all do
+    Rake.application.rake_require 'tasks/gitlab/lfs'
+  end
+
+  describe 'migrate' do
+    let(:local) { ObjectStoreUploader::LOCAL_STORE }
+    let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+    let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+
+    def lfs_migrate
+      run_rake_task('gitlab:lfs:migrate')
+    end
+
+    context 'object storage disabled' do
+      before do
+        stub_lfs_object_storage(enabled: false)
+      end
+
+      it "doesn't migrate files" do
+        expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
+      end
+    end
+
+    context 'object storage enabled' do
+      before do
+        stub_lfs_object_storage
+      end
+
+      it 'migrates local file to object storage' do
+        expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+      end
+    end
+  end
+end
diff --git a/spec/uploaders/artifact_uploader_spec.rb b/spec/uploaders/artifact_uploader_spec.rb
index f4ba4a8207fe..88f394b2938b 100644
--- a/spec/uploaders/artifact_uploader_spec.rb
+++ b/spec/uploaders/artifact_uploader_spec.rb
@@ -6,8 +6,8 @@
   let(:uploader) { described_class.new(job, :artifacts_file) }
   let(:local_path) { Gitlab.config.artifacts.path }
 
-  describe '.local_artifacts_store' do
-    subject { described_class.local_artifacts_store }
+  describe '.local_store_path' do
+    subject { described_class.local_store_path }
 
     it "delegate to artifacts path" do
       expect(Gitlab.config.artifacts).to receive(:path)
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 7088bc23334c..1e09958d369a 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -2,7 +2,7 @@
 
 describe LfsObjectUploader do
   let(:lfs_object) { create(:lfs_object, :with_file) }
-  let(:uploader) { described_class.new(lfs_object) }
+  let(:uploader) { described_class.new(lfs_object, :file) }
   let(:path) { Gitlab.config.lfs.storage_path }
 
   describe '#move_to_cache' do
@@ -37,4 +37,73 @@
     it { is_expected.to start_with(path) }
     it { is_expected.to end_with('/tmp/work') }
   end
+
+  describe 'migration to object storage' do
+    context 'with object storage disabled' do
+      it "is skipped" do
+        expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+        lfs_object
+      end
+    end
+
+    context 'with object storage enabled' do
+      before do
+        stub_lfs_object_storage
+      end
+
+      it 'is scheduled to run after creation' do
+        expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
+
+        lfs_object
+      end
+    end
+
+    context 'with object storage unlicensed' do
+      before do
+        stub_lfs_object_storage(licensed: false)
+      end
+
+      it 'is skipped' do
+        expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+        lfs_object
+      end
+    end
+  end
+
+  describe 'remote file' do
+    let(:remote) { described_class::REMOTE_STORE }
+    let(:lfs_object) { create(:lfs_object, file_store: remote) }
+
+    context 'with object storage enabled' do
+      before do
+        stub_lfs_object_storage
+      end
+
+      it 'can store file remotely' do
+        allow(ObjectStorageUploadWorker).to receive(:perform_async)
+
+        store_file(lfs_object)
+
+        expect(lfs_object.file_store).to eq remote
+        expect(lfs_object.file.path).not_to be_blank
+      end
+    end
+
+    context 'with object storage unlicensed' do
+      before do
+        stub_lfs_object_storage(licensed: false)
+      end
+
+      it 'can not store file remotely' do
+        expect { store_file(lfs_object) }.to raise_error('Object Storage feature is missing')
+      end
+    end
+  end
+
+  def store_file(lfs_object)
+    lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+    lfs_object.save!
+  end
 end
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
index c6c7d47e703e..c55545029803 100644
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -198,18 +198,15 @@
     end
 
     context 'when using remote storage' do
-      let(:project) { double }
-
       before do
         uploader_class.storage_options double(
           object_store: double(enabled: true))
         expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
-        expect(object).to receive(:project) { project }
       end
 
       context 'feature is not available' do
         before do
-          expect(project).to receive(:feature_available?).with(:object_storage) { false }
+          expect(License).to receive(:feature_available?).with(:object_storage) { false }
         end
 
         it "does raise an error" do
@@ -219,7 +216,7 @@
 
       context 'feature is available' do
         before do
-          expect(project).to receive(:feature_available?).with(:object_storage) { true }
+          expect(License).to receive(:feature_available?).with(:object_storage) { true }
         end
 
         it "does not raise an error" do
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
new file mode 100644
index 000000000000..8a8f7a065a07
--- /dev/null
+++ b/spec/workers/object_storage_upload_worker_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe ObjectStorageUploadWorker do
+  let(:local) { ObjectStoreUploader::LOCAL_STORE }
+  let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+
+  def perform
+    described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
+  end
+
+  context 'for LFS' do
+    let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+    let(:uploader_class) { LfsObjectUploader }
+    let(:subject_class) { LfsObject }
+    let(:file_field) { :file }
+    let(:subject_id) { lfs_object.id }
+
+    context 'when object storage is enabled' do
+      before do
+        stub_lfs_object_storage
+      end
+
+      it 'uploads object to storage' do
+        expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+      end
+
+      context 'when background upload is disabled' do
+        before do
+          allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
+        end
+
+        it 'is skipped' do
+          expect { perform }.not_to change { lfs_object.reload.file_store }
+        end
+      end
+    end
+
+    context 'when object storage is disabled' do
+      before do
+        stub_lfs_object_storage(enabled: false)
+      end
+
+      it "doesn't migrate files" do
+        perform
+
+        expect(lfs_object.reload.file_store).to eq(local)
+      end
+    end
+  end
+
+  context 'for artifacts' do
+    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+    let(:uploader_class) { ArtifactUploader }
+    let(:subject_class) { Ci::Build }
+    let(:file_field) { :artifacts_file }
+    let(:subject_id) { job.id }
+
+    context 'when local storage is used' do
+      let(:store) { local }
+
+      context 'and remote storage is defined' do
+        before do
+          stub_artifacts_object_storage
+          job
+        end
+
+        it "migrates file to remote storage" do
+          perform
+
+          expect(job.reload.artifacts_file_store).to eq(remote)
+        end
+
+        context 'for artifacts_metadata' do
+          let(:file_field) { :artifacts_metadata }
+
+          it 'migrates metadata to remote storage' do
+            perform
+
+            expect(job.reload.artifacts_metadata_store).to eq(remote)
+          end
+        end
+      end
+    end
+  end
+end
-- 
GitLab


From d7448f161fd2730b1238285d5f1c8fc72d31b099 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Fri, 25 Aug 2017 12:15:27 +0000
Subject: [PATCH 03/14] Merge branch 'zj-improve-object-store-rake-task' into
 'master'

Use proper logging for artifacts rake task

See merge request !2762
---
 .../unreleased-ee/zj-improve-object-store-rake-task.yml  | 5 +++++
 lib/tasks/gitlab/artifacts.rake                          | 9 ++++++---
 2 files changed, 11 insertions(+), 3 deletions(-)
 create mode 100644 changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml

diff --git a/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml b/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml
new file mode 100644
index 000000000000..70ffaa45bfd6
--- /dev/null
+++ b/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml
@@ -0,0 +1,5 @@
+---
+title: Use a logger for the artifacts migration rake task
+merge_request:
+author:
+type: changed
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index e079177eb3f8..53514490d59c 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -1,3 +1,5 @@
+require 'logger'
+
 desc "GitLab | Migrate files for artifacts to comply with new storage format"
 namespace :gitlab do
   namespace :artifacts do
@@ -11,9 +13,10 @@ namespace :gitlab do
         begin
           build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
           build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
-          print '.'
-        rescue
-          print 'F'
+
+          logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
+        rescue => e
+          logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
         end
       end
     end
-- 
GitLab


From 2b7b60728426c10ef1188a1073d3630805773a35 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Mon, 4 Sep 2017 09:52:26 +0000
Subject: [PATCH 04/14] Merge branch 'zj-fix-resolver-issue' into 'master'

Switch out resolver to mitigate SocketError

See merge request !2809
---
 lib/tasks/gitlab/artifacts.rake | 1 +
 1 file changed, 1 insertion(+)

diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index 53514490d59c..29d8a145be8f 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -1,4 +1,5 @@
 require 'logger'
+require 'resolv-replace'
 
 desc "GitLab | Migrate files for artifacts to comply with new storage format"
 namespace :gitlab do
-- 
GitLab


From ec72abf53fd82ca3e7f126536a83b27b368696ec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Tue, 12 Sep 2017 09:39:21 +0000
Subject: [PATCH 05/14] Merge branch
 'jej/object-storage-uploader-migrate-with-license-callback' into 'master'

ObjectStoreUploader#migrate! uses with_callbacks to trigger verify_license!

Closes #3370

See merge request !2863
---
 app/uploaders/object_store_uploader.rb       | 24 +++++++++++---------
 spec/uploaders/object_store_uploader_spec.rb | 10 ++++++++
 2 files changed, 23 insertions(+), 11 deletions(-)

diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
index 3a742d4f715e..9b9f47d5943a 100644
--- a/app/uploaders/object_store_uploader.rb
+++ b/app/uploaders/object_store_uploader.rb
@@ -94,18 +94,20 @@ def migrate!(new_store)
     # change storage
     self.object_store = new_store
 
-    storage.store!(file).tap do |new_file|
-      # since we change storage store the new storage
-      # in case of failure delete new file
-      begin
-        subject.save!
-      rescue => e
-        new_file.delete
-        self.object_store = old_store
-        raise e
+    with_callbacks(:store, file) do
+      storage.store!(file).tap do |new_file|
+        # since we change storage store the new storage
+        # in case of failure delete new file
+        begin
+          subject.save!
+        rescue => e
+          new_file.delete
+          self.object_store = old_store
+          raise e
+        end
+
+        old_file.delete
       end
-
-      old_file.delete
     end
   end
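Running the store through with_callbacks means the :store callbacks, including verify_license!, now fire on migration as well, so an unlicensed migrate! fails fast instead of copying the file first. The observable behaviour, sketched:

    # Raises 'Object Storage feature is missing' when the license lacks
    # object storage; otherwise stores remotely and persists the new store.
    build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
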
 
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
index c55545029803..dd08a40eb97e 100644
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -111,6 +111,16 @@
           end
         end
 
+        context 'when storage is unlicensed' do
+          before do
+            stub_artifacts_object_storage(licensed: false)
+          end
+
+          it "raises an error" do
+            expect { subject }.to raise_error(/Object Storage feature is missing/)
+          end
+        end
+
         context 'when credentials are set' do
           before do
             stub_artifacts_object_storage
-- 
GitLab


From 6ca02a41500790b3e9061dd8836540955b9aaf7c Mon Sep 17 00:00:00 2001
From: Grzegorz Bizon <grzegorz@gitlab.com>
Date: Tue, 5 Dec 2017 14:31:33 +0000
Subject: [PATCH 06/14] Merge branch 'zj-multiple-artifacts-ee' into 'master'

Multiple artifacts ee

See merge request gitlab-org/gitlab-ee!3276
---
 .../projects/artifacts_controller.rb          |   5 +-
 app/models/ci/build.rb                        |  14 +--
 app/uploaders/job_artifact_uploader.rb        |  20 +--
 app/uploaders/legacy_artifact_uploader.rb     |  20 +--
 app/uploaders/lfs_object_uploader.rb          |   4 +-
 app/uploaders/object_store_uploader.rb        |  27 ++--
 config/gitlab.yml.example                     |   1 +
 ...0918072949_add_file_store_job_artifacts.rb |  14 +++
 db/schema.rb                                  |   1 +
 lib/tasks/gitlab/artifacts.rake               |   4 +-
 .../projects/artifacts_controller_spec.rb     |  11 +-
 spec/factories/ci/job_artifacts.rb            |   4 +
 spec/models/ci/build_spec.rb                  |  16 ++-
 spec/requests/api/jobs_spec.rb                |  61 +++++++--
 spec/requests/api/runner_spec.rb              |  20 ++-
 spec/requests/api/v3/builds_spec.rb           |  16 ++-
 spec/serializers/pipeline_serializer_spec.rb  |   3 +-
 spec/support/stub_object_storage.rb           |   2 +-
 spec/tasks/gitlab/artifacts_rake_spec.rb      | 118 ++++++++++++++++++
 spec/uploaders/job_artifact_uploader_spec.rb  |  14 ++-
 .../legacy_artifact_uploader_spec.rb          |   3 +-
 spec/uploaders/object_store_uploader_spec.rb  |  99 +++++++++++++--
 .../object_storage_upload_worker_spec.rb      |  37 ++++--
 23 files changed, 403 insertions(+), 111 deletions(-)
 create mode 100644 db/migrate/20170918072949_add_file_store_job_artifacts.rb
 create mode 100644 spec/tasks/gitlab/artifacts_rake_spec.rb

diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index 3995a2fc37ac..abc283d7aa98 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -42,8 +42,7 @@ def file
   end
 
   def raw
-    path = Gitlab::Ci::Build::Artifacts::Path
-      .new(params[:path])
+    path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
 
     send_artifacts_entry(build, path)
   end
@@ -72,7 +71,7 @@ def extract_ref_name_and_path
   end
 
   def validate_artifacts!
-    render_404 unless build && build.artifacts?
+    render_404 unless build&.artifacts?
   end
 
   def build
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index ddd075e1fcbe..7cf8bdd968b8 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -45,7 +45,7 @@ def persisted_environment
     end
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
-    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
+    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
@@ -361,22 +361,10 @@ def execute_hooks
       project.running_or_pending_build_count(force: true)
     end
 
-    def artifacts?
-      !artifacts_expired? && artifacts_file.exists?
-    end
-
     def browsable_artifacts?
       artifacts_metadata?
     end
 
-    def downloadable_single_artifacts_file?
-      artifacts_metadata? && artifacts_file.file_storage?
-    end
-
-    def artifacts_metadata?
-      artifacts? && artifacts_metadata.exists?
-    end
-
     def artifacts_metadata_entry(path, **options)
       artifacts_metadata.use_file do |metadata_path|
         metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index 15dfb5a5763a..a0757dbe6b22 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,5 +1,5 @@
-class JobArtifactUploader < GitlabUploader
-  storage :file
+class JobArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts
 
   def self.local_store_path
     Gitlab.config.artifacts.path
@@ -15,24 +15,8 @@ def size
     model.size
   end
 
-  def store_dir
-    default_local_path
-  end
-
-  def cache_dir
-    File.join(self.class.local_store_path, 'tmp/cache')
-  end
-
-  def work_dir
-    File.join(self.class.local_store_path, 'tmp/work')
-  end
-
   private
 
-  def default_local_path
-    File.join(self.class.local_store_path, default_path)
-  end
-
   def default_path
     creation_date = model.created_at.utc.strftime('%Y_%m_%d')
 
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 4f7f8a63108b..476a46c17548 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,5 +1,5 @@
-class LegacyArtifactUploader < GitlabUploader
-  storage :file
+class LegacyArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts
 
   def self.local_store_path
     Gitlab.config.artifacts.path
@@ -9,24 +9,8 @@ def self.artifacts_upload_path
     File.join(self.local_store_path, 'tmp/uploads/')
   end
 
-  def store_dir
-    default_local_path
-  end
-
-  def cache_dir
-    File.join(self.class.local_store_path, 'tmp/cache')
-  end
-
-  def work_dir
-    File.join(self.class.local_store_path, 'tmp/work')
-  end
-
   private
 
-  def default_local_path
-    File.join(self.class.local_store_path, default_path)
-  end
-
   def default_path
     File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index 8a5f599c1d34..88cf0450dcda 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -7,12 +7,12 @@ def self.local_store_path
   end
 
   def filename
-    subject.oid[4..-1]
+    model.oid[4..-1]
   end
 
   private
 
   def default_path
-    "#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
+    "#{model.oid[0, 2]}/#{model.oid[2, 2]}"
   end
 end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
index 9b9f47d5943a..b5de0357a5f0 100644
--- a/app/uploaders/object_store_uploader.rb
+++ b/app/uploaders/object_store_uploader.rb
@@ -38,11 +38,16 @@ def local_store_path
     end
   end
 
-  attr_reader :subject, :field
+  def file_storage?
+    storage.is_a?(CarrierWave::Storage::File)
+  end
+
+  def file_cache_storage?
+    cache_storage.is_a?(CarrierWave::Storage::File)
+  end
 
-  def initialize(subject, field)
-    @subject = subject
-    @field = field
+  def real_object_store
+    model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def object_store
@@ -51,7 +56,7 @@ def object_store
 
   def object_store=(value)
     @storage = nil
-    subject.public_send(:"#{field}_store=", value)
+    model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def store_dir
@@ -99,7 +104,7 @@ def migrate!(new_store)
         # since we change storage store the new storage
         # in case of failure delete new file
         begin
-          subject.save!
+          model.save!
         rescue => e
           new_file.delete
           self.object_store = old_store
@@ -113,7 +118,7 @@ def migrate!(new_store)
 
   def schedule_migration_to_object_storage(new_file)
     if self.class.object_store_enabled? && licensed? && file_storage?
-      ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
+      ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
     end
   end
 
@@ -178,6 +183,14 @@ def default_path
     raise NotImplementedError
   end
 
+  def serialization_column
+    model.class.uploader_option(mounted_as, :mount_on) || mounted_as
+  end
+
+  def store_serialization_column
+    :"#{serialization_column}_store"
+  end
+
   def storage
     @storage ||=
       if object_store == REMOTE_STORE
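After this refactor the uploader behaves as a standard CarrierWave mount and derives its store column from the mount point: for a :file mount, serialization_column is :file and store_serialization_column is :file_store, matching the file_store columns added by the migrations. A hypothetical model sketch:

    class LfsObject < ActiveRecord::Base
      # The store (local/remote) is persisted in the `file_store` column.
      mount_uploader :file, LfsObjectUploader
    end
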
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 0ffacad400b7..e2256c5c1182 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -672,6 +672,7 @@ test:
         aws_secret_access_key: AWS_SECRET_ACCESS_KEY
         region: eu-central-1
   artifacts:
+    path: tmp/tests/artifacts
     enabled: true
     # The location where build artifacts are stored (default: shared/artifacts).
     # path: shared/artifacts
diff --git a/db/migrate/20170918072949_add_file_store_job_artifacts.rb b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
new file mode 100644
index 000000000000..8c265bb6acad
--- /dev/null
+++ b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
@@ -0,0 +1,14 @@
+class AddFileStoreJobArtifacts < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+  DOWNTIME = false
+
+  def up
+    add_column(:ci_job_artifacts, :file_store, :integer)
+  end
+
+  def down
+    remove_column(:ci_job_artifacts, :file_store)
+  end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 8846bbd975c0..8aa6a87657e2 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -326,6 +326,7 @@
     t.integer "project_id", null: false
     t.integer "job_id", null: false
     t.integer "file_type", null: false
+    t.integer "file_store"
     t.integer "size", limit: 8
     t.datetime_with_timezone "created_at", null: false
     t.datetime_with_timezone "updated_at", null: false
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index 29d8a145be8f..494317d99c77 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -12,8 +12,8 @@ namespace :gitlab do
         .with_artifacts_stored_locally
         .find_each(batch_size: 10) do |build|
         begin
-          build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
-          build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+          build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
+          build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
 
           logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
         rescue => e
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index bc3d277fc8ed..581b3e4e4abf 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -1,7 +1,7 @@
 require 'spec_helper'
 
 describe Projects::ArtifactsController do
-  set(:user) { create(:user) }
+  let(:user) { project.owner }
   set(:project) { create(:project, :repository, :public) }
 
   let(:pipeline) do
@@ -15,8 +15,6 @@
   let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
 
   before do
-    project.add_developer(user)
-
     sign_in(user)
   end
 
@@ -115,12 +113,12 @@
   describe 'GET raw' do
     context 'when the file exists' do
       let(:path) { 'ci_artifacts.txt' }
-      let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
 
       shared_examples 'a valid file' do
         it 'serves the file using workhorse' do
           subject
 
+          expect(response).to have_gitlab_http_status(200)
           expect(send_data).to start_with('artifacts-entry:')
 
           expect(params.keys).to eq(%w(Archive Entry))
@@ -144,8 +142,9 @@ def params
 
       context 'when using local file storage' do
         it_behaves_like 'a valid file' do
+          let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
           let(:store) { ObjectStoreUploader::LOCAL_STORE }
-          let(:archive_path) { ArtifactUploader.local_store_path }
+          let(:archive_path) { JobArtifactUploader.local_store_path }
         end
       end
 
@@ -157,7 +156,7 @@ def params
         it_behaves_like 'a valid file' do
           let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
           let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
-          let(:store) { ObjectStorage::Store::REMOTE }
+          let(:store) { ObjectStoreUploader::REMOTE_STORE }
           let(:archive_path) { 'https://' }
         end
       end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 538dc4228327..391852496956 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -5,6 +5,10 @@
     job factory: :ci_build
     file_type :archive
 
+    trait :remote_store do
+      file_store JobArtifactUploader::REMOTE_STORE
+    end
+
     after :build do |artifact|
       artifact.project ||= artifact.job.project
     end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 1a20c2dda00e..83352421d7db 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -158,6 +158,20 @@
       end
     end
 
+    context 'when legacy artifacts are used' do
+      let(:build) { create(:ci_build, :legacy_artifacts) }
+
+      subject { build.artifacts? }
+
+      context 'artifacts archive does not exist' do
+        let(:build) { create(:ci_build) }
+
+        context 'is not expired' do
+          it { is_expected.to be_truthy }
+        end
+      end
+    end
+
     context 'when legacy artifacts are used' do
       let(:build) { create(:ci_build, :legacy_artifacts) }
 
@@ -190,7 +204,7 @@
   
     context 'artifacts metadata does not exist' do
       before do
-        build.update_attributes(artifacts_metadata: nil)
+        build.update_attributes(legacy_artifacts_metadata: nil)
       end
 
       it { is_expected.to be_falsy }
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index a435945fea2b..9ebf5bf7e97b 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -288,14 +288,21 @@ def get_artifact_file(artifact_path)
       get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
     end
 
-    context 'job with artifacts' do
-      context 'when artifacts are stored locally' do
-        let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+    context 'normal authentication' do
+      before do
+        stub_artifacts_object_storage
+      end
 
-        context 'authorized user' do
-          let(:download_headers) do
-            { 'Content-Transfer-Encoding' => 'binary',
-              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+      context 'job with artifacts' do
+        context 'when artifacts are stored locally' do
+          let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+          before do
+            get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+          end
+
+          context 'authorized user' do
+            it_behaves_like 'downloads artifact'
           end
 
           it 'returns specific job artifacts' do
@@ -305,13 +312,40 @@ def get_artifact_file(artifact_path)
           end
         end
 
-        context 'unauthorized user' do
-          let(:api_user) { nil }
+        context 'when artifacts are stored remotely' do
+          let(:job) { create(:ci_build, pipeline: pipeline) }
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+          before do
+            job.reload
+
+            get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+          end
 
           it 'does not return specific job artifacts' do
             expect(response).to have_http_status(401)
           end
         end
+
+        it 'does not return job artifacts if not uploaded' do
+          get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+
+          expect(response).to have_gitlab_http_status(404)
+        end
+      end
+    end
+
+    context 'authorized by job_token' do
+      let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
+
+      before do
+        get api("/projects/#{project.id}/jobs/#{job.id}/artifacts"), job_token: job.token
+      end
+
+      context 'user is developer' do
+        let(:api_user) { user }
+
+        it_behaves_like 'downloads artifact'
       end
 
       context 'when artifacts are stored remotely' do
@@ -402,7 +436,14 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
         end
 
         context 'when artifacts are stored remotely' do
-          let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+          let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+          before do
+            job.reload
+
+            get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+          end
 
           it 'returns location redirect' do
             expect(response).to have_http_status(302)
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 3406b17401f6..5c6eee09285f 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1151,12 +1151,15 @@ def upload_artifacts(file, headers = {}, accelerated = true)
       describe 'GET /api/v4/jobs/:id/artifacts' do
         let(:token) { job.token }
 
-        before do
-          download_artifact
-        end
-
         context 'when job has artifacts' do
-          let(:job) { create(:ci_build, :artifacts) }
+          let(:job) { create(:ci_build) }
+          let(:store) { JobArtifactUploader::LOCAL_STORE }
+
+          before do
+            create(:ci_job_artifact, :archive, file_store: store, job: job)
+
+            download_artifact
+          end
 
           context 'when using job token' do
             context 'when artifacts are stored locally' do
@@ -1172,7 +1175,8 @@ def upload_artifacts(file, headers = {}, accelerated = true)
             end
 
             context 'when artifacts are stored remotely' do
-              let(:job) { create(:ci_build, :artifacts, :remote_store) }
+              let(:store) { JobArtifactUploader::REMOTE_STORE }
+              let!(:job) { create(:ci_build) }
 
               it 'download artifacts' do
                 expect(response).to have_http_status(302)
@@ -1191,12 +1195,16 @@ def upload_artifacts(file, headers = {}, accelerated = true)
 
          context 'when job does not have artifacts' do
           it 'responds with not found' do
+            download_artifact
+
             expect(response).to have_gitlab_http_status(404)
           end
         end
 
         def download_artifact(params = {}, request_headers = headers)
           params = params.merge(token: token)
+          job.reload
+
           get api("/jobs/#{job.id}/artifacts"), params, request_headers
         end
       end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 266ae6542277..862bf7e540d0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -215,10 +215,13 @@
       end
 
       context 'when artifacts are stored remotely' do
-        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+        let(:build) { create(:ci_build, pipeline: pipeline) }
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
 
         it 'returns location redirect' do
-          expect(response).to have_http_status(302)
+          get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+
+          expect(response).to have_gitlab_http_status(302)
         end
       end
 
@@ -309,7 +312,14 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
         end
 
         context 'when artifacts are stored remotely' do
-          let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+          let(:build) { create(:ci_build, pipeline: pipeline) }
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+          before do
+            build.reload
+
+            get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+          end
 
           it 'returns location redirect' do
             expect(response).to have_http_status(302)
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 88d347322a69..e40edbfb421b 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -117,7 +117,8 @@
       shared_examples 'no N+1 queries' do
         it 'verifies number of queries', :request_store do
           recorded = ActiveRecord::QueryRecorder.new { subject }
-          expect(recorded.count).to be_within(1).of(36)
+
+          expect(recorded.count).to be_within(1).of(40)
           expect(recorded.cached_count).to eq(0)
         end
       end
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
index df7e05585d2e..cf5746bc29f1 100644
--- a/spec/support/stub_object_storage.rb
+++ b/spec/support/stub_object_storage.rb
@@ -18,7 +18,7 @@ def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled:
 
   def stub_artifacts_object_storage(**params)
     stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
-                                 uploader: ArtifactUploader,
+                                 uploader: JobArtifactUploader,
                                  remote_directory: 'artifacts',
                                  **params)
   end
diff --git a/spec/tasks/gitlab/artifacts_rake_spec.rb b/spec/tasks/gitlab/artifacts_rake_spec.rb
new file mode 100644
index 000000000000..a30823b8875c
--- /dev/null
+++ b/spec/tasks/gitlab/artifacts_rake_spec.rb
@@ -0,0 +1,118 @@
+require 'rake_helper'
+
+describe 'gitlab:artifacts namespace rake task' do
+  before(:context) do
+    Rake.application.rake_require 'tasks/gitlab/artifacts'
+  end
+
+  let(:object_storage_enabled) { false }
+
+  before do
+    stub_artifacts_object_storage(enabled: object_storage_enabled)
+  end
+
+  subject { run_rake_task('gitlab:artifacts:migrate') }
+
+  context 'legacy artifacts' do
+    describe 'migrate' do
+      let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+
+      context 'when local storage is used' do
+        let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+        context 'and job does not have file store defined' do
+          let(:object_storage_enabled) { true }
+          let(:store) { nil }
+
+          it "migrates file to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          end
+        end
+
+        context 'and remote storage is defined' do
+          let(:object_storage_enabled) { true }
+
+          it "migrates file to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          end
+        end
+
+        context 'and remote storage is not defined' do
+          it "fails to migrate to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+          end
+        end
+      end
+
+      context 'when remote storage is used' do
+        let(:object_storage_enabled) { true }
+
+        let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+        it "file stays on remote storage" do
+          subject
+
+          expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+    end
+  end
+
+  context 'job artifacts' do
+    let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+
+    context 'when local storage is used' do
+      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+      context 'and job does not have file store defined' do
+        let(:object_storage_enabled) { true }
+        let(:store) { nil }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is defined' do
+        let(:object_storage_enabled) { true }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is not defined' do
+        it "fails to migrate to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+        end
+      end
+    end
+
+    context 'when remote storage is used' do
+      let(:object_storage_enabled) { true }
+      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+      it "file stays on remote storage" do
+        subject
+
+        expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+      end
+    end
+  end
+end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 14fd5f3600f6..decea35c86de 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
 require 'spec_helper'
 
 describe JobArtifactUploader do
-  let(:job_artifact) { create(:ci_job_artifact) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
   let(:uploader) { described_class.new(job_artifact, :file) }
   let(:local_path) { Gitlab.config.artifacts.path }
 
@@ -15,6 +16,17 @@
       it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
       it { is_expected.to end_with(path) }
     end
+
+    context 'when using remote storage' do
+      let(:store) { described_class::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
+      it { is_expected.to end_with(path) }
+    end
   end
 
   describe '#cache_dir' do
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index efeffb78772d..7b316072f471 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
 require 'rails_helper'
 
 describe LegacyArtifactUploader do
-  let(:job) { create(:ci_build) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job) { create(:ci_build, artifacts_file_store: store) }
   let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
   let(:local_path) { Gitlab.config.artifacts.path }
 
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
index dd08a40eb97e..2f52867bb91e 100644
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -4,27 +4,91 @@
 describe ObjectStoreUploader do
   let(:uploader_class) { Class.new(described_class) }
   let(:object) { double }
-  let(:uploader) { uploader_class.new(object, :artifacts_file) }
+  let(:uploader) { uploader_class.new(object, :file) }
+
+  before do
+    allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
+  end
 
   describe '#object_store' do
     it "calls artifacts_file_store on object" do
-      expect(object).to receive(:artifacts_file_store)
+      expect(object).to receive(:file_store)
 
       uploader.object_store
     end
+
+    context 'when store is nil' do
+      before do
+        expect(object).to receive(:file_store).twice.and_return(nil)
+      end
+
+      it "returns LOCAL_STORE" do
+        expect(uploader.real_object_store).to be_nil
+        expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
+      end
+    end
+
+    context 'when value is set' do
+      before do
+        expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
+      end
+
+      it "returns given value" do
+        expect(uploader.real_object_store).not_to be_nil
+        expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
+      end
+    end
   end
 
   describe '#object_store=' do
     it "calls artifacts_file_store= on object" do
-      expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+      expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
 
       uploader.object_store = described_class::REMOTE_STORE
     end
   end
 
-  context 'when using ArtifactsUploader' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-    let(:uploader) { job.artifacts_file }
+  describe '#file_storage?' do
+    context 'when file storage is used' do
+      before do
+        expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
+      end
+
+      it { expect(uploader).to be_file_storage }
+    end
+
+    context 'when remote storage is used' do
+      before do
+        uploader_class.storage_options double(
+          object_store: double(enabled: true))
+        expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
+      end
+
+      it { expect(uploader).not_to be_file_storage }
+    end
+  end
+
+  describe '#file_cache_storage?' do
+    context 'when file storage is used' do
+      before do
+        uploader_class.cache_storage(:file)
+      end
+
+      it { expect(uploader).to be_file_cache_storage }
+    end
+
+    context 'when remote storage is used' do
+      before do
+        uploader_class.cache_storage(:fog)
+      end
+
+      it { expect(uploader).not_to be_file_cache_storage }
+    end
+  end
+
+  context 'when using JobArtifactUploader' do
+    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+    let(:uploader) { artifact.file }
 
     context 'checking described_class' do
       let(:store) { described_class::LOCAL_STORE }
@@ -32,6 +96,19 @@
       it "uploader is of a described_class" do
         expect(uploader).to be_a(described_class)
       end
+
+      it 'moves files locally' do
+        expect(uploader.move_to_store).to be(true)
+        expect(uploader.move_to_cache).to be(true)
+      end
+    end
+
+    context 'when store is nil' do
+      let(:store) { nil }
+
+      it "sets the store to LOCAL_STORE" do
+        expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
+      end
     end
 
     describe '#use_file' do
@@ -57,8 +134,8 @@
     end
 
     describe '#migrate!' do
-      let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-      let(:uploader) { job.artifacts_file }
+      let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+      let(:uploader) { artifact.file }
       let(:store) { described_class::LOCAL_STORE }
       
       subject { uploader.migrate!(new_store) }
@@ -141,7 +218,7 @@
 
           context 'when subject save fails' do
             before do
-              expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+              expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
             end
 
             it "does catch an error" do
@@ -199,7 +276,7 @@
 
     context 'when using local storage' do
       before do
-        expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+        expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
       end
 
       it "does not raise an error" do
@@ -211,7 +288,7 @@
       before do
         uploader_class.storage_options double(
           object_store: double(enabled: true))
-        expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+        expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
       end
 
       context 'feature is not available' do
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
index 8a8f7a065a07..0922b5feccdd 100644
--- a/spec/workers/object_storage_upload_worker_spec.rb
+++ b/spec/workers/object_storage_upload_worker_spec.rb
@@ -48,12 +48,12 @@ def perform
     end
   end
 
-  context 'for artifacts' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
-    let(:uploader_class) { ArtifactUploader }
+  context 'for legacy artifacts' do
+    let(:build) { create(:ci_build, :legacy_artifacts) }
+    let(:uploader_class) { LegacyArtifactUploader }
     let(:subject_class) { Ci::Build }
     let(:file_field) { :artifacts_file }
-    let(:subject_id) { job.id }
+    let(:subject_id) { build.id }
 
     context 'when local storage is used' do
       let(:store) { local }
@@ -61,13 +61,12 @@ def perform
       context 'and remote storage is defined' do
         before do
           stub_artifacts_object_storage
-          job
         end
 
         it "migrates file to remote storage" do
           perform
 
-          expect(job.reload.artifacts_file_store).to eq(remote)
+          expect(build.reload.artifacts_file_store).to eq(remote)
         end
 
         context 'for artifacts_metadata' do
@@ -76,10 +75,34 @@ def perform
           it 'migrates metadata to remote storage' do
             perform
 
-            expect(job.reload.artifacts_metadata_store).to eq(remote)
+            expect(build.reload.artifacts_metadata_store).to eq(remote)
           end
         end
       end
     end
   end
+
+  context 'for job artifacts' do
+    let(:artifact) { create(:ci_job_artifact, :archive) }
+    let(:uploader_class) { JobArtifactUploader }
+    let(:subject_class) { Ci::JobArtifact }
+    let(:file_field) { :file }
+    let(:subject_id) { artifact.id }
+
+    context 'when local storage is used' do
+      let(:store) { local }
+
+      context 'and remote storage is defined' do
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it "migrates file to remote storage" do
+          perform
+
+          expect(artifact.reload.file_store).to eq(remote)
+        end
+      end
+    end
+  end
 end
-- 
GitLab


From 87f11d2cf539d9539b439b54355f0dadaf4ebf76 Mon Sep 17 00:00:00 2001
From: Kamil Trzciński <ayufan@ayufan.eu>
Date: Fri, 8 Dec 2017 09:09:06 +0000
Subject: [PATCH 07/14] Merge branch 'zj-auto-upload-job-artifacts' into
 'master'

Transfer job archives after creation

See merge request gitlab-org/gitlab-ee!3646
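
What this buys us: a model that mounts an uploader now schedules the
transfer to object storage as soon as the surrounding database
transaction commits, while the uploader refuses to enqueue anything
unless object storage, background upload, and the license all allow it.
A minimal sketch of the pattern, assuming CarrierWave and the
AfterCommitQueue concern used in this patch (the Artifact class name is
illustrative, not part of the change):

    class Artifact < ActiveRecord::Base
      include AfterCommitQueue

      mount_uploader :file, JobArtifactUploader

      after_save if: :file_changed? do
        # Enqueue from run_after_commit so the Sidekiq job can never
        # observe the record before its transaction is committed.
        run_after_commit do
          file.schedule_migration_to_object_storage
        end
      end
    end

Because the worker now retries up to five times and may run long after
creation, it looks the record up with find_by and returns silently when
the record has been deleted in the meantime.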
---
 app/models/ci/job_artifact.rb                 |  7 ++
 app/models/lfs_object.rb                      |  9 ++
 app/uploaders/lfs_object_uploader.rb          |  1 -
 app/uploaders/object_store_uploader.rb        | 11 ++-
 app/workers/object_storage_upload_worker.rb   |  6 +-
 .../zj-auto-upload-job-artifacts.yml          |  5 +
 config/gitlab.yml.example                     |  1 +
 spec/ee/spec/models/ee/lfs_object_spec.rb     | 96 +++++++++++++++++++
 .../object_storage_upload_worker_spec.rb      |  6 +-
 spec/models/ci/job_artifact_spec.rb           | 58 +++++++++++
 spec/requests/lfs_http_spec.rb                |  2 +-
 spec/support/stub_object_storage.rb           |  3 +-
 spec/uploaders/lfs_object_uploader_spec.rb    |  2 +-
 13 files changed, 195 insertions(+), 12 deletions(-)
 create mode 100644 changelogs/unreleased-ee/zj-auto-upload-job-artifacts.yml
 create mode 100644 spec/ee/spec/models/ee/lfs_object_spec.rb
 rename spec/{ => ee}/workers/object_storage_upload_worker_spec.rb (93%)

diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 84fc6863567c..1aea897aacac 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -1,5 +1,6 @@
 module Ci
   class JobArtifact < ActiveRecord::Base
+    include AfterCommitQueue
     extend Gitlab::Ci::Model
 
     belongs_to :project
@@ -9,6 +10,12 @@ class JobArtifact < ActiveRecord::Base
 
     mount_uploader :file, JobArtifactUploader
 
+    after_save if: :file_changed?, on: [:create, :update] do
+      run_after_commit do
+        file.schedule_migration_to_object_storage
+      end
+    end
+
     enum file_type: {
       archive: 1,
       metadata: 2
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 38b8c41024a8..6ad792aab308 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -1,4 +1,7 @@
 class LfsObject < ActiveRecord::Base
+  prepend EE::LfsObject
+  include AfterCommitQueue
+
   has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
   has_many :projects, through: :lfs_objects_projects
 
@@ -8,6 +11,12 @@ class LfsObject < ActiveRecord::Base
 
   mount_uploader :file, LfsObjectUploader
 
+  after_save if: :file_changed?, on: [:create, :update] do
+    run_after_commit do
+      file.schedule_migration_to_object_storage
+    end
+  end
+
   def project_allowed_access?(project)
     projects.exists?(project.lfs_storage_project.id)
   end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index 88cf0450dcda..fa42e4710b7d 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,6 +1,5 @@
 class LfsObjectUploader < ObjectStoreUploader
   storage_options Gitlab.config.lfs
-  after :store, :schedule_migration_to_object_storage
 
   def self.local_store_path
     Gitlab.config.lfs.storage_path
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
index b5de0357a5f0..bb25dc4219fd 100644
--- a/app/uploaders/object_store_uploader.rb
+++ b/app/uploaders/object_store_uploader.rb
@@ -116,10 +116,13 @@ def migrate!(new_store)
     end
   end
 
-  def schedule_migration_to_object_storage(new_file)
-    if self.class.object_store_enabled? && licensed? && file_storage?
-      ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
-    end
+  def schedule_migration_to_object_storage(*args)
+    return unless self.class.object_store_enabled?
+    return unless self.class.background_upload_enabled?
+    return unless self.licensed?
+    return unless self.file_storage?
+
+    ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
   end
 
   def fog_directory
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
index 0a374c4323f1..0b9411ff2df6 100644
--- a/app/workers/object_storage_upload_worker.rb
+++ b/app/workers/object_storage_upload_worker.rb
@@ -2,6 +2,8 @@ class ObjectStorageUploadWorker
   include Sidekiq::Worker
   include DedicatedSidekiqQueue
 
+  sidekiq_options retry: 5
+
   def perform(uploader_class_name, subject_class_name, file_field, subject_id)
     uploader_class = uploader_class_name.constantize
     subject_class = subject_class_name.constantize
@@ -9,7 +11,9 @@ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
     return unless uploader_class.object_store_enabled?
     return unless uploader_class.background_upload_enabled?
 
-    subject = subject_class.find(subject_id)
+    subject = subject_class.find_by(id: subject_id)
+    return unless subject
+
     file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
 
     return unless file.licensed?
diff --git a/changelogs/unreleased-ee/zj-auto-upload-job-artifacts.yml b/changelogs/unreleased-ee/zj-auto-upload-job-artifacts.yml
new file mode 100644
index 000000000000..4335f85e3609
--- /dev/null
+++ b/changelogs/unreleased-ee/zj-auto-upload-job-artifacts.yml
@@ -0,0 +1,5 @@
+---
+title: Transfer job archives to object storage after creation
+merge_request:
+author:
+type: added
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index e2256c5c1182..d8fa31381847 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -679,6 +679,7 @@ test:
     object_store:
       enabled: false
       remote_directory: artifacts # The bucket name
+      background_upload: false
       connection:
         provider: AWS # Only AWS supported at the moment
         aws_access_key_id: AWS_ACCESS_KEY_ID
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
new file mode 100644
index 000000000000..b02327b4c734
--- /dev/null
+++ b/spec/ee/spec/models/ee/lfs_object_spec.rb
@@ -0,0 +1,96 @@
+require 'spec_helper'
+
+describe LfsObject do
+  describe '#local_store?' do
+    it 'returns true when file_store is nil' do
+      subject.file_store = nil
+
+      expect(subject.local_store?).to eq true
+    end
+
+    it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do
+      subject.file_store = LfsObjectUploader::LOCAL_STORE
+
+      expect(subject.local_store?).to eq true
+    end
+
+    it 'returns false when file_store is equal to LfsObjectUploader::REMOTE_STORE' do
+      subject.file_store = LfsObjectUploader::REMOTE_STORE
+
+      expect(subject.local_store?).to eq false
+    end
+  end
+
+  describe '#destroy' do
+    subject { create(:lfs_object, :with_file) }
+
+    context 'when running in a Geo primary node' do
+      set(:primary) { create(:geo_node, :primary) }
+      set(:secondary) { create(:geo_node) }
+
+      it 'logs an event to the Geo event log' do
+        expect { subject.destroy }.to change(Geo::LfsObjectDeletedEvent, :count).by(1)
+      end
+    end
+  end
+
+  describe '#schedule_migration_to_object_storage' do
+    before do
+      stub_lfs_setting(enabled: true)
+    end
+
+    subject { create(:lfs_object, :with_file) }
+
+    context 'when object storage is disabled' do
+      before do
+        stub_lfs_object_storage(enabled: false)
+      end
+
+      it 'does not schedule the migration' do
+        expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+        subject
+      end
+    end
+
+    context 'when object storage is enabled' do
+      context 'when background upload is enabled' do
+        context 'when licensed' do
+          before do
+            stub_lfs_object_storage(background_upload: true)
+          end
+
+          it 'schedules the model for migration' do
+            expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+
+            subject
+          end
+        end
+
+        context 'when unlicensed' do
+          before do
+            stub_lfs_object_storage(background_upload: true, licensed: false)
+          end
+
+          it 'does not schedule the migration' do
+            expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+            subject
+          end
+        end
+      end
+
+      context 'when background upload is disabled' do
+        before do
+          stub_lfs_object_storage(background_upload: false)
+        end
+
+        it 'does not schedule the migration' do
+          expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+          subject
+        end
+      end
+    end
+  end
+end
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/ee/workers/object_storage_upload_worker_spec.rb
similarity index 93%
rename from spec/workers/object_storage_upload_worker_spec.rb
rename to spec/ee/workers/object_storage_upload_worker_spec.rb
index 0922b5feccdd..d421fdf95a95 100644
--- a/spec/workers/object_storage_upload_worker_spec.rb
+++ b/spec/ee/workers/object_storage_upload_worker_spec.rb
@@ -17,7 +17,7 @@ def perform
 
     context 'when object storage is enabled' do
       before do
-        stub_lfs_object_storage
+        stub_lfs_object_storage(background_upload: true)
       end
 
       it 'uploads object to storage' do
@@ -60,7 +60,7 @@ def perform
 
       context 'and remote storage is defined' do
         before do
-          stub_artifacts_object_storage
+          stub_artifacts_object_storage(background_upload: true)
         end
 
         it "migrates file to remote storage" do
@@ -94,7 +94,7 @@ def perform
 
       context 'and remote storage is defined' do
         before do
-          stub_artifacts_object_storage
+          stub_artifacts_object_storage(background_upload: true)
         end
 
         it "migrates file to remote storage" do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 0e18a326c68d..a10afb98d2b2 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -12,6 +12,64 @@
   it { is_expected.to respond_to(:created_at) }
   it { is_expected.to respond_to(:updated_at) }
 
+  describe 'callbacks' do
+    subject { create(:ci_job_artifact, :archive) }
+
+    describe '#schedule_migration_to_object_storage' do
+      context 'when object storage is disabled' do
+        before do
+          stub_artifacts_object_storage(enabled: false)
+        end
+
+        it 'does not schedule the migration' do
+          expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+          subject
+        end
+      end
+
+      context 'when object storage is enabled' do
+        context 'when background upload is enabled' do
+          context 'when licensed' do
+            before do
+              stub_artifacts_object_storage(background_upload: true)
+            end
+
+            it 'schedules the model for migration' do
+              expect(ObjectStorageUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
+
+              subject
+            end
+          end
+
+          context 'when unlicensed' do
+            before do
+              stub_artifacts_object_storage(background_upload: true, licensed: false)
+            end
+
+            it 'does not schedule the migration' do
+              expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+              subject
+            end
+          end
+        end
+
+        context 'when background upload is disabled' do
+          before do
+            stub_artifacts_object_storage(background_upload: false)
+          end
+
+          it 'does not schedule the migration' do
+            expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+            subject
+          end
+        end
+      end
+    end
+  end
+
   describe '#set_size' do
     it 'sets the size' do
       expect(artifact.size).to eq(106365)
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index b59485057016..d7bdfde918c3 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1010,7 +1010,7 @@
 
             context 'with object storage enabled' do
               before do
-                stub_lfs_object_storage
+                stub_lfs_object_storage(background_upload: true)
               end
 
               it 'schedules migration of file to object storage' do
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
index cf5746bc29f1..4f469648d5ce 100644
--- a/spec/support/stub_object_storage.rb
+++ b/spec/support/stub_object_storage.rb
@@ -1,8 +1,9 @@
 module StubConfiguration
-  def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true)
+  def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true, background_upload: false)
     Fog.mock!
 
     allow(config).to receive(:enabled) { enabled }
+    allow(config).to receive(:background_upload) { background_upload }
 
     stub_licensed_features(object_storage: licensed) unless licensed == :skip
 
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 1e09958d369a..9b8e2835ebc0 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -49,7 +49,7 @@
 
     context 'with object storage enabled' do
       before do
-        stub_lfs_object_storage
+        stub_lfs_object_storage(background_upload: true)
       end
 
       it 'is scheduled to run after creation' do
-- 
GitLab


From a7dae52e9d27adde427ef8aa066c0761071a3cd9 Mon Sep 17 00:00:00 2001
From: Sean McGivern <sean@mcgivern.me.uk>
Date: Fri, 2 Feb 2018 13:59:43 +0000
Subject: [PATCH 08/14] Merge branch '4163-move-uploads-to-object-storage' into
 'master'

Move uploads to object storage

Closes #4163

See merge request gitlab-org/gitlab-ee!3867
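
A detail that is easy to miss in a diff this size is how Upload
checksums now interact with object storage: remote objects are never
checksummed by the Rails process, small local files are checksummed
synchronously before save, and larger ones are deferred to a background
job after commit. A trimmed-down sketch of that rule, assuming
ActiveRecord; the threshold value is a guess (only the constant name
appears in the diff) and absolute_path/schedule_checksum are elided:

    class Upload < ActiveRecord::Base
      CHECKSUM_THRESHOLD = 100.megabytes # assumed value

      before_save  :calculate_checksum!, if: :foreground_checksummable?
      after_commit :schedule_checksum,   if: :checksummable?

      def calculate_checksum!
        self.checksum = nil
        return unless checksummable?

        self.checksum = Digest::SHA256.file(absolute_path).hexdigest
      end

      def exist?
        File.exist?(absolute_path)
      end

      private

      # Only records that still lack a checksum, live on local storage,
      # and whose file exists on disk qualify.
      def checksummable?
        checksum.nil? && local? && exist?
      end

      def local?
        return true if store.nil?

        store == ObjectStorage::Store::LOCAL
      end

      def foreground_checksummable?
        checksummable? && size <= CHECKSUM_THRESHOLD
      end
    end

Since calculate_checksum! fills in the checksum during before_save,
checksummable? is false again by the time after_commit fires, so small
files are never scheduled twice.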
---
 app/controllers/concerns/uploads_actions.rb   |  61 ++-
 app/controllers/groups/uploads_controller.rb  |  30 +-
 .../projects/lfs_storage_controller.rb        |   2 +-
 .../projects/uploads_controller.rb            |  21 +-
 app/controllers/uploads_controller.rb         |  75 ++--
 app/models/appearance.rb                      |   1 +
 app/models/ci/build.rb                        |   2 +-
 app/models/concerns/avatarable.rb             |  24 +
 app/models/group.rb                           |  20 +-
 app/models/lfs_object.rb                      |   2 +-
 app/models/note.rb                            |   1 +
 app/models/project.rb                         |  14 +-
 app/models/upload.rb                          |  50 ++-
 app/models/user.rb                            |  16 +-
 .../migrate_attachments_service.rb            |   4 +-
 app/uploaders/attachment_uploader.rb          |  10 +-
 app/uploaders/avatar_uploader.rb              |  19 +-
 app/uploaders/file_mover.rb                   |   6 +-
 app/uploaders/file_uploader.rb                | 122 ++++--
 app/uploaders/gitlab_uploader.rb              |  79 ++--
 app/uploaders/job_artifact_uploader.rb        |  19 +-
 app/uploaders/legacy_artifact_uploader.rb     |  15 +-
 app/uploaders/lfs_object_uploader.rb          |  20 +-
 app/uploaders/namespace_file_uploader.rb      |  25 +-
 app/uploaders/object_store_uploader.rb        | 215 ---------
 app/uploaders/personal_file_uploader.rb       |  43 +-
 app/uploaders/records_uploads.rb              |  80 ++--
 app/uploaders/uploader_helper.rb              |   9 +-
 app/uploaders/workhorse.rb                    |   7 +
 app/workers/object_storage_upload_worker.rb   |  16 +-
 app/workers/upload_checksum_worker.rb         |   2 +-
 .../4163-move-uploads-to-object-storage.yml   |   5 +
 config/gitlab.yml.example                     |  29 ++
 config/initializers/1_settings.rb             |  32 +-
 ...71214144320_add_store_column_to_uploads.rb |  12 +
 ...119135717_add_uploader_index_to_uploads.rb |  20 +
 db/schema.rb                                  |   3 +-
 doc/development/file_storage.md               | 104 ++++-
 ee/app/models/ee/ci/job_artifact.rb           |  25 ++
 ee/app/models/ee/lfs_object.rb                |  23 +
 ee/app/models/geo/fdw/ci/job_artifact.rb      |  11 +
 ee/app/models/geo/fdw/lfs_object.rb           |   9 +
 ee/app/services/geo/files_expire_service.rb   |  77 ++++
 ...d_storage_attachments_migration_service.rb |  55 +++
 .../geo/job_artifact_deleted_event_store.rb   |  48 ++
 .../geo/lfs_object_deleted_event_store.rb     |  49 +++
 ee/app/uploaders/object_storage.rb            | 265 +++++++++++
 ee/lib/gitlab/geo/file_transfer.rb            |  24 +
 ee/lib/gitlab/geo/log_cursor/daemon.rb        | 266 +++++++++++
 lib/api/runner.rb                             |   6 +-
 lib/backup/artifacts.rb                       |   2 +-
 .../populate_untracked_uploads.rb             |   2 +-
 .../prepare_untracked_uploads.rb              |   9 +-
 lib/gitlab/gfm/uploads_rewriter.rb            |   2 +-
 lib/gitlab/import_export/uploads_saver.rb     |   8 +-
 lib/gitlab/uploads_transfer.rb                |   2 +-
 lib/gitlab/workhorse.rb                       |   4 +-
 lib/tasks/gitlab/artifacts.rake               |   4 +-
 lib/tasks/gitlab/lfs.rake                     |   2 +-
 .../groups/uploads_controller_spec.rb         |   4 +-
 .../projects/artifacts_controller_spec.rb     |   6 +-
 .../projects/raw_controller_spec.rb           |   4 +-
 spec/controllers/uploads_controller_spec.rb   |  13 +
 .../geo/attachment_registry_finder_spec.rb    | 270 ++++++++++++
 .../spec/lib/gitlab/geo/file_transfer_spec.rb |  22 +
 .../lib/gitlab/geo/log_cursor/daemon_spec.rb  | 414 ++++++++++++++++++
 spec/ee/spec/models/ee/lfs_object_spec.rb     |   8 +-
 .../migrate_attachments_service_spec.rb       |  50 +++
 .../geo/file_download_service_spec.rb         | 227 ++++++++++
 .../services/geo/files_expire_service_spec.rb |  51 +++
 ...rage_attachments_migration_service_spec.rb |  83 ++++
 .../geo/file_download_dispatch_worker_spec.rb | 291 ++++++++++++
 .../object_storage_upload_worker_spec.rb      |   4 +-
 spec/factories/ci/job_artifacts.rb            |   2 +-
 spec/factories/geo/event_log.rb               | 121 +++++
 spec/factories/groups.rb                      |   2 +-
 spec/factories/notes.rb                       |   4 +-
 spec/factories/projects.rb                    |   2 +-
 spec/factories/uploads.rb                     |  27 +-
 spec/factories/users.rb                       |   2 +-
 .../prepare_untracked_uploads_spec.rb         |  57 +--
 spec/lib/gitlab/gfm/uploads_rewriter_spec.rb  |   2 +-
 .../import_export/uploads_restorer_spec.rb    |   9 +-
 .../import_export/uploads_saver_spec.rb       |   4 +-
 .../remove_empty_fork_networks_spec.rb        |   4 +
 spec/models/namespace_spec.rb                 |   2 +-
 spec/models/upload_spec.rb                    |  73 +--
 spec/requests/api/runner_spec.rb              |   8 +-
 spec/requests/lfs_http_spec.rb                |   8 +-
 spec/services/issues/move_service_spec.rb     |   2 +-
 .../migrate_attachments_service_spec.rb       |   4 +-
 .../uploads_actions_shared_examples.rb        |  62 +--
 .../object_storage_shared_examples.rb         | 126 ++++++
 spec/support/stub_object_storage.rb           |   7 +
 spec/support/test_env.rb                      |   2 +-
 .../track_untracked_uploads_helpers.rb        |   2 +-
 spec/tasks/gitlab/artifacts_rake_spec.rb      |  32 +-
 spec/tasks/gitlab/lfs_rake_spec.rb            |   4 +-
 spec/uploaders/attachment_uploader_spec.rb    |  41 +-
 spec/uploaders/avatar_uploader_spec.rb        |  44 +-
 spec/uploaders/file_mover_spec.rb             |  18 +-
 spec/uploaders/file_uploader_spec.rb          | 128 ++----
 spec/uploaders/job_artifact_uploader_spec.rb  |  46 +-
 .../legacy_artifact_uploader_spec.rb          |  52 +--
 spec/uploaders/lfs_object_uploader_spec.rb    |  43 +-
 .../uploaders/namespace_file_uploader_spec.rb |  36 +-
 spec/uploaders/object_storage_spec.rb         | 350 +++++++++++++++
 spec/uploaders/object_store_uploader_spec.rb  | 315 -------------
 spec/uploaders/personal_file_uploader_spec.rb |  45 +-
 spec/uploaders/records_uploads_spec.rb        |  73 ++-
 spec/workers/upload_checksum_worker_spec.rb   |  29 +-
 111 files changed, 3972 insertions(+), 1371 deletions(-)
 delete mode 100644 app/uploaders/object_store_uploader.rb
 create mode 100644 app/uploaders/workhorse.rb
 create mode 100644 changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml
 create mode 100644 db/migrate/20171214144320_add_store_column_to_uploads.rb
 create mode 100644 db/migrate/20180119135717_add_uploader_index_to_uploads.rb
 create mode 100644 ee/app/models/ee/ci/job_artifact.rb
 create mode 100644 ee/app/models/ee/lfs_object.rb
 create mode 100644 ee/app/models/geo/fdw/ci/job_artifact.rb
 create mode 100644 ee/app/models/geo/fdw/lfs_object.rb
 create mode 100644 ee/app/services/geo/files_expire_service.rb
 create mode 100644 ee/app/services/geo/hashed_storage_attachments_migration_service.rb
 create mode 100644 ee/app/services/geo/job_artifact_deleted_event_store.rb
 create mode 100644 ee/app/services/geo/lfs_object_deleted_event_store.rb
 create mode 100644 ee/app/uploaders/object_storage.rb
 create mode 100644 ee/lib/gitlab/geo/file_transfer.rb
 create mode 100644 ee/lib/gitlab/geo/log_cursor/daemon.rb
 create mode 100644 spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
 create mode 100644 spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
 create mode 100644 spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
 create mode 100644 spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
 create mode 100644 spec/ee/spec/services/geo/file_download_service_spec.rb
 create mode 100644 spec/ee/spec/services/geo/files_expire_service_spec.rb
 create mode 100644 spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
 create mode 100644 spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
 create mode 100644 spec/factories/geo/event_log.rb
 create mode 100644 spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
 create mode 100644 spec/uploaders/object_storage_spec.rb
 delete mode 100644 spec/uploaders/object_store_uploader_spec.rb

diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index a6fb1f400011..61554029d095 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -1,6 +1,8 @@
 module UploadsActions
   include Gitlab::Utils::StrongMemoize
 
+  UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
+
   def create
     link_to_file = UploadService.new(model, params[:file], uploader_class).execute
 
@@ -17,34 +19,71 @@ def create
     end
   end
 
+  # This should either
+  #   - send the file directly
+  #   - or redirect to its URL
+  #
   def show
     return render_404 unless uploader.exists?
 
-    disposition = uploader.image_or_video? ? 'inline' : 'attachment'
-
-    expires_in 0.seconds, must_revalidate: true, private: true
+    if uploader.file_storage?
+      disposition = uploader.image_or_video? ? 'inline' : 'attachment'
+      expires_in 0.seconds, must_revalidate: true, private: true
 
-    send_file uploader.file.path, disposition: disposition
+      send_file uploader.file.path, disposition: disposition
+    else
+      redirect_to uploader.url
+    end
   end
 
   private
 
+  def uploader_class
+    raise NotImplementedError
+  end
+
+  def upload_mount
+    mounted_as = params[:mounted_as]
+    mounted_as if UPLOAD_MOUNTS.include?(mounted_as)
+  end
+
+  def uploader_mounted?
+    upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil?
+  end
+
   def uploader
     strong_memoize(:uploader) do
-      return if show_model.nil?
+      if uploader_mounted?
+        model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
+      else
+        build_uploader_from_upload || build_uploader_from_params
+      end
+    end
+  end
 
-      file_uploader = FileUploader.new(show_model, params[:secret])
-      file_uploader.retrieve_from_store!(params[:filename])
+  def build_uploader_from_upload
+    return nil unless params[:secret] && params[:filename]
 
-      file_uploader
-    end
+    upload_path = uploader_class.upload_path(params[:secret], params[:filename])
+    upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
+    upload&.build_uploader
+  end
+
+  def build_uploader_from_params
+    uploader = uploader_class.new(model, params[:secret])
+    uploader.retrieve_from_store!(params[:filename])
+    uploader
   end
 
   def image_or_video?
     uploader && uploader.exists? && uploader.image_or_video?
   end
 
-  def uploader_class
-    FileUploader
+  def find_model
+    nil
+  end
+
+  def model
+    strong_memoize(:model) { find_model }
   end
 end
diff --git a/app/controllers/groups/uploads_controller.rb b/app/controllers/groups/uploads_controller.rb
index e6bd9806401d..f1578f75e88b 100644
--- a/app/controllers/groups/uploads_controller.rb
+++ b/app/controllers/groups/uploads_controller.rb
@@ -7,29 +7,23 @@ class Groups::UploadsController < Groups::ApplicationController
 
   private
 
-  def show_model
-    strong_memoize(:show_model) do
-      group_id = params[:group_id]
-
-      Group.find_by_full_path(group_id)
-    end
+  def upload_model_class
+    Group
   end
 
-  def authorize_upload_file!
-    render_404 unless can?(current_user, :upload_file, group)
+  def uploader_class
+    NamespaceFileUploader
   end
 
-  def uploader
-    strong_memoize(:uploader) do
-      file_uploader = uploader_class.new(show_model, params[:secret])
-      file_uploader.retrieve_from_store!(params[:filename])
-      file_uploader
-    end
-  end
+  def find_model
+    return @group if @group
 
-  def uploader_class
-    NamespaceFileUploader
+    group_id = params[:group_id]
+
+    Group.find_by_full_path(group_id)
   end
 
-  alias_method :model, :group
+  def authorize_upload_file!
+    render_404 unless can?(current_user, :upload_file, group)
+  end
 end
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 5b0f3d11d9e3..88fc373945ab 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -61,7 +61,7 @@ def tmp_filename
 
   def store_file(oid, size, tmp_file)
     # Define tmp_file_path early because we use it in "ensure"
-    tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file)
+    tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
 
     object = LfsObject.find_or_create_by(oid: oid, size: size)
     file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
diff --git a/app/controllers/projects/uploads_controller.rb b/app/controllers/projects/uploads_controller.rb
index 4685bbe80b4b..f5cf089ad98d 100644
--- a/app/controllers/projects/uploads_controller.rb
+++ b/app/controllers/projects/uploads_controller.rb
@@ -1,6 +1,7 @@
 class Projects::UploadsController < Projects::ApplicationController
   include UploadsActions
 
+  # The :project and :repository before_actions reject users without access;
+  # skip them so images and videos can still be shown.
   skip_before_action :project, :repository,
     if: -> { action_name == 'show' && image_or_video? }
 
@@ -8,14 +9,20 @@ class Projects::UploadsController < Projects::ApplicationController
 
   private
 
-  def show_model
-    strong_memoize(:show_model) do
-      namespace = params[:namespace_id]
-      id = params[:project_id]
+  def upload_model_class
+    Project
+  end
 
-      Project.find_by_full_path("#{namespace}/#{id}")
-    end
+  def uploader_class
+    FileUploader
   end
 
-  alias_method :model, :project
+  def find_model
+    return @project if @project
+
+    namespace = params[:namespace_id]
+    id = params[:project_id]
+
+    Project.find_by_full_path("#{namespace}/#{id}")
+  end
 end
diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb
index 16a74f82d3f6..3d227b0a9551 100644
--- a/app/controllers/uploads_controller.rb
+++ b/app/controllers/uploads_controller.rb
@@ -1,19 +1,34 @@
 class UploadsController < ApplicationController
   include UploadsActions
 
+  UnknownUploadModelError = Class.new(StandardError)
+
+  MODEL_CLASSES = {
+    "user"             => User,
+    "project"          => Project,
+    "note"             => Note,
+    "group"            => Group,
+    "appearance"       => Appearance,
+    "personal_snippet" => PersonalSnippet,
+    nil                => PersonalSnippet
+  }.freeze
+
+  rescue_from UnknownUploadModelError, with: :render_404
+
   skip_before_action :authenticate_user!
+  before_action :upload_mount_satisfied?
   before_action :find_model
   before_action :authorize_access!, only: [:show]
   before_action :authorize_create_access!, only: [:create]
 
-  private
+  def uploader_class
+    PersonalFileUploader
+  end
 
   def find_model
     return nil unless params[:id]
 
-    return render_404 unless upload_model && upload_mount
-
-    @model = upload_model.find(params[:id])
+    upload_model_class.find(params[:id])
   end
 
   def authorize_access!
@@ -53,55 +68,17 @@ def render_unauthorized
     end
   end
 
-  def upload_model
-    upload_models = {
-      "user"    => User,
-      "project" => Project,
-      "note"    => Note,
-      "group"   => Group,
-      "appearance" => Appearance,
-      "personal_snippet" => PersonalSnippet
-    }
-
-    upload_models[params[:model]]
-  end
-
-  def upload_mount
-    return true unless params[:mounted_as]
-
-    upload_mounts = %w(avatar attachment file logo header_logo)
-
-    if upload_mounts.include?(params[:mounted_as])
-      params[:mounted_as]
-    end
+  def upload_model_class
+    MODEL_CLASSES[params[:model]] || raise(UnknownUploadModelError)
   end
 
-  def uploader
-    return @uploader if defined?(@uploader)
-
-    case model
-    when nil
-      @uploader = PersonalFileUploader.new(nil, params[:secret])
-
-      @uploader.retrieve_from_store!(params[:filename])
-    when PersonalSnippet
-      @uploader = PersonalFileUploader.new(model, params[:secret])
-
-      @uploader.retrieve_from_store!(params[:filename])
-    else
-      @uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
-
-      redirect_to @uploader.url unless @uploader.file_storage?
-    end
-
-    @uploader
+  def upload_model_class_has_mounts?
+    upload_model_class < CarrierWave::Mount::Extension
   end
 
-  def uploader_class
-    PersonalFileUploader
-  end
+  def upload_mount_satisfied?
+    return true unless upload_model_class_has_mounts?
 
-  def model
-    @model ||= find_model
+    upload_model_class.uploader_options.has_key?(upload_mount)
   end
 end
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index 76cfe28742aa..dcd14c08f3c9 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -11,6 +11,7 @@ class Appearance < ActiveRecord::Base
 
   mount_uploader :logo,         AttachmentUploader
   mount_uploader :header_logo,  AttachmentUploader
+
   has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   CACHE_KEY = 'current_appearance'.freeze
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index b65daa376d21..4eeccd4d934d 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -45,7 +45,7 @@ def persisted_environment
     end
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
-    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
+    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index 10659030910d..d35e37935fb0 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -1,6 +1,30 @@
 module Avatarable
   extend ActiveSupport::Concern
 
+  included do
+    prepend ShadowMethods
+
+    validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
+    validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
+
+    mount_uploader :avatar, AvatarUploader
+  end
+
+  module ShadowMethods
+    def avatar_url(**args)
+      # We use avatar_path instead of overriding avatar_url because of carrierwave.
+      # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
+
+      avatar_path(only_path: args.fetch(:only_path, true)) || super
+    end
+  end
+
+  def avatar_type
+    unless self.avatar.image?
+      self.errors.add :avatar, "only images allowed"
+    end
+  end
+
   def avatar_path(only_path: true)
     return unless self[:avatar].present?
 
diff --git a/app/models/group.rb b/app/models/group.rb
index fddace033873..5d1e2f629829 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -29,18 +29,14 @@ class Group < Namespace
   has_many :variables, class_name: 'Ci::GroupVariable'
   has_many :custom_attributes, class_name: 'GroupCustomAttribute'
 
-  validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
+  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
   validate :visibility_level_allowed_by_projects
   validate :visibility_level_allowed_by_sub_groups
   validate :visibility_level_allowed_by_parent
 
-  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
-
   validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
 
-  mount_uploader :avatar, AvatarUploader
-  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
   after_create :post_create_hook
   after_destroy :post_destroy_hook
   after_save :update_two_factor_requirement
@@ -116,12 +112,6 @@ def visibility_level_allowed?(level = self.visibility_level)
       visibility_level_allowed_by_sub_groups?(level)
   end
 
-  def avatar_url(**args)
-    # We use avatar_path instead of overriding avatar_url because of carrierwave.
-    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
-    avatar_path(args)
-  end
-
   def lfs_enabled?
     return false unless Gitlab.config.lfs.enabled
     return Gitlab.config.lfs.enabled if self[:lfs_enabled].nil?
@@ -193,12 +183,6 @@ def last_owner?(user)
     owners.include?(user) && owners.size == 1
   end
 
-  def avatar_type
-    unless self.avatar.image?
-      self.errors.add :avatar, "only images allowed"
-    end
-  end
-
   def post_create_hook
     Gitlab::AppLogger.info("Group \"#{name}\" was created")
 
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 6ad792aab308..65c157d61cad 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -7,7 +7,7 @@ class LfsObject < ActiveRecord::Base
 
   validates :oid, presence: true, uniqueness: true
 
-  scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
+  scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
 
   mount_uploader :file, LfsObjectUploader
 
diff --git a/app/models/note.rb b/app/models/note.rb
index 184fbd5f5ae9..a84db8982e59 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -88,6 +88,7 @@ def values
     end
   end
 
+  # @deprecated attachments are handled by the MarkdownUploader
   mount_uploader :attachment, AttachmentUploader
 
   # Scopes
diff --git a/app/models/project.rb b/app/models/project.rb
index fbe65e700a43..b3c2b599129f 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -255,9 +255,6 @@ class Project < ActiveRecord::Base
   validates :star_count, numericality: { greater_than_or_equal_to: 0 }
   validate :check_limit, on: :create
   validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? }
-  validate :avatar_type,
-    if: ->(project) { project.avatar.present? && project.avatar_changed? }
-  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
   validate :visibility_level_allowed_by_group
   validate :visibility_level_allowed_as_fork
   validate :check_wiki_path_conflict
@@ -265,7 +262,6 @@ class Project < ActiveRecord::Base
     presence: true,
     inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
 
-  mount_uploader :avatar, AvatarUploader
   has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   # Scopes
@@ -917,20 +913,12 @@ def jira_tracker?
     issues_tracker.to_param == 'jira'
   end
 
-  def avatar_type
-    unless self.avatar.image?
-      self.errors.add :avatar, 'only images allowed'
-    end
-  end
-
   def avatar_in_git
     repository.avatar
   end
 
   def avatar_url(**args)
-    # We use avatar_path instead of overriding avatar_url because of carrierwave.
-    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
-    avatar_path(args) || (Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git)
+    Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git
   end
 
   # For compatibility with old code
diff --git a/app/models/upload.rb b/app/models/upload.rb
index f194d7bdb808..e227baea9945 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,44 +9,52 @@ class Upload < ActiveRecord::Base
   validates :model, presence: true
   validates :uploader, presence: true
 
-  before_save  :calculate_checksum, if:     :foreground_checksum?
-  after_commit :schedule_checksum,  unless: :foreground_checksum?
+  before_save  :calculate_checksum!, if: :foreground_checksummable?
+  after_commit :schedule_checksum,   if: :checksummable?
 
-  def self.remove_path(path)
-    where(path: path).destroy_all
-  end
-
-  def self.record(uploader)
-    remove_path(uploader.relative_path)
-
-    create(
-      size: uploader.file.size,
-      path: uploader.relative_path,
-      model: uploader.model,
-      uploader: uploader.class.to_s
-    )
+  def self.hexdigest(path)
+    Digest::SHA256.file(path).hexdigest
   end
 
   def absolute_path
+    raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
     return path unless relative_path?
 
     uploader_class.absolute_path(self)
   end
 
-  def calculate_checksum
-    return unless exist?
+  def calculate_checksum!
+    self.checksum = nil
+    return unless checksummable?
 
     self.checksum = Digest::SHA256.file(absolute_path).hexdigest
   end
 
+  def build_uploader
+    uploader_class.new(model).tap do |uploader|
+      uploader.upload = self
+      uploader.retrieve_from_store!(identifier)
+    end
+  end
+
   def exist?
     File.exist?(absolute_path)
   end
 
   private
 
-  def foreground_checksum?
-    size <= CHECKSUM_THRESHOLD
+  def checksummable?
+    checksum.nil? && local? && exist?
+  end
+
+  def local?
+    return true if store.nil?
+
+    store == ObjectStorage::Store::LOCAL
+  end
+
+  def foreground_checksummable?
+    checksummable? && size <= CHECKSUM_THRESHOLD
   end
 
   def schedule_checksum
@@ -57,6 +65,10 @@ def relative_path?
     !path.start_with?('/')
   end
 
+  def identifier
+    File.basename(path)
+  end
+
   def uploader_class
     Object.const_get(uploader)
   end
diff --git a/app/models/user.rb b/app/models/user.rb
index 4484ee9ff4c3..eb6d12b5ec56 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -134,6 +134,7 @@ def update_tracked_fields!(request)
   has_many :assigned_merge_requests,  dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent
 
   has_many :custom_attributes, class_name: 'UserCustomAttribute'
+  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   #
   # Validations
@@ -156,12 +157,10 @@ def update_tracked_fields!(request)
   validate :namespace_uniq, if: :username_changed?
   validate :namespace_move_dir_allowed, if: :username_changed?
 
-  validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
   validate :unique_email, if: :email_changed?
   validate :owns_notification_email, if: :notification_email_changed?
   validate :owns_public_email, if: :public_email_changed?
   validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
-  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
 
   before_validation :sanitize_attrs
   before_validation :set_notification_email, if: :email_changed?
@@ -223,9 +222,6 @@ def inactive_message
     end
   end
 
-  mount_uploader :avatar, AvatarUploader
-  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
   # Scopes
   scope :admins, -> { where(admin: true) }
   scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
@@ -521,12 +517,6 @@ def namespace_move_dir_allowed
     end
   end
 
-  def avatar_type
-    unless avatar.image?
-      errors.add :avatar, "only images allowed"
-    end
-  end
-
   def unique_email
     if !emails.exists?(email: email) && Email.exists?(email: email)
       errors.add(:email, 'has already been taken')
@@ -854,9 +844,7 @@ def temp_oauth_email?
   end
 
   def avatar_url(size: nil, scale: 2, **args)
-    # We use avatar_path instead of overriding avatar_url because of carrierwave.
-    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
-    avatar_path(args) || GravatarService.new.execute(email, size, scale, username: username)
+    GravatarService.new.execute(email, size, scale, username: username)
   end
 
   def primary_email_verified?
diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb
index f8aaec8a9c06..bc897d891d5a 100644
--- a/app/services/projects/hashed_storage/migrate_attachments_service.rb
+++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb
@@ -14,9 +14,9 @@ def execute
         @old_path = project.full_path
         @new_path = project.disk_path
 
-        origin = FileUploader.dynamic_path_segment(project)
+        origin = FileUploader.absolute_base_dir(project)
         project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
-        target = FileUploader.dynamic_path_segment(project)
+        target = FileUploader.absolute_base_dir(project)
 
         result = move_folder!(origin, target)
         project.save!
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index 109eb2fea0b5..cd819dc9bff6 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -1,10 +1,12 @@
 class AttachmentUploader < GitlabUploader
-  include RecordsUploads
+  include RecordsUploads::Concern
+  include ObjectStorage::Concern
+  prepend ObjectStorage::Extension::RecordsUploads
   include UploaderHelper
 
-  storage :file
+  private
 
-  def store_dir
-    "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
+  def dynamic_segment
+    File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
   end
 end
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index cbb79376d5f8..5848e6c6994e 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -1,20 +1,13 @@
 class AvatarUploader < GitlabUploader
-  include RecordsUploads
   include UploaderHelper
-
-  storage :file
-
-  def store_dir
-    "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
-  end
+  include RecordsUploads::Concern
+  include ObjectStorage::Concern
+  prepend ObjectStorage::Extension::RecordsUploads
 
   def exists?
     model.avatar.file && model.avatar.file.present?
   end
 
-  # We set move_to_store and move_to_cache to 'false' to prevent stealing
-  # the avatar file from a project when forking it.
-  # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
   def move_to_store
     false
   end
@@ -22,4 +15,10 @@ def move_to_store
   def move_to_cache
     false
   end
+
+  private
+
+  def dynamic_segment
+    File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
+  end
 end
diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb
index 00c2888d2241..f37567d61411 100644
--- a/app/uploaders/file_mover.rb
+++ b/app/uploaders/file_mover.rb
@@ -21,13 +21,11 @@ def move
   end
 
   def update_markdown
-    updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown)
+    updated_text = model.read_attribute(update_field)
+                        .gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
     model.update_attribute(update_field, updated_text)
-
-    true
   rescue
     revert
-
     false
   end
 
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index 0b591e3bbbbe..81952dacce48 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -1,23 +1,40 @@
+# This class breaks the actual CarrierWave concept.
+# Every uploader should use a base_dir that is model agnostic so we can build
+# back URLs from base_dir-relative paths saved in the `Upload` model.
+#
+# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path),
+# there is no way to build back the correct file path without the model, which defies the
+# CarrierWave way of storing files.
+#
 class FileUploader < GitlabUploader
-  include RecordsUploads
   include UploaderHelper
+  include RecordsUploads::Concern
+  include ObjectStorage::Concern
+  prepend ObjectStorage::Extension::RecordsUploads
 
   MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
+  DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
 
-  storage :file
+  attr_accessor :model
+
+  def self.root
+    File.join(options.storage_path, 'uploads')
+  end
 
-  def self.absolute_path(upload_record)
+  def self.absolute_path(upload)
     File.join(
-      self.dynamic_path_segment(upload_record.model),
-      upload_record.path
+      absolute_base_dir(upload.model),
+      upload.path # already contains the dynamic_segment, see #upload_path
     )
   end
 
-  # Not using `GitlabUploader.base_dir` because all project namespaces are in
-  # the `public/uploads` dir.
-  #
-  def self.base_dir
-    root_dir
+  def self.base_dir(model)
+    model_path_segment(model)
+  end
+
+  # used in migrations and import/exports
+  def self.absolute_base_dir(model)
+    File.join(root, base_dir(model))
   end
 
   # Returns the part of `store_dir` that can change based on the model's current
@@ -29,63 +46,94 @@ def self.base_dir
   # model - Object that responds to `full_path` and `disk_path`
   #
   # Returns a String without a trailing slash
-  def self.dynamic_path_segment(model)
+  def self.model_path_segment(model)
     if model.hashed_storage?(:attachments)
-      dynamic_path_builder(model.disk_path)
+      model.disk_path
     else
-      dynamic_path_builder(model.full_path)
+      model.full_path
     end
   end
 
-  # Auxiliary method to build dynamic path segment when not using a project model
-  #
-  # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic
-  def self.dynamic_path_builder(path)
-    File.join(CarrierWave.root, base_dir, path)
+  def self.upload_path(secret, identifier)
+    File.join(secret, identifier)
   end
 
-  attr_accessor :model
-  attr_reader :secret
+  def self.generate_secret
+    SecureRandom.hex
+  end
 
   def initialize(model, secret = nil)
     @model = model
-    @secret = secret || generate_secret
+    @secret = secret
   end
 
-  def store_dir
-    File.join(dynamic_path_segment, @secret)
+  def base_dir
+    self.class.base_dir(@model)
   end
 
-  def relative_path
-    self.file.path.sub("#{dynamic_path_segment}/", '')
+  # we don't need to know the actual path, an uploader instance should be
+  # able to yield the file content on demand, so we should build the digest
+  def absolute_path
+    self.class.absolute_path(@upload)
   end
 
-  def to_markdown
-    to_h[:markdown]
+  def upload_path
+    self.class.upload_path(dynamic_segment, identifier)
   end
 
-  def to_h
-    filename = image_or_video? ? self.file.basename : self.file.filename
-    escaped_filename = filename.gsub("]", "\\]")
+  def model_path_segment
+    self.class.model_path_segment(@model)
+  end
 
-    markdown = "[#{escaped_filename}](#{secure_url})"
+  def store_dir
+    File.join(base_dir, dynamic_segment)
+  end
+
+  def markdown_link
+    markdown = "[#{markdown_name}](#{secure_url})"
     markdown.prepend("!") if image_or_video? || dangerous?
+    markdown
+  end
 
+  def to_h
     {
-      alt:      filename,
+      alt:      markdown_name,
       url:      secure_url,
-      markdown: markdown
+      markdown: markdown_link
     }
   end
 
+  def filename
+    self.file.filename
+  end
+
+  # the upload does not hold the secret, but holds the path
+  # which contains the secret: extract it
+  def upload=(value)
+    if matches = DYNAMIC_PATH_PATTERN.match(value.path)
+      @secret = matches[:secret]
+      @identifier = matches[:identifier]
+    end
+
+    super
+  end
+
+  def secret
+    @secret ||= self.class.generate_secret
+  end
+
   private
 
-  def dynamic_path_segment
-    self.class.dynamic_path_segment(model)
+  def markdown_name
+    (image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
   end
 
-  def generate_secret
-    SecureRandom.hex
+  def identifier
+    @identifier ||= filename
+  end
+
+  def dynamic_segment
+    secret
   end
 
   def secure_url
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 7f72b3ce4711..ba2ceb0c8cf4 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -1,64 +1,56 @@
 class GitlabUploader < CarrierWave::Uploader::Base
-  def self.absolute_path(upload_record)
-    File.join(CarrierWave.root, upload_record.path)
-  end
+  class_attribute :options
 
-  def self.root_dir
-    'uploads'
-  end
+  class << self
+    # DSL setter
+    def storage_options(options)
+      self.options = options
+    end
 
-  # When object storage is used, keep the `root_dir` as `base_dir`.
-  # The files aren't really in folders there, they just have a name.
-  # The files that contain user input in their name, also contain a hash, so
-  # the names are still unique
-  #
-  # This method is overridden in the `FileUploader`
-  def self.base_dir
-    return root_dir unless file_storage?
+    def root
+      options.storage_path
+    end
 
-    File.join(root_dir, '-', 'system')
-  end
+    # represent the directory namespacing at the class level
+    def base_dir
+      options.fetch('base_dir', '')
+    end
 
-  def self.file_storage?
-    self.storage == CarrierWave::Storage::File
+    def file_storage?
+      storage == CarrierWave::Storage::File
+    end
+
+    def absolute_path(upload_record)
+      File.join(root, upload_record.path)
+    end
   end
 
+  storage_options Gitlab.config.uploads
+
   delegate :base_dir, :file_storage?, to: :class
 
   def file_cache_storage?
     cache_storage.is_a?(CarrierWave::Storage::File)
   end
 
-  # Reduce disk IO
   def move_to_cache
-    true
+    file_storage?
   end
 
-  # Reduce disk IO
   def move_to_store
-    true
-  end
-
-  # Designed to be overridden by child uploaders that have a dynamic path
-  # segment -- that is, a path that changes based on mutable attributes of its
-  # associated model
-  #
-  # For example, `FileUploader` builds the storage path based on the associated
-  # project model's `path_with_namespace` value, which can change when the
-  # project or its containing namespace is moved or renamed.
-  def relative_path
-    self.file.path.sub("#{root}/", '')
+    file_storage?
   end
 
   def exists?
     file.present?
   end
 
-  # Override this if you don't want to save files by default to the Rails.root directory
+  def cache_dir
+    File.join(root, base_dir, 'tmp/cache')
+  end
+
   def work_dir
-    # Default path set by CarrierWave:
-    # https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L182
-    CarrierWave.tmp_path
+    File.join(root, base_dir, 'tmp/work')
   end
 
   def filename
@@ -67,6 +59,17 @@ def filename
 
   private
 
+  # Designed to be overridden by child uploaders that have a dynamic path
+  # segment -- that is, a path that changes based on mutable attributes of its
+  # associated model
+  #
+  # For example, `FileUploader` builds the storage path based on the associated
+  # project model's `path_with_namespace` value, which can change when the
+  # project or its containing namespace is moved or renamed.
+  def dynamic_segment
+    raise(NotImplementedError)
+  end
+
   # To prevent files from moving across filesystems, override the default
   # implementation:
   # http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183
@@ -74,6 +77,6 @@ def workfile_path(for_file = original_filename)
     # To be safe, keep this directory outside of the cache directory
     # because calling CarrierWave.clean_cache_files! will remove any files in
     # the cache directory.
-    File.join(work_dir, @cache_id, version_name.to_s, for_file)
+    File.join(work_dir, cache_id, version_name.to_s, for_file)
   end
 end
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index a0757dbe6b22..3ad3e6ea32b0 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,13 +1,8 @@
-class JobArtifactUploader < ObjectStoreUploader
-  storage_options Gitlab.config.artifacts
-
-  def self.local_store_path
-    Gitlab.config.artifacts.path
-  end
+class JobArtifactUploader < GitlabUploader
+  extend Workhorse::UploadPath
+  include ObjectStorage::Concern
 
-  def self.artifacts_upload_path
-    File.join(self.local_store_path, 'tmp/uploads/')
-  end
+  storage_options Gitlab.config.artifacts
 
   def size
     return super if model.size.nil?
@@ -15,9 +10,13 @@ def size
     model.size
   end
 
+  def store_dir
+    dynamic_segment
+  end
+
   private
 
-  def default_path
+  def dynamic_segment
     creation_date = model.created_at.utc.strftime('%Y_%m_%d')
 
     File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 476a46c17548..b726b0534938 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,17 +1,16 @@
-class LegacyArtifactUploader < ObjectStoreUploader
-  storage_options Gitlab.config.artifacts
+class LegacyArtifactUploader < GitlabUploader
+  extend Workhorse::UploadPath
+  include ObjectStorage::Concern
 
-  def self.local_store_path
-    Gitlab.config.artifacts.path
-  end
+  storage_options Gitlab.config.artifacts
 
-  def self.artifacts_upload_path
-    File.join(self.local_store_path, 'tmp/uploads/')
+  def store_dir
+    dynamic_segment
   end
 
   private
 
-  def default_path
+  def dynamic_segment
     File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
 end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index fa42e4710b7d..e7cce1bbb0a1 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,17 +1,25 @@
-class LfsObjectUploader < ObjectStoreUploader
-  storage_options Gitlab.config.lfs
+class LfsObjectUploader < GitlabUploader
+  extend Workhorse::UploadPath
+  include ObjectStorage::Concern
 
-  def self.local_store_path
-    Gitlab.config.lfs.storage_path
+  # LfsObjects are stored in `tmp/upload` instead of `tmp/uploads`
+  def self.workhorse_upload_path
+    File.join(root, 'tmp/upload')
   end
 
+  storage_options Gitlab.config.lfs
+
   def filename
     model.oid[4..-1]
   end
 
+  def store_dir
+    dynamic_segment
+  end
+
   private
 
-  def default_path
-    "#{model.oid[0, 2]}/#{model.oid[2, 2]}"
+  def dynamic_segment
+    File.join(model.oid[0, 2], model.oid[2, 2])
   end
 end
diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb
index 672126e9ec23..269415b19265 100644
--- a/app/uploaders/namespace_file_uploader.rb
+++ b/app/uploaders/namespace_file_uploader.rb
@@ -1,15 +1,26 @@
 class NamespaceFileUploader < FileUploader
-  def self.base_dir
-    File.join(root_dir, '-', 'system', 'namespace')
+  # Re-Override
+  def self.root
+    options.storage_path
   end
 
-  def self.dynamic_path_segment(model)
-    dynamic_path_builder(model.id.to_s)
+  def self.base_dir(model)
+    File.join(options.base_dir, 'namespace', model_path_segment(model))
   end
 
-  private
+  def self.model_path_segment(model)
+    File.join(model.id.to_s)
+  end
+
+  # Re-Override
+  def store_dir
+    store_dirs[object_store]
+  end
 
-  def secure_url
-    File.join('/uploads', @secret, file.filename)
+  def store_dirs
+    {
+      Store::LOCAL => File.join(base_dir, dynamic_segment),
+      Store::REMOTE => File.join('namespace', model_path_segment, dynamic_segment)
+    }
   end
 end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
deleted file mode 100644
index bb25dc4219fd..000000000000
--- a/app/uploaders/object_store_uploader.rb
+++ /dev/null
@@ -1,215 +0,0 @@
-require 'fog/aws'
-require 'carrierwave/storage/fog'
-
-class ObjectStoreUploader < GitlabUploader
-  before :store, :set_default_local_store
-  before :store, :verify_license!
-
-  LOCAL_STORE = 1
-  REMOTE_STORE = 2
-
-  class << self
-    def storage_options(options)
-      @storage_options = options
-    end
-
-    def object_store_options
-      @storage_options&.object_store
-    end
-
-    def object_store_enabled?
-      object_store_options&.enabled
-    end
-
-    def background_upload_enabled?
-      object_store_options&.background_upload
-    end
-
-    def object_store_credentials
-      @object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
-    end
-
-    def object_store_directory
-      object_store_options&.remote_directory
-    end
-
-    def local_store_path
-      raise NotImplementedError
-    end
-  end
-
-  def file_storage?
-    storage.is_a?(CarrierWave::Storage::File)
-  end
-
-  def file_cache_storage?
-    cache_storage.is_a?(CarrierWave::Storage::File)
-  end
-
-  def real_object_store
-    model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
-  end
-
-  def object_store
-    subject.public_send(:"#{field}_store")
-  end
-
-  def object_store=(value)
-    @storage = nil
-    model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
-  end
-
-  def store_dir
-    if file_storage?
-      default_local_path
-    else
-      default_path
-    end
-  end
-
-  def use_file
-    if file_storage?
-      return yield path
-    end
-
-    begin
-      cache_stored_file!
-      yield cache_path
-    ensure
-      cache_storage.delete_dir!(cache_path(nil))
-    end
-  end
-
-  def filename
-    super || file&.filename
-  end
-
-  def migrate!(new_store)
-    raise 'Undefined new store' unless new_store
-
-    return unless object_store != new_store
-    return unless file
-
-    old_file = file
-    old_store = object_store
-
-    # for moving remote file we need to first store it locally
-    cache_stored_file! unless file_storage?
-
-    # change storage
-    self.object_store = new_store
-
-    with_callbacks(:store, file) do
-      storage.store!(file).tap do |new_file|
-        # since we change storage store the new storage
-        # in case of failure delete new file
-        begin
-          model.save!
-        rescue => e
-          new_file.delete
-          self.object_store = old_store
-          raise e
-        end
-
-        old_file.delete
-      end
-    end
-  end
-
-  def schedule_migration_to_object_storage(*args)
-    return unless self.class.object_store_enabled?
-    return unless self.class.background_upload_enabled?
-    return unless self.licensed?
-    return unless self.file_storage?
-
-    ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
-  end
-
-  def fog_directory
-    self.class.object_store_options.remote_directory
-  end
-
-  def fog_credentials
-    self.class.object_store_options.connection
-  end
-
-  def fog_public
-    false
-  end
-
-  def move_to_store
-    file.try(:storage) == storage
-  end
-
-  def move_to_cache
-    file.try(:storage) == cache_storage
-  end
-
-  # We block storing artifacts on Object Storage, not receiving
-  def verify_license!(new_file)
-    return if file_storage?
-
-    raise 'Object Storage feature is missing' unless licensed?
-  end
-
-  def exists?
-    file.try(:exists?)
-  end
-
-  def cache_dir
-    File.join(self.class.local_store_path, 'tmp/cache')
-  end
-
-  # Override this if you don't want to save local files by default to the Rails.root directory
-  def work_dir
-    # Default path set by CarrierWave:
-    # https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
-    # CarrierWave.tmp_path
-    File.join(self.class.local_store_path, 'tmp/work')
-  end
-
-  def licensed?
-    License.feature_available?(:object_storage)
-  end
-
-  private
-
-  def set_default_local_store(new_file)
-    self.object_store = LOCAL_STORE unless self.object_store
-  end
-
-  def default_local_path
-    File.join(self.class.local_store_path, default_path)
-  end
-
-  def default_path
-    raise NotImplementedError
-  end
-
-  def serialization_column
-    model.class.uploader_option(mounted_as, :mount_on) || mounted_as
-  end
-
-  def store_serialization_column
-    :"#{serialization_column}_store"
-  end
-
-  def storage
-    @storage ||=
-      if object_store == REMOTE_STORE
-        remote_storage
-      else
-        local_storage
-      end
-  end
-
-  def remote_storage
-    raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
-
-    CarrierWave::Storage::Fog.new(self)
-  end
-
-  def local_storage
-    CarrierWave::Storage::File.new(self)
-  end
-end
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index 3298ad104ec9..440972affecf 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -1,23 +1,40 @@
 class PersonalFileUploader < FileUploader
-  def self.dynamic_path_segment(model)
-    File.join(CarrierWave.root, model_path(model))
+  # Re-Override
+  def self.root
+    options.storage_path
   end
 
-  def self.base_dir
-    File.join(root_dir, '-', 'system')
+  def self.base_dir(model)
+    File.join(options.base_dir, model_path_segment(model))
   end
 
-  private
+  def self.model_path_segment(model)
+    return 'temp/' unless model
 
-  def secure_url
-    File.join(self.class.model_path(model), secret, file.filename)
+    File.join(model.class.to_s.underscore, model.id.to_s)
+  end
+
+  def object_store
+    return Store::LOCAL unless model
+
+    super
+  end
+
+  # Revert-Override
+  def store_dir
+    store_dirs[object_store]
+  end
+
+  def store_dirs
+    {
+      Store::LOCAL => File.join(base_dir, dynamic_segment),
+      Store::REMOTE => File.join(model_path_segment, dynamic_segment)
+    }
   end
 
-  def self.model_path(model)
-    if model
-      File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s)
-    else
-      File.join("/#{base_dir}", 'temp')
-    end
+  private
+
+  def secure_url
+    File.join('/', base_dir, secret, file.filename)
   end
 end
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index feb4f04d7b75..dfb8dccec57d 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -1,35 +1,61 @@
 module RecordsUploads
-  extend ActiveSupport::Concern
+  module Concern
+    extend ActiveSupport::Concern
 
-  included do
-    after :store,   :record_upload
-    before :remove, :destroy_upload
-  end
+    attr_accessor :upload
 
-  # After storing an attachment, create a corresponding Upload record
-  #
-  # NOTE: We're ignoring the argument passed to this callback because we want
-  # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
-  # `Tempfile` object the callback gets.
-  #
-  # Called `after :store`
-  def record_upload(_tempfile = nil)
-    return unless model
-    return unless file_storage?
-    return unless file.exists?
-
-    Upload.record(self)
-  end
+    included do
+      after  :store,  :record_upload
+      before :remove, :destroy_upload
+    end
+
+    # After storing an attachment, create a corresponding Upload record
+    #
+    # NOTE: We're ignoring the argument passed to this callback because we want
+    # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
+    # `Tempfile` object the callback gets.
+    #
+    # Called `after :store`
+    def record_upload(_tempfile = nil)
+      return unless model
+      return unless file && file.exists?
+
+      Upload.transaction do
+        uploads.where(path: upload_path).delete_all
+        upload.destroy! if upload
+
+        self.upload = build_upload_from_uploader(self)
+        upload.save!
+      end
+    end
+
+    def upload_path
+      File.join(store_dir, filename.to_s)
+    end
+
+    private
+
+    def uploads
+      Upload.order(id: :desc).where(uploader: self.class.to_s)
+    end
 
-  private
+    def build_upload_from_uploader(uploader)
+      Upload.new(
+        size: uploader.file.size,
+        path: uploader.upload_path,
+        model: uploader.model,
+        uploader: uploader.class.to_s
+      )
+    end
 
-  # Before removing an attachment, destroy any Upload records at the same path
-  #
-  # Called `before :remove`
-  def destroy_upload(*args)
-    return unless file_storage?
-    return unless file
+    # Before removing an attachment, destroy any Upload records at the same path
+    #
+    # Called `before :remove`
+    def destroy_upload(*args)
+      return unless file && file.exists?
 
-    Upload.remove_path(relative_path)
+      self.upload = nil
+      uploads.where(path: upload_path).delete_all
+    end
   end
 end
diff --git a/app/uploaders/uploader_helper.rb b/app/uploaders/uploader_helper.rb
index 7635c20ab3a5..fd446d310922 100644
--- a/app/uploaders/uploader_helper.rb
+++ b/app/uploaders/uploader_helper.rb
@@ -32,14 +32,7 @@ def dangerous?
   def extension_match?(extensions)
     return false unless file
 
-    extension =
-      if file.respond_to?(:extension)
-        file.extension
-      else
-        # Not all CarrierWave storages respond to :extension
-        File.extname(file.path).delete('.')
-      end
-
+    extension = file.try(:extension) || File.extname(file.path).delete('.')
     extensions.include?(extension.downcase)
   end
 end
diff --git a/app/uploaders/workhorse.rb b/app/uploaders/workhorse.rb
new file mode 100644
index 000000000000..782032cf5163
--- /dev/null
+++ b/app/uploaders/workhorse.rb
@@ -0,0 +1,7 @@
+module Workhorse
+  module UploadPath
+    def workhorse_upload_path
+      File.join(root, base_dir, 'tmp/uploads')
+    end
+  end
+end
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
index 0b9411ff2df6..e087261770f1 100644
--- a/app/workers/object_storage_upload_worker.rb
+++ b/app/workers/object_storage_upload_worker.rb
@@ -8,16 +8,16 @@ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
     uploader_class = uploader_class_name.constantize
     subject_class = subject_class_name.constantize
 
+    return unless uploader_class < ObjectStorage::Concern
     return unless uploader_class.object_store_enabled?
+    return unless uploader_class.licensed?
     return unless uploader_class.background_upload_enabled?
 
-    subject = subject_class.find_by(id: subject_id)
-    return unless subject
-
-    file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
-
-    return unless file.licensed?
-
-    file.migrate!(uploader_class::REMOTE_STORE)
+    subject = subject_class.find(subject_id)
+    uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+    uploader.migrate!(ObjectStorage::Store::REMOTE)
+  rescue ActiveRecord::RecordNotFound
+    # does not retry when the record does not exist
+    Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
   end
 end
diff --git a/app/workers/upload_checksum_worker.rb b/app/workers/upload_checksum_worker.rb
index 9222760c0316..65d40336f18b 100644
--- a/app/workers/upload_checksum_worker.rb
+++ b/app/workers/upload_checksum_worker.rb
@@ -3,7 +3,7 @@ class UploadChecksumWorker
 
   def perform(upload_id)
     upload = Upload.find(upload_id)
-    upload.calculate_checksum
+    upload.calculate_checksum!
     upload.save!
   rescue ActiveRecord::RecordNotFound
     Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping")
diff --git a/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml b/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml
new file mode 100644
index 000000000000..18910f0d97b0
--- /dev/null
+++ b/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml
@@ -0,0 +1,5 @@
+---
+title: Add object storage support for uploads.
+merge_request: 3867
+author:
+type: added
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index cab72032d22f..c360c42509a1 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -174,6 +174,25 @@ production: &base
         # endpoint: 'http://127.0.0.1:9000' # default: nil
         # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
 
+  ## Uploads (attachments, avatars, etc...)
+  uploads:
+    # The location where uploads objects are stored (default: public/).
+    # storage_path: public/
+    # base_dir: uploads/-/system
+    object_store:
+      enabled: true
+      remote_directory: uploads # Bucket name
+      # background_upload: false # Temporary option to limit automatic upload (Default: true)
+      connection:
+        provider: AWS
+        aws_access_key_id: AWS_ACCESS_KEY_ID
+        aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+        region: eu-central-1
+        # Use the following options to configure an AWS compatible host
+        # host: 'localhost' # default: s3.amazonaws.com
+        # endpoint: 'http://127.0.0.1:9000' # default: nil
+        # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
+
   ## GitLab Pages
   pages:
     enabled: false
@@ -686,6 +705,16 @@ test:
         aws_access_key_id: AWS_ACCESS_KEY_ID
         aws_secret_access_key: AWS_SECRET_ACCESS_KEY
         region: eu-central-1
+  uploads:
+    storage_path: tmp/tests/public
+    enabled: true
+    object_store:
+      enabled: false
+      connection:
+        provider: AWS # Only AWS supported at the moment
+        aws_access_key_id: AWS_ACCESS_KEY_ID
+        aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+        region: eu-central-1
   gitlab:
     host: localhost
     port: 80
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index b0cfd50233ae..ab953583df9b 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -298,13 +298,15 @@ def cron_for_usage_ping
 #
 Settings['artifacts'] ||= Settingslogic.new({})
 Settings.artifacts['enabled']      = true if Settings.artifacts['enabled'].nil?
-Settings.artifacts['path']         = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
-Settings.artifacts['max_size']   ||= 100 # in megabytes
+Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
+# Settings.artifacts['path'] is deprecated, use `storage_path` instead
+Settings.artifacts['path']         = Settings.artifacts['storage_path']
+Settings.artifacts['max_size'] ||= 100 # in megabytes
 
 Settings.artifacts['object_store'] ||= Settingslogic.new({})
-Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
-Settings.artifacts['object_store']['remote_directory'] ||= nil
-Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+Settings.artifacts['object_store']['enabled']           ||= false
+Settings.artifacts['object_store']['remote_directory']  ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
 
@@ -342,14 +344,26 @@ def cron_for_usage_ping
 Settings['lfs'] ||= Settingslogic.new({})
 Settings.lfs['enabled']      = true if Settings.lfs['enabled'].nil?
 Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
-
 Settings.lfs['object_store'] ||= Settingslogic.new({})
-Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
-Settings.lfs['object_store']['remote_directory'] ||= nil
-Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+Settings.lfs['object_store']['enabled']           ||= false
+Settings.lfs['object_store']['remote_directory']  ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.lfs['object_store']['connection']&.deep_stringify_keys!
 
+#
+# Uploads
+#
+Settings['uploads'] ||= Settingslogic.new({})
+Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
+Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
+Settings.uploads['object_store'] ||= Settingslogic.new({})
+Settings.uploads['object_store']['enabled']           ||= false
+Settings.uploads['object_store']['remote_directory']  ||= 'uploads'
+Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.uploads['object_store']['connection']&.deep_stringify_keys!
+
 #
 # Mattermost
 #
diff --git a/db/migrate/20171214144320_add_store_column_to_uploads.rb b/db/migrate/20171214144320_add_store_column_to_uploads.rb
new file mode 100644
index 000000000000..bad20dcdbcf5
--- /dev/null
+++ b/db/migrate/20171214144320_add_store_column_to_uploads.rb
@@ -0,0 +1,12 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddStoreColumnToUploads < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def change
+    add_column :uploads, :store, :integer
+  end
+end
diff --git a/db/migrate/20180119135717_add_uploader_index_to_uploads.rb b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
new file mode 100644
index 000000000000..a678c3d049f6
--- /dev/null
+++ b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
@@ -0,0 +1,20 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddUploaderIndexToUploads < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    remove_concurrent_index :uploads, :path
+    add_concurrent_index    :uploads, [:uploader, :path], using: :btree
+  end
+
+  def down
+    remove_concurrent_index :uploads, [:uploader, :path]
+    add_concurrent_index    :uploads, :path, using: :btree
+  end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 02c44bccc610..b6800ff926e5 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -1760,11 +1760,12 @@
     t.string "model_type"
     t.string "uploader", null: false
     t.datetime "created_at", null: false
+    t.integer "store"
   end
 
   add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
   add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
-  add_index "uploads", ["path"], name: "index_uploads_on_path", using: :btree
+  add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
 
   create_table "user_agent_details", force: :cascade do |t|
     t.string "user_agent", null: false
diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md
index cf00e24e11a8..76354b928203 100644
--- a/doc/development/file_storage.md
+++ b/doc/development/file_storage.md
@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts:
   - User snippet attachments
 * Project
   - Project avatars
-  - Issues/MR Markdown attachments
-  - Issues/MR Legacy Markdown attachments
+  - Issues/MR/Notes Markdown attachments
+  - Issues/MR/Notes Legacy Markdown attachments
   - CI Build Artifacts
   - LFS Objects
 
@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts:
 GitLab started saving everything on local disk. While directory location changed from previous versions,
 they are still not 100% standardized. You can see them below:
 
-| Description                           | In DB? | Relative path                                               | Uploader class         | model_type |
+| Description                           | In DB? | Relative path (from CarrierWave.root)                       | Uploader class         | model_type |
 | ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
 | Instance logo                         | yes    | uploads/-/system/appearance/logo/:id/:filename              | `AttachmentUploader`   | Appearance |
 | Header logo                           | yes    | uploads/-/system/appearance/header_logo/:id/:filename       | `AttachmentUploader`   | Appearance |
@@ -33,17 +33,107 @@ they are still not 100% standardized. You can see them below:
 | User avatars                          | yes    | uploads/-/system/user/avatar/:id/:filename                  | `AvatarUploader`       | User       |
 | User snippet attachments              | yes    | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet    |
 | Project avatars                       | yes    | uploads/-/system/project/avatar/:id/:filename               | `AvatarUploader`       | Project    |
-| Issues/MR Markdown attachments        | yes    | uploads/:project_path_with_namespace/:random_hex/:filename  | `FileUploader`         | Project    |
-| Issues/MR Legacy Markdown attachments | no     | uploads/-/system/note/attachment/:id/:filename              | `AttachmentUploader`   | Note       |
+| Issues/MR/Notes Markdown attachments        | yes    | uploads/:project_path_with_namespace/:random_hex/:filename  | `FileUploader`         | Project    |
+| Issues/MR/Notes Legacy Markdown attachments | no     | uploads/-/system/note/attachment/:id/:filename              | `AttachmentUploader`   | Note       |
 | CI Artifacts (CE)                     | yes    | shared/artifacts/:year_:month/:project_id/:id               | `ArtifactUploader`     | Ci::Build  |
 | LFS Objects  (CE)                     | yes    | shared/lfs-objects/:hex/:hex/:object_hash                   | `LfsObjectUploader`    | LfsObject  |
 
 CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
-while in EE they inherit the `ObjectStoreUploader` and store files in and S3 API compatible object store.
+while in EE they include the `ObjectStorage::Concern` and store files in an S3 API compatible object store.
 
-In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout,
+In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout,
 instead of basing the path on the mutable `:project_path_with_namespace`, it's possible to use a
 hash of the project ID instead, once the project has migrated to the new approach (introduced in 10.2).
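+
+For example, a sketch of the two layouts (values are illustrative; the hashed path is built from the SHA256 digest of the project ID):
+
+```ruby
+project.full_path # => "gitlab-org/gitlab-ce"
+project.disk_path # => "@hashed/6b/86/6b86b273ff34fce1..." (project ID 1, truncated)
+
+# FileUploader picks the path segment accordingly:
+FileUploader.model_path_segment(project)
+# => project.disk_path when hashed storage is enabled, project.full_path otherwise
+```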
 
+### Path segments
+
+Files are stored at multiple locations and use different path schemes.
+All `GitlabUploader`-derived classes should comply with this path segment schema:
+
+```
+|   GitlabUploader
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `<gitlab_root>/public/` | `uploads/-/system/`       | `user/avatar/:id/`                | `:filename`                      |
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `CarrierWave.root`      | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`  | `CarrierWave::Uploader#filename` |
+|                         | `CarrierWave::Uploader#store_dir`                             |                                  |
+
+|   FileUploader
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `<gitlab_root>/shared/` | `artifacts/`              | `:year_:month/:id`                | `:filename`                      |
+| `<gitlab_root>/shared/` | `snippets/`               | `:secret/`                        | `:filename`                      |
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `CarrierWave.root`      | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`  | `CarrierWave::Uploader#filename` |
+|                         | `CarrierWave::Uploader#store_dir`                             |                                  |
+|                         |                           | `FileUploader#upload_path`        |                                  |
+
+|   ObjectStorage::Concern (store = remote)
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `<bucket_name>`         | `<ignored>`               | `user/avatar/:id/`                | `:filename`                      |
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `#fog_directory`        | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`  | `CarrierWave::Uploader#filename` |
+|                         |                           | `ObjectStorage::Concern#store_dir` |                                 |
+|                         |                           | `ObjectStorage::Concern#upload_path` |                               |
+```
+
+The `RecordsUploads::Concern` concern will create an `Upload` entry for every file stored by a `GitlabUploader`, persisting the
+dynamic parts of the path in `Upload#path` (see `RecordsUploads::Concern#upload_path`). You may then use the `Upload#build_uploader` method to manipulate the file, as the sketch below shows.
+
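+For example, a minimal sketch (assuming an `Upload` record already exists for a user's avatar) of getting a usable uploader back from its `Upload`:
+
+```ruby
+upload = Upload.find_by(uploader: 'AvatarUploader', model: user)
+
+uploader = upload.build_uploader # instantiates the uploader and calls retrieve_from_store!
+uploader.exists?     # => true
+uploader.upload_path # => "uploads/-/system/user/avatar/1/avatar.png" (illustrative)
+```
+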
+## Object Storage
+
+Including `ObjectStorage::Concern` in a `GitlabUploader`-derived class adds object storage support to that uploader. To enable it,
+you need to either 1) include `RecordsUploads::Concern` and prepend `ObjectStorage::Extension::RecordsUploads`, or 2) mount the uploader and create a new field named `<mount>_store`.
+
+The `CarrierWave::Uploader#store_dir` is overridden to (see the sketch after this list):
+
+ - `GitlabUploader.base_dir` + `GitlabUploader#dynamic_segment` when the store is LOCAL
+ - `GitlabUploader#dynamic_segment` when the store is REMOTE (the bucket name is used to namespace)
+
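+A sketch of the resulting paths, assuming an `AvatarUploader` (with `base_dir` `uploads/-/system`) mounted on a user with ID 1:
+
+```ruby
+uploader.store_dirs
+# => {
+#      1 => "uploads/-/system/user/avatar/1", # Store::LOCAL  => base_dir + dynamic_segment
+#      2 => "user/avatar/1"                   # Store::REMOTE => dynamic_segment only
+#    }
+```
+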
+### Using `ObjectStorage::Extension::RecordsUploads`
+
+> Note: this concern will automatically include `RecordsUploads::Concern` if not already included.
+
+An uploader that includes `ObjectStorage::Concern` will search for the matching `Upload` record to select the correct object store. The `Upload` is located by matching `#store_dirs` + `identifier` against `Upload#path` for each store (LOCAL/REMOTE).
+
+```ruby
+class SongUploader < GitlabUploader
+  include RecordsUploads::Concern
+  include ObjectStorage::Concern
+  prepend ObjectStorage::Extension::RecordsUploads
+
+  ...
+end
+
+class Thing < ActiveRecord::Base
+  mount_uploader :theme, SongUploader # we have a great theme song!
+
+  ...
+end
+```
+
+### Using a mounted uploader
+
+The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store.
+This column must be present in the model schema.
+
+```ruby
+class SongUploader < GitlabUploader
+  include ObjectStorage::Concern
+
+  ...
+end
+
+class Thing < ActiveRecord::Base
+  mount_uploader :theme, SongUploader # we have a great theme song!
+
+  # `theme_store` is an ActiveRecord attribute (an integer column in the schema)
+  def theme_store
+    super || ObjectStorage::Store::LOCAL
+  end
+
+  ...
+end
+```
+
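+The `<mount>_store` column can be added with a regular migration. A minimal sketch, assuming a `things` table backs the model above:
+
+```ruby
+class AddThemeStoreToThings < ActiveRecord::Migration
+  DOWNTIME = false
+
+  def change
+    # integer enum: 1 = ObjectStorage::Store::LOCAL, 2 = ObjectStorage::Store::REMOTE
+    add_column :things, :theme_store, :integer
+  end
+end
+```
+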
 [CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
 [Hashed Storage]: ../administration/repository_storage_types.md
diff --git a/ee/app/models/ee/ci/job_artifact.rb b/ee/app/models/ee/ci/job_artifact.rb
new file mode 100644
index 000000000000..02c6715f4471
--- /dev/null
+++ b/ee/app/models/ee/ci/job_artifact.rb
@@ -0,0 +1,25 @@
+module EE
+  # CI::JobArtifact EE mixin
+  #
+  # This module is intended to encapsulate EE-specific model logic
+  # and be prepended in the `Ci::JobArtifact` model
+  module Ci::JobArtifact
+    extend ActiveSupport::Concern
+
+    prepended do
+      after_destroy :log_geo_event
+
+      scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
+    end
+
+    def local_store?
+      [nil, JobArtifactUploader::Store::LOCAL].include?(self.file_store)
+    end
+
+    private
+
+    def log_geo_event
+      ::Geo::JobArtifactDeletedEventStore.new(self).create
+    end
+  end
+end
diff --git a/ee/app/models/ee/lfs_object.rb b/ee/app/models/ee/lfs_object.rb
new file mode 100644
index 000000000000..6962c2bea4fc
--- /dev/null
+++ b/ee/app/models/ee/lfs_object.rb
@@ -0,0 +1,23 @@
+module EE
+  # LFS Object EE mixin
+  #
+  # This module is intended to encapsulate EE-specific model logic
+  # and be prepended in the `LfsObject` model
+  module LfsObject
+    extend ActiveSupport::Concern
+
+    prepended do
+      after_destroy :log_geo_event
+    end
+
+    def local_store?
+      [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+    end
+
+    private
+
+    def log_geo_event
+      ::Geo::LfsObjectDeletedEventStore.new(self).create
+    end
+  end
+end
diff --git a/ee/app/models/geo/fdw/ci/job_artifact.rb b/ee/app/models/geo/fdw/ci/job_artifact.rb
new file mode 100644
index 000000000000..eaca84b332e8
--- /dev/null
+++ b/ee/app/models/geo/fdw/ci/job_artifact.rb
@@ -0,0 +1,11 @@
+module Geo
+  module Fdw
+    module Ci
+      class JobArtifact < ::Geo::BaseFdw
+        self.table_name = Gitlab::Geo.fdw_table('ci_job_artifacts')
+
+        scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
+      end
+    end
+  end
+end
diff --git a/ee/app/models/geo/fdw/lfs_object.rb b/ee/app/models/geo/fdw/lfs_object.rb
new file mode 100644
index 000000000000..18aae28518d9
--- /dev/null
+++ b/ee/app/models/geo/fdw/lfs_object.rb
@@ -0,0 +1,9 @@
+module Geo
+  module Fdw
+    class LfsObject < ::Geo::BaseFdw
+      self.table_name = Gitlab::Geo.fdw_table('lfs_objects')
+
+      scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+    end
+  end
+end
diff --git a/ee/app/services/geo/files_expire_service.rb b/ee/app/services/geo/files_expire_service.rb
new file mode 100644
index 000000000000..e3604674d858
--- /dev/null
+++ b/ee/app/services/geo/files_expire_service.rb
@@ -0,0 +1,77 @@
+module Geo
+  class FilesExpireService
+    include ::Gitlab::Geo::LogHelpers
+
+    BATCH_SIZE = 500
+
+    attr_reader :project, :old_full_path
+
+    def initialize(project, old_full_path)
+      @project = project
+      @old_full_path = old_full_path
+    end
+
+    # Expire already replicated uploads
+    #
+    # This is a fallback solution to support projects that haven't rolled out to hashed-storage yet.
+    #
+    # Note: Unless we add some locking mechanism, this will be best effort only:
+    # if files are being replicated during this execution, they will not
+    # be expired.
+    #
+    # The long-term solution is to use hashed storage.
+    def execute
+      return unless Gitlab::Geo.secondary?
+
+      uploads = finder.find_project_uploads(project)
+      log_info("Expiring replicated attachments after project rename", count: uploads.count)
+
+      schedule_file_removal(uploads)
+      mark_for_resync!
+    end
+
+    # Project's base directory for attachments storage
+    #
+    # @return base directory where all uploads for the project are stored
+    def base_dir
+      @base_dir ||= File.join(FileUploader.root, old_full_path)
+    end
+
+    private
+
+    def schedule_file_removal(uploads)
+      paths_to_remove = uploads.find_each(batch_size: BATCH_SIZE).each_with_object([]) do |upload, to_remove|
+        file_path = File.join(base_dir, upload.path)
+
+        if File.exist?(file_path)
+          to_remove << [file_path]
+
+          log_info("Scheduled to remove file", file_path: file_path)
+        end
+      end
+
+      Geo::FileRemovalWorker.bulk_perform_async(paths_to_remove)
+    end
+
+    def mark_for_resync!
+      finder.find_file_registries_uploads(project).delete_all
+    end
+
+    def finder
+      @finder ||= ::Geo::ExpireUploadsFinder.new
+    end
+
+    # This is called by LogHelpers to build json log with context info
+    #
+    # @see ::Gitlab::Geo::LogHelpers
+    def base_log_data(message)
+      {
+        class: self.class.name,
+        project_id: project.id,
+        project_path: project.full_path,
+        project_old_path: old_full_path,
+        message: message
+      }
+    end
+  end
+end
diff --git a/ee/app/services/geo/hashed_storage_attachments_migration_service.rb b/ee/app/services/geo/hashed_storage_attachments_migration_service.rb
new file mode 100644
index 000000000000..d967d8f6d5e6
--- /dev/null
+++ b/ee/app/services/geo/hashed_storage_attachments_migration_service.rb
@@ -0,0 +1,55 @@
+module Geo
+  AttachmentMigrationError = Class.new(StandardError)
+
+  class HashedStorageAttachmentsMigrationService
+    include ::Gitlab::Geo::LogHelpers
+
+    attr_reader :project_id, :old_attachments_path, :new_attachments_path
+
+    def initialize(project_id, old_attachments_path:, new_attachments_path:)
+      @project_id = project_id
+      @old_attachments_path = old_attachments_path
+      @new_attachments_path = new_attachments_path
+    end
+
+    def async_execute
+      Geo::HashedStorageAttachmentsMigrationWorker.perform_async(
+        project_id,
+        old_attachments_path,
+        new_attachments_path
+      )
+    end
+
+    def execute
+      origin = File.join(FileUploader.root, old_attachments_path)
+      target = File.join(FileUploader.root, new_attachments_path)
+      move_folder!(origin, target)
+    end
+
+    private
+
+    def project
+      @project ||= Project.find(project_id)
+    end
+
+    def move_folder!(old_path, new_path)
+      unless File.directory?(old_path)
+        log_info("Skipped attachments migration to Hashed Storage, source path doesn't exist or is not a directory", project_id: project.id, source: old_path, target: new_path)
+        return
+      end
+
+      if File.exist?(new_path)
+        log_error("Cannot migrate attachments to Hashed Storage, target path already exists", project_id: project.id, source: old_path, target: new_path)
+        raise AttachmentMigrationError, "Target path '#{new_path}' already exists"
+      end
+
+      # Create hashed storage base path folder
+      FileUtils.mkdir_p(File.dirname(new_path))
+
+      FileUtils.mv(old_path, new_path)
+      log_info("Migrated project attachments to Hashed Storage", project_id: project.id, source: old_path, target: new_path)
+
+      true
+    end
+  end
+end
diff --git a/ee/app/services/geo/job_artifact_deleted_event_store.rb b/ee/app/services/geo/job_artifact_deleted_event_store.rb
new file mode 100644
index 000000000000..7455773985c7
--- /dev/null
+++ b/ee/app/services/geo/job_artifact_deleted_event_store.rb
@@ -0,0 +1,48 @@
+module Geo
+  class JobArtifactDeletedEventStore < EventStore
+    self.event_type = :job_artifact_deleted_event
+
+    attr_reader :job_artifact
+
+    def initialize(job_artifact)
+      @job_artifact = job_artifact
+    end
+
+    def create
+      return unless job_artifact.local_store?
+
+      super
+    end
+
+    private
+
+    def build_event
+      Geo::JobArtifactDeletedEvent.new(
+        job_artifact: job_artifact,
+        file_path: relative_file_path
+      )
+    end
+
+    def local_store_path
+      Pathname.new(JobArtifactUploader.root)
+    end
+
+    def relative_file_path
+      return unless job_artifact.file.present?
+
+      Pathname.new(job_artifact.file.path).relative_path_from(local_store_path)
+    end
+
+    # This is called by ProjectLogHelpers to build json log with context info
+    #
+    # @see ::Gitlab::Geo::ProjectLogHelpers
+    def base_log_data(message)
+      {
+        class: self.class.name,
+        job_artifact_id: job_artifact.id,
+        file_path: job_artifact.file.path,
+        message: message
+      }
+    end
+  end
+end
diff --git a/ee/app/services/geo/lfs_object_deleted_event_store.rb b/ee/app/services/geo/lfs_object_deleted_event_store.rb
new file mode 100644
index 000000000000..9eb47f914727
--- /dev/null
+++ b/ee/app/services/geo/lfs_object_deleted_event_store.rb
@@ -0,0 +1,49 @@
+module Geo
+  class LfsObjectDeletedEventStore < EventStore
+    self.event_type = :lfs_object_deleted_event
+
+    attr_reader :lfs_object
+
+    def initialize(lfs_object)
+      @lfs_object = lfs_object
+    end
+
+    def create
+      return unless lfs_object.local_store?
+
+      super
+    end
+
+    private
+
+    def build_event
+      Geo::LfsObjectDeletedEvent.new(
+        lfs_object: lfs_object,
+        oid: lfs_object.oid,
+        file_path: relative_file_path
+      )
+    end
+
+    def local_store_path
+      Pathname.new(LfsObjectUploader.root)
+    end
+
+    def relative_file_path
+      return unless lfs_object.file.present?
+
+      Pathname.new(lfs_object.file.path).relative_path_from(local_store_path)
+    end
+
+    # This is called by ProjectLogHelpers to build json log with context info
+    #
+    # @see ::Gitlab::Geo::ProjectLogHelpers
+    def base_log_data(message)
+      {
+        class: self.class.name,
+        lfs_object_id: lfs_object.id,
+        file_path: lfs_object.file.path,
+        message: message
+      }
+    end
+  end
+end
diff --git a/ee/app/uploaders/object_storage.rb b/ee/app/uploaders/object_storage.rb
new file mode 100644
index 000000000000..e5b087524f5f
--- /dev/null
+++ b/ee/app/uploaders/object_storage.rb
@@ -0,0 +1,265 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+#
+# This concern should add object storage support
+# to the GitlabUploader class
+#
+module ObjectStorage
+  RemoteStoreError = Class.new(StandardError)
+  UnknownStoreError = Class.new(StandardError)
+  ObjectStoreUnavailable = Class.new(StandardError)
+
+  module Store
+    LOCAL = 1
+    REMOTE = 2
+  end
+
+  module Extension
+    # this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
+    module RecordsUploads
+      extend ActiveSupport::Concern
+
+      prepended do |base|
+        raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
+
+        base.include(::RecordsUploads::Concern)
+      end
+
+      def retrieve_from_store!(identifier)
+        paths = store_dirs.map { |store, path| File.join(path, identifier) }
+
+        unless current_upload_satisfies?(paths, model)
+          # the upload we already have isn't right, find the correct one
+          self.upload = uploads.find_by(model: model, path: paths)
+        end
+
+        super
+      end
+
+      def build_upload_from_uploader(uploader)
+        super.tap { |upload| upload.store = object_store }
+      end
+
+      def upload=(upload)
+        return unless upload
+
+        self.object_store = upload.store
+        super
+      end
+
+      private
+
+      def current_upload_satisfies?(paths, model)
+        return false unless upload
+        return false unless model
+
+        paths.include?(upload.path) &&
+          upload.model_id == model.id &&
+          upload.model_type == model.class.base_class.sti_name
+      end
+    end
+  end
+
+  module Concern
+    extend ActiveSupport::Concern
+
+    included do |base|
+      base.include(ObjectStorage)
+
+      before :store, :verify_license!
+      after :migrate, :delete_migrated_file
+    end
+
+    class_methods do
+      def object_store_options
+        options.object_store
+      end
+
+      def object_store_enabled?
+        object_store_options.enabled
+      end
+
+      def background_upload_enabled?
+        object_store_options.background_upload
+      end
+
+      def object_store_credentials
+        object_store_options.connection.to_hash.deep_symbolize_keys
+      end
+
+      def remote_store_path
+        object_store_options.remote_directory
+      end
+
+      def licensed?
+        License.feature_available?(:object_storage)
+      end
+    end
+
+    def file_storage?
+      storage.is_a?(CarrierWave::Storage::File)
+    end
+
+    def file_cache_storage?
+      cache_storage.is_a?(CarrierWave::Storage::File)
+    end
+
+    def object_store
+      @object_store ||= model.try(store_serialization_column) || Store::LOCAL
+    end
+
+    # rubocop:disable Gitlab/ModuleWithInstanceVariables
+    def object_store=(value)
+      @object_store = value || Store::LOCAL
+      @storage = storage_for(object_store)
+    end
+    # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+    # Returns true if the current file is part of the model (i.e. it is mounted in the model)
+    #
+    def persist_object_store?
+      model.respond_to?(:"#{store_serialization_column}=")
+    end
+
+    # Save the current @object_store to the model <mounted_as>_store column
+    def persist_object_store!
+      return unless persist_object_store?
+
+      updated = model.update_column(store_serialization_column, object_store)
+      raise ActiveRecord::ActiveRecordError unless updated
+    end
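+
+    # NOTE: update_column writes straight to the database, bypassing
+    # validations and callbacks, so the store flag is persisted even if the
+    # model has other unsaved changes.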
+
+    def use_file
+      if file_storage?
+        return yield path
+      end
+
+      begin
+        cache_stored_file!
+        yield cache_path
+      ensure
+        cache_storage.delete_dir!(cache_path(nil))
+      end
+    end
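+
+    # Illustrative usage (a sketch): the block always receives a local
+    # filesystem path; for remote files a cached copy is yielded and the
+    # cache is cleaned up afterwards.
+    #
+    #   uploader.use_file do |path|
+    #     do_something_with(path) # `do_something_with` is hypothetical
+    #   end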
+
+    def filename
+      super || file&.filename
+    end
+
+    #
+    # Move the file to another store
+    #
+    #   new_store: Enum (Store::LOCAL, Store::REMOTE)
+    #
+    def migrate!(new_store)
+      return if object_store == new_store
+      return unless file
+
+      new_file = nil
+      file_to_delete = file
+      from_object_store = object_store
+      self.object_store = new_store # changes the storage and file
+
+      cache_stored_file! if file_storage?
+
+      with_callbacks(:migrate, file_to_delete) do
+        with_callbacks(:store, file_to_delete) do # for #store_versions!
+          new_file = storage.store!(file)
+          persist_object_store!
+          self.file = new_file
+        end
+      end
+
+      file
+    rescue => e
+      # In case of failure, delete the new file
+      new_file.delete unless new_file.nil?
+      # and revert to the old file
+      self.object_store = from_object_store
+      self.file = file_to_delete
+      raise e
+    end
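+
+    # Illustrative usage (a sketch; mirrors the rake tasks further down in
+    # this patch):
+    #
+    #   build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)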
+
+    def schedule_migration_to_object_storage(*args)
+      return unless self.class.object_store_enabled?
+      return unless self.class.background_upload_enabled?
+      return unless self.class.licensed?
+      return unless self.file_storage?
+
+      ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
+    end
+
+    def fog_directory
+      self.class.remote_store_path
+    end
+
+    def fog_credentials
+      self.class.object_store_credentials
+    end
+
+    def fog_public
+      false
+    end
+
+    def delete_migrated_file(migrated_file)
+      migrated_file.delete if exists?
+    end
+
+    def verify_license!(_file)
+      return if file_storage?
+
+      raise 'Object Storage feature is missing' unless self.class.licensed?
+    end
+
+    def exists?
+      file.present?
+    end
+
+    def store_dir(store = nil)
+      store_dirs[store || object_store]
+    end
+
+    def store_dirs
+      {
+        Store::LOCAL => File.join(base_dir, dynamic_segment),
+        Store::REMOTE => File.join(dynamic_segment)
+      }
+    end
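+
+    # For example, given a (hypothetical) base_dir "uploads" and
+    # dynamic_segment "foo/bar", this yields:
+    #
+    #   { Store::LOCAL => "uploads/foo/bar", Store::REMOTE => "foo/bar" }
+    #
+    # i.e. remote objects are keyed without the local base directory.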
+
+    private
+
+    # this is a hack around CarrierWave. The #migrate method needs to be
+    # able to force the current file to the migrated file upon success.
+    def file=(file)
+      @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+    end
+
+    def serialization_column
+      model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
+    end
+
+    # Returns the column where the 'store' is saved,
+    #   e.g. :file_store for a mount named :file
+    def store_serialization_column
+      [serialization_column, 'store'].compact.join('_').to_sym
+    end
+
+    def storage
+      @storage ||= storage_for(object_store)
+    end
+
+    def storage_for(store)
+      case store
+      when Store::REMOTE
+        raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+        CarrierWave::Storage::Fog.new(self)
+      when Store::LOCAL
+        CarrierWave::Storage::File.new(self)
+      else
+        raise UnknownStoreError
+      end
+    end
+  end
+end
diff --git a/ee/lib/gitlab/geo/file_transfer.rb b/ee/lib/gitlab/geo/file_transfer.rb
new file mode 100644
index 000000000000..16db6f2d4486
--- /dev/null
+++ b/ee/lib/gitlab/geo/file_transfer.rb
@@ -0,0 +1,24 @@
+module Gitlab
+  module Geo
+    class FileTransfer < Transfer
+      def initialize(file_type, upload)
+        @file_type = file_type
+        @file_id = upload.id
+        @filename = upload.absolute_path
+        @request_data = build_request_data(upload)
+      rescue ObjectStorage::RemoteStoreError
+        Rails.logger.warn "Cannot transfer a remote object."
+      end
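+
+      # Illustrative usage (a sketch, mirroring the specs in this patch):
+      #
+      #   Gitlab::Geo::FileTransfer.new(:file, upload)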
+
+      private
+
+      def build_request_data(upload)
+        {
+          id: upload.model_id,
+          type: upload.model_type,
+          checksum: upload.checksum
+        }
+      end
+    end
+  end
+end
diff --git a/ee/lib/gitlab/geo/log_cursor/daemon.rb b/ee/lib/gitlab/geo/log_cursor/daemon.rb
new file mode 100644
index 000000000000..d4596286641c
--- /dev/null
+++ b/ee/lib/gitlab/geo/log_cursor/daemon.rb
@@ -0,0 +1,266 @@
+module Gitlab
+  module Geo
+    module LogCursor
+      class Daemon
+        VERSION = '0.2.0'.freeze
+        BATCH_SIZE = 250
+        SECONDARY_CHECK_INTERVAL = 1.minute
+
+        attr_reader :options
+
+        def initialize(options = {})
+          @options = options
+          @exit = false
+          logger.geo_logger.build.level = options[:debug] ? :debug : Rails.logger.level
+        end
+
+        def run!
+          trap_signals
+
+          until exit?
+            # Prevent the node from processing events unless it's a secondary
+            unless Geo.secondary?
+              sleep(SECONDARY_CHECK_INTERVAL)
+              next
+            end
+
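+            # Only one log cursor should replay events at a time; the lease
+            # (backed by Gitlab::ExclusiveLease, per the specs below) guards
+            # this across processes.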
+            lease = Lease.try_obtain_with_ttl { run_once! }
+
+            return if exit?
+
+            # When no new event is found, sleep for a few moments
+            arbitrary_sleep(lease[:ttl])
+          end
+        end
+
+        def run_once!
+          LogCursor::Events.fetch_in_batches { |batch| handle_events(batch) }
+        end
+
+        def handle_events(batch)
+          batch.each do |event_log|
+            next unless can_replay?(event_log)
+
+            begin
+              event = event_log.event
+              handler = "handle_#{event.class.name.demodulize.underscore}"
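+              # e.g. Geo::RepositoryCreatedEvent maps to
+              # #handle_repository_created_event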
+
+              __send__(handler, event, event_log.created_at) # rubocop:disable GitlabSecurity/PublicSend
+            rescue NoMethodError => e
+              logger.error(e.message)
+              raise e
+            end
+          end
+        end
+
+        private
+
+        def trap_signals
+          trap(:TERM) do
+            quit!
+          end
+          trap(:INT) do
+            quit!
+          end
+        end
+
+        # Safe shutdown
+        def quit!
+          $stdout.puts 'Exiting...'
+
+          @exit = true
+        end
+
+        def exit?
+          @exit
+        end
+
+        def can_replay?(event_log)
+          return true if event_log.project_id.nil?
+
+          Gitlab::Geo.current_node&.projects_include?(event_log.project_id)
+        end
+
+        def handle_repository_created_event(event, created_at)
+          registry = find_or_initialize_registry(event.project_id, resync_repository: true, resync_wiki: event.wiki_path.present?)
+
+          logger.event_info(
+            created_at,
+            message: 'Repository created',
+            project_id: event.project_id,
+            repo_path: event.repo_path,
+            wiki_path: event.wiki_path,
+            resync_repository: registry.resync_repository,
+            resync_wiki: registry.resync_wiki)
+
+          registry.save!
+
+          ::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
+        end
+
+        def handle_repository_updated_event(event, created_at)
+          registry = find_or_initialize_registry(event.project_id, "resync_#{event.source}" => true)
+
+          logger.event_info(
+            created_at,
+            message: 'Repository update',
+            project_id: event.project_id,
+            source: event.source,
+            resync_repository: registry.resync_repository,
+            resync_wiki: registry.resync_wiki)
+
+          registry.save!
+
+          ::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
+        end
+
+        def handle_repository_deleted_event(event, created_at)
+          job_id = ::Geo::RepositoryDestroyService
+                     .new(event.project_id, event.deleted_project_name, event.deleted_path, event.repository_storage_name)
+                     .async_execute
+
+          logger.event_info(
+            created_at,
+            message: 'Deleted project',
+            project_id: event.project_id,
+            repository_storage_name: event.repository_storage_name,
+            disk_path: event.deleted_path,
+            job_id: job_id)
+
+          # No need to create a project entry if it doesn't exist
+          ::Geo::ProjectRegistry.where(project_id: event.project_id).delete_all
+        end
+
+        def handle_repositories_changed_event(event, created_at)
+          return unless Gitlab::Geo.current_node.id == event.geo_node_id
+
+          job_id = ::Geo::RepositoriesCleanUpWorker.perform_in(1.hour, event.geo_node_id)
+
+          if job_id
+            logger.info('Scheduled repositories clean up for Geo node', geo_node_id: event.geo_node_id, job_id: job_id)
+          else
+            logger.error('Could not schedule repositories clean up for Geo node', geo_node_id: event.geo_node_id)
+          end
+        end
+
+        def handle_repository_renamed_event(event, created_at)
+          return unless event.project_id
+
+          old_path = event.old_path_with_namespace
+          new_path = event.new_path_with_namespace
+
+          job_id = ::Geo::RenameRepositoryService
+                     .new(event.project_id, old_path, new_path)
+                     .async_execute
+
+          logger.event_info(
+            created_at,
+            message: 'Renaming project',
+            project_id: event.project_id,
+            old_path: old_path,
+            new_path: new_path,
+            job_id: job_id)
+        end
+
+        def handle_hashed_storage_migrated_event(event, created_at)
+          return unless event.project_id
+
+          job_id = ::Geo::HashedStorageMigrationService.new(
+            event.project_id,
+            old_disk_path: event.old_disk_path,
+            new_disk_path: event.new_disk_path,
+            old_storage_version: event.old_storage_version
+          ).async_execute
+
+          logger.event_info(
+            created_at,
+            message: 'Migrating project to hashed storage',
+            project_id: event.project_id,
+            old_storage_version: event.old_storage_version,
+            new_storage_version: event.new_storage_version,
+            old_disk_path: event.old_disk_path,
+            new_disk_path: event.new_disk_path,
+            job_id: job_id)
+        end
+
+        def handle_hashed_storage_attachments_event(event, created_at)
+          job_id = ::Geo::HashedStorageAttachmentsMigrationService.new(
+            event.project_id,
+            old_attachments_path: event.old_attachments_path,
+            new_attachments_path: event.new_attachments_path
+          ).async_execute
+
+          logger.event_info(
+            created_at,
+            message: 'Migrating attachments to hashed storage',
+            project_id: event.project_id,
+            old_attachments_path: event.old_attachments_path,
+            new_attachments_path: event.new_attachments_path,
+            job_id: job_id
+          )
+        end
+
+        def handle_lfs_object_deleted_event(event, created_at)
+          file_path = File.join(LfsObjectUploader.root, event.file_path)
+
+          job_id = ::Geo::FileRemovalWorker.perform_async(file_path)
+
+          logger.event_info(
+            created_at,
+            message: 'Deleted LFS object',
+            oid: event.oid,
+            file_id: event.lfs_object_id,
+            file_path: file_path,
+            job_id: job_id)
+
+          ::Geo::FileRegistry.lfs_objects.where(file_id: event.lfs_object_id).delete_all
+        end
+
+        def handle_job_artifact_deleted_event(event, created_at)
+          file_registry_job_artifacts = ::Geo::FileRegistry.job_artifacts.where(file_id: event.job_artifact_id)
+          return unless file_registry_job_artifacts.any? # avoid race condition
+
+          file_path = File.join(::JobArtifactUploader.root, event.file_path)
+
+          if File.file?(file_path)
+            deleted = delete_file(file_path) # delete synchronously to ensure consistency
+            return unless deleted # do not delete file from registry if deletion failed
+          end
+
+          logger.event_info(
+            created_at,
+            message: 'Deleted job artifact',
+            file_id: event.job_artifact_id,
+            file_path: file_path)
+
+          file_registry_job_artifacts.delete_all
+        end
+
+        def find_or_initialize_registry(project_id, attrs)
+          registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: project_id)
+          registry.assign_attributes(attrs)
+          registry
+        end
+
+        def delete_file(path)
+          File.delete(path)
+        rescue => ex
+          logger.error("Failed to remove file", exception: ex.class.name, details: ex.message, filename: path)
+          false
+        end
+
+        # Sleeps for the TTL that remains on the lease plus some random seconds.
+        #
+        # This allows multiple GeoLogCursors to randomly process a batch of events,
+        # without favouring the shortest path (or latency).
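+        #
+        # For example, with delay = 2 this sleeps between 2.1 and 4.0
+        # seconds (the remaining TTL plus 0.1..2.0s of jitter).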
+        def arbitrary_sleep(delay)
+          sleep(delay + rand(1..20) * 0.1)
+        end
+
+        def logger
+          Gitlab::Geo::LogCursor::Logger
+        end
+      end
+    end
+  end
+end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 80feb629d54c..1f80646a2ea5 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -215,9 +215,9 @@ class Runner < Grape::API
         job = authenticate_job!
         forbidden!('Job is not running!') unless job.running?
 
-        artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
-        artifacts = uploaded_file(:file, artifacts_upload_path)
-        metadata = uploaded_file(:metadata, artifacts_upload_path)
+        workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
+        artifacts = uploaded_file(:file, workhorse_upload_path)
+        metadata = uploaded_file(:metadata, workhorse_upload_path)
 
         bad_request!('Missing artifacts file!') unless artifacts
         file_to_large! unless artifacts.size < max_artifacts_size
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 7a582a200561..4383124d150b 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@
 module Backup
   class Artifacts < Files
     def initialize
-      super('artifacts', LegacyArtifactUploader.local_store_path)
+      super('artifacts', JobArtifactUploader.root)
     end
 
     def create_files_dir
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb
index 81e95e5832d4..759bdeb4bb3d 100644
--- a/lib/gitlab/background_migration/populate_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb
@@ -143,7 +143,7 @@ def path_relative_to_upload_dir
         end
 
         def absolute_path
-          File.join(CarrierWave.root, path)
+          File.join(Gitlab.config.uploads.storage_path, path)
         end
       end
 
diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
index 476c46341aec..8d126a34dffe 100644
--- a/lib/gitlab/background_migration/prepare_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
@@ -10,9 +10,12 @@ class PrepareUntrackedUploads # rubocop:disable Metrics/ClassLength
 
       FIND_BATCH_SIZE = 500
       RELATIVE_UPLOAD_DIR = "uploads".freeze
-      ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
+      ABSOLUTE_UPLOAD_DIR = File.join(
+        Gitlab.config.uploads.storage_path,
+        RELATIVE_UPLOAD_DIR
+      )
       FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
-      START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
+      START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
       EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
       EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
 
@@ -80,7 +83,7 @@ def yield_paths_in_batches(stdout, batch_size, &block)
         paths = []
 
         stdout.each_line("\0") do |line|
-          paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
+          paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
 
           if paths.size >= batch_size
             yield(paths)
diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb
index 8fab54896160..3fdc3c27f739 100644
--- a/lib/gitlab/gfm/uploads_rewriter.rb
+++ b/lib/gitlab/gfm/uploads_rewriter.rb
@@ -27,7 +27,7 @@ def rewrite(target_project)
           with_link_in_tmp_dir(file.file) do |open_tmp_file|
             new_uploader.store!(open_tmp_file)
           end
-          new_uploader.to_markdown
+          new_uploader.markdown_link
         end
       end
 
diff --git a/lib/gitlab/import_export/uploads_saver.rb b/lib/gitlab/import_export/uploads_saver.rb
index 627a487d5779..2f08dda55fdc 100644
--- a/lib/gitlab/import_export/uploads_saver.rb
+++ b/lib/gitlab/import_export/uploads_saver.rb
@@ -17,15 +17,13 @@ def save
         false
       end
 
-      private
+      def uploads_path
+        FileUploader.absolute_base_dir(@project)
+      end
 
       def uploads_export_path
         File.join(@shared.export_path, 'uploads')
       end
-
-      def uploads_path
-        FileUploader.dynamic_path_segment(@project)
-      end
     end
   end
 end
diff --git a/lib/gitlab/uploads_transfer.rb b/lib/gitlab/uploads_transfer.rb
index b5f41240529c..7d7400bdabf8 100644
--- a/lib/gitlab/uploads_transfer.rb
+++ b/lib/gitlab/uploads_transfer.rb
@@ -1,7 +1,7 @@
 module Gitlab
   class UploadsTransfer < ProjectTransfer
     def root_dir
-      File.join(CarrierWave.root, FileUploader.base_dir)
+      FileUploader.root
     end
   end
 end
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 5ab6cd5a4ef9..dfe8acd4833c 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -51,14 +51,14 @@ def git_http_ok(repository, is_wiki, user, action, show_all_refs: false)
 
       def lfs_upload_ok(oid, size)
         {
-          StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload",
+          StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
           LfsOid: oid,
           LfsSize: size
         }
       end
 
       def artifact_upload_ok
-        { TempPath: JobArtifactUploader.artifacts_upload_path }
+        { TempPath: JobArtifactUploader.workhorse_upload_path }
       end
 
       def send_git_blob(repository, blob)
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index 494317d99c77..bfca4bfb3f71 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -12,8 +12,8 @@ namespace :gitlab do
         .with_artifacts_stored_locally
         .find_each(batch_size: 10) do |build|
         begin
-          build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
-          build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
+          build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
+          build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
 
           logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
         rescue => e
diff --git a/lib/tasks/gitlab/lfs.rake b/lib/tasks/gitlab/lfs.rake
index c17c05f8589a..a45e5ca91e0c 100644
--- a/lib/tasks/gitlab/lfs.rake
+++ b/lib/tasks/gitlab/lfs.rake
@@ -10,7 +10,7 @@ namespace :gitlab do
       LfsObject.with_files_stored_locally
         .find_each(batch_size: 10) do |lfs_object|
           begin
-            lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+            lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
 
             logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
           rescue => e
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 67a11e56e947..6a1869d1a48b 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -6,5 +6,7 @@
     { group_id: model }
   end
 
-  it_behaves_like 'handle uploads'
+  it_behaves_like 'handle uploads' do
+    let(:uploader_class) { NamespaceFileUploader }
+  end
 end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 46d618fa6826..4ea6f869aa33 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,8 +145,8 @@ def params
       context 'when using local file storage' do
         it_behaves_like 'a valid file' do
           let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
-          let(:store) { ObjectStoreUploader::LOCAL_STORE }
-          let(:archive_path) { JobArtifactUploader.local_store_path }
+          let(:store) { ObjectStorage::Store::LOCAL }
+          let(:archive_path) { JobArtifactUploader.root }
         end
       end
 
@@ -158,7 +158,7 @@ def params
         it_behaves_like 'a valid file' do
           let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
           let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
-          let(:store) { ObjectStoreUploader::REMOTE_STORE }
+          let(:store) { ObjectStorage::Store::REMOTE }
           let(:archive_path) { 'https://' }
         end
       end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index e4310a4847b0..08e2ccf893a4 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -47,7 +47,7 @@
           end
 
           it 'serves the file' do
-            expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
+            expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
             get_show(public_project, id)
 
             expect(response).to have_gitlab_http_status(200)
@@ -58,7 +58,7 @@
               lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
               lfs_object.save!
               stub_lfs_object_storage
-              lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+              lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
             end
 
             it 'responds with redirect to file' do
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index b1f601a19e59..376b229ffc98 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -180,6 +180,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
               response
             end
           end
@@ -196,6 +197,7 @@
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
             response
           end
         end
@@ -220,6 +222,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
               response
             end
           end
@@ -239,6 +242,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
               response
             end
           end
@@ -291,6 +295,7 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
                   response
                 end
               end
@@ -322,6 +327,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
               response
             end
           end
@@ -341,6 +347,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
               response
             end
           end
@@ -384,6 +391,7 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
                   response
                 end
               end
@@ -420,6 +428,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
               response
             end
           end
@@ -439,6 +448,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
               response
             end
           end
@@ -491,6 +501,7 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
                   response
                 end
               end
@@ -522,6 +533,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+
               response
             end
           end
@@ -541,6 +553,7 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+
               response
             end
           end
diff --git a/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
new file mode 100644
index 000000000000..9f0f5f2ab873
--- /dev/null
+++ b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
@@ -0,0 +1,270 @@
+require 'spec_helper'
+
+describe Geo::AttachmentRegistryFinder, :geo do
+  include ::EE::GeoHelpers
+
+  let(:secondary) { create(:geo_node) }
+
+  let(:synced_group) { create(:group) }
+  let(:synced_subgroup) { create(:group, parent: synced_group) }
+  let(:unsynced_group) { create(:group) }
+  let(:synced_project) { create(:project, group: synced_group) }
+  let(:unsynced_project) { create(:project, group: unsynced_group, repository_storage: 'broken') }
+
+  let!(:upload_1) { create(:upload, model: synced_group) }
+  let!(:upload_2) { create(:upload, model: unsynced_group) }
+  let!(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
+  let!(:upload_4) { create(:upload, model: unsynced_project) }
+  let(:upload_5) { create(:upload, model: synced_project) }
+  let(:upload_6) { create(:upload, :personal_snippet_upload) }
+  let(:upload_7) { create(:upload, model: synced_subgroup) }
+  let(:lfs_object) { create(:lfs_object) }
+
+  subject { described_class.new(current_node: secondary) }
+
+  before do
+    stub_current_geo_node(secondary)
+  end
+
+  # Disable transactions via :delete method because a foreign table
+  # can't see changes inside a transaction of a different connection.
+  context 'FDW', :delete do
+    before do
+      skip('FDW is not configured') if Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+    end
+
+    describe '#find_synced_attachments' do
+      it 'delegates to #fdw_find_synced_attachments' do
+        expect(subject).to receive(:fdw_find_synced_attachments).and_call_original
+
+        subject.find_synced_attachments
+      end
+
+      it 'returns synced avatars, attachments, personal snippets and files' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id)
+        create(:geo_file_registry, :avatar, file_id: upload_2.id)
+        create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_6.id)
+        create(:geo_file_registry, :avatar, file_id: upload_7.id)
+        create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+        synced_attachments = subject.find_synced_attachments
+
+        expect(synced_attachments.pluck(:id)).to match_array([upload_1.id, upload_2.id, upload_6.id, upload_7.id])
+      end
+
+      context 'with selective sync' do
+        it 'falls back to legacy queries' do
+          secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+          expect(subject).to receive(:legacy_find_synced_attachments)
+
+          subject.find_synced_attachments
+        end
+      end
+    end
+
+    describe '#find_failed_attachments' do
+      it 'delegates to #fdw_find_failed_attachments' do
+        expect(subject).to receive(:fdw_find_failed_attachments).and_call_original
+
+        subject.find_failed_attachments
+      end
+
+      it 'returns failed avatars, attachments, personal snippets and files' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id)
+        create(:geo_file_registry, :avatar, file_id: upload_2.id)
+        create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+        create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+        failed_attachments = subject.find_failed_attachments
+
+        expect(failed_attachments.pluck(:id)).to match_array([upload_3.id, upload_6.id, upload_7.id])
+      end
+
+      context 'with selective sync' do
+        it 'falls back to legacy queries' do
+          secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+          expect(subject).to receive(:legacy_find_failed_attachments)
+
+          subject.find_failed_attachments
+        end
+      end
+    end
+
+    describe '#find_unsynced_attachments' do
+      it 'delegates to #fdw_find_unsynced_attachments' do
+        expect(subject).to receive(:fdw_find_unsynced_attachments).and_call_original
+
+        subject.find_unsynced_attachments(batch_size: 10)
+      end
+
+      it 'returns uploads without an entry on the tracking database' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+        uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+        expect(uploads.map(&:id)).to match_array([upload_2.id, upload_3.id, upload_4.id])
+      end
+
+      it 'excludes uploads in the except_registry_ids option' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+        uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+        expect(uploads.map(&:id)).to match_array([upload_3.id, upload_4.id])
+      end
+    end
+  end
+
+  context 'Legacy' do
+    before do
+      allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+    end
+
+    describe '#find_synced_attachments' do
+      it 'delegates to #legacy_find_synced_attachments' do
+        expect(subject).to receive(:legacy_find_synced_attachments).and_call_original
+
+        subject.find_synced_attachments
+      end
+
+      it 'returns synced avatars, attachments, personal snippets and files' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id)
+        create(:geo_file_registry, :avatar, file_id: upload_2.id)
+        create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_6.id)
+        create(:geo_file_registry, :avatar, file_id: upload_7.id)
+        create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+        synced_attachments = subject.find_synced_attachments
+
+        expect(synced_attachments).to match_array([upload_1, upload_2, upload_6, upload_7])
+      end
+
+      context 'with selective sync by namespace' do
+        it 'returns synced avatars, attachments, personal snippets and files' do
+          create(:geo_file_registry, :avatar, file_id: upload_1.id)
+          create(:geo_file_registry, :avatar, file_id: upload_2.id)
+          create(:geo_file_registry, :avatar, file_id: upload_3.id)
+          create(:geo_file_registry, :avatar, file_id: upload_4.id)
+          create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_6.id)
+          create(:geo_file_registry, :avatar, file_id: upload_7.id)
+          create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+          secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+          synced_attachments = subject.find_synced_attachments
+
+          expect(synced_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+        end
+      end
+
+      context 'with selective sync by shard' do
+        it 'returns synced avatars, attachments, personal snippets and files' do
+          create(:geo_file_registry, :avatar, file_id: upload_1.id)
+          create(:geo_file_registry, :avatar, file_id: upload_2.id)
+          create(:geo_file_registry, :avatar, file_id: upload_3.id)
+          create(:geo_file_registry, :avatar, file_id: upload_4.id)
+          create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_6.id)
+          create(:geo_file_registry, :avatar, file_id: upload_7.id)
+          create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+          secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+          synced_attachments = subject.find_synced_attachments
+
+          expect(synced_attachments).to match_array([upload_1, upload_3, upload_6])
+        end
+      end
+    end
+
+    describe '#find_failed_attachments' do
+      it 'delegates to #legacy_find_failed_attachments' do
+        expect(subject).to receive(:legacy_find_failed_attachments).and_call_original
+
+        subject.find_failed_attachments
+      end
+
+      it 'returns failed avatars, attachments, personal snippets and files' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id)
+        create(:geo_file_registry, :avatar, file_id: upload_2.id)
+        create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+        create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+        create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+        failed_attachments = subject.find_failed_attachments
+
+        expect(failed_attachments).to match_array([upload_3, upload_6, upload_7])
+      end
+
+      context 'with selective sync by namespace' do
+        it 'returns failed avatars, attachments, personal snippets and files' do
+          create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_2.id)
+          create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_5.id)
+          create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+          create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+          secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+          failed_attachments = subject.find_failed_attachments
+
+          expect(failed_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+        end
+      end
+
+      context 'with selective sync by shard' do
+        it 'returns failed avatars, attachments, personal snippets and files' do
+          create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_2.id)
+          create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_5.id)
+          create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+          create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+          create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+          secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+          failed_attachments = subject.find_failed_attachments
+
+          expect(failed_attachments).to match_array([upload_1, upload_3, upload_6])
+        end
+      end
+    end
+
+    describe '#find_unsynced_attachments' do
+      it 'delegates to #legacy_find_unsynced_attachments' do
+        expect(subject).to receive(:legacy_find_unsynced_attachments).and_call_original
+
+        subject.find_unsynced_attachments(batch_size: 10)
+      end
+
+      it 'returns uploads without an entry on the tracking database' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+        uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+        expect(uploads).to match_array([upload_2, upload_3, upload_4])
+      end
+
+      it 'excludes uploads in the except_registry_ids option' do
+        create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+        uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+        expect(uploads).to match_array([upload_3, upload_4])
+      end
+    end
+  end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
new file mode 100644
index 000000000000..4cb2a1ec08f3
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::FileTransfer do
+  let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+  let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+  subject { described_class.new(:file, upload) }
+
+  describe '#execute' do
+    context 'user avatar' do
+      it 'sets an absolute path' do
+        expect(subject.file_type).to eq(:file)
+        expect(subject.file_id).to eq(upload.id)
+        expect(subject.filename).to eq(upload.absolute_path)
+        expect(Pathname.new(subject.filename).absolute?).to be_truthy
+        expect(subject.request_data).to eq({ id: upload.model_id,
+                                             type: 'User',
+                                             checksum: upload.checksum })
+      end
+    end
+  end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
new file mode 100644
index 000000000000..af475a966a05
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
@@ -0,0 +1,414 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared_state do
+  include ::EE::GeoHelpers
+
+  set(:primary) { create(:geo_node, :primary) }
+  set(:secondary) { create(:geo_node) }
+
+  let(:options) { {} }
+  subject(:daemon) { described_class.new(options) }
+
+  around do |example|
+    Sidekiq::Testing.fake! { example.run }
+  end
+
+  before do
+    stub_current_geo_node(secondary)
+
+    allow(daemon).to receive(:trap_signals)
+    allow(daemon).to receive(:arbitrary_sleep).and_return(0.1)
+  end
+
+  describe '#run!' do
+    it 'traps signals' do
+      is_expected.to receive(:exit?).and_return(true)
+      is_expected.to receive(:trap_signals)
+
+      daemon.run!
+    end
+
+    it 'delegates to #run_once! in a loop' do
+      is_expected.to receive(:exit?).and_return(false, false, false, true)
+      is_expected.to receive(:run_once!).twice
+
+      daemon.run!
+    end
+
+    it 'skips execution if cannot achieve a lease' do
+      is_expected.to receive(:exit?).and_return(false, true)
+      is_expected.not_to receive(:run_once!)
+      expect_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain_with_ttl).and_return({ ttl: 1, uuid: false })
+
+      daemon.run!
+    end
+
+    it 'skips execution if not a Geo node' do
+      stub_current_geo_node(nil)
+
+      is_expected.to receive(:exit?).and_return(false, true)
+      is_expected.to receive(:sleep).with(1.minute)
+      is_expected.not_to receive(:run_once!)
+
+      daemon.run!
+    end
+
+    it 'skips execution if the current node is a primary' do
+      stub_current_geo_node(primary)
+
+      is_expected.to receive(:exit?).and_return(false, true)
+      is_expected.to receive(:sleep).with(1.minute)
+      is_expected.not_to receive(:run_once!)
+
+      daemon.run!
+    end
+  end
+
+  describe '#run_once!' do
+    context 'when replaying a repository created event' do
+      let(:project) { create(:project) }
+      let(:repository_created_event) { create(:geo_repository_created_event, project: project) }
+      let(:event_log) { create(:geo_event_log, repository_created_event: repository_created_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+      it 'creates a new project registry' do
+        expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+      end
+
+      it 'sets resync attributes to true' do
+        daemon.run_once!
+
+        registry = Geo::ProjectRegistry.last
+
+        expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: true)
+      end
+
+      it 'sets resync_wiki to false if wiki_path is nil' do
+        repository_created_event.update!(wiki_path: nil)
+
+        daemon.run_once!
+
+        registry = Geo::ProjectRegistry.last
+
+        expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: false)
+      end
+
+      it 'performs Geo::ProjectSyncWorker' do
+        expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+          .with(project.id, anything).once
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when replaying a repository updated event' do
+      let(:project) { create(:project) }
+      let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+      let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+      it 'creates a new project registry if it does not exist' do
+        expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+      end
+
+      it 'sets resync_repository to true if event source is repository' do
+        repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::REPOSITORY)
+        registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+        daemon.run_once!
+
+        expect(registry.reload.resync_repository).to be true
+      end
+
+      it 'sets resync_wiki to true if event source is wiki' do
+        repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::WIKI)
+        registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+        daemon.run_once!
+
+        expect(registry.reload.resync_wiki).to be true
+      end
+
+      it 'performs Geo::ProjectSyncWorker' do
+        expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+          .with(project.id, anything).once
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when replaying a repository deleted event' do
+      let(:event_log) { create(:geo_event_log, :deleted_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:repository_deleted_event) { event_log.repository_deleted_event }
+      let(:project) { repository_deleted_event.project }
+
+      it 'does not create a tracking database entry' do
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+
+      it 'schedules a GeoRepositoryDestroyWorker' do
+        project_id   = repository_deleted_event.project_id
+        project_name = repository_deleted_event.deleted_project_name
+        project_path = repository_deleted_event.deleted_path
+
+        expect(::GeoRepositoryDestroyWorker).to receive(:perform_async)
+          .with(project_id, project_name, project_path, project.repository_storage)
+
+        daemon.run_once!
+      end
+
+      it 'removes the tracking database entry if it exists' do
+        create(:geo_project_registry, :synced, project: project)
+
+        expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(-1)
+      end
+    end
+
+    context 'when replaying a repositories changed event' do
+      let(:repositories_changed_event) { create(:geo_repositories_changed_event, geo_node: secondary) }
+      let(:event_log) { create(:geo_event_log, repositories_changed_event: repositories_changed_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+      it 'schedules a Geo::RepositoriesCleanUpWorker when event node is the current node' do
+        expect(Geo::RepositoriesCleanUpWorker).to receive(:perform_in).with(within(5.minutes).of(1.hour), secondary.id)
+
+        daemon.run_once!
+      end
+
+      it 'does not schedule a Geo::RepositoriesCleanUpWorker when event node is not the current node' do
+        stub_current_geo_node(build(:geo_node))
+
+        expect(Geo::RepositoriesCleanUpWorker).not_to receive(:perform_in)
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when node has namespace restrictions' do
+      let(:group_1) { create(:group) }
+      let(:group_2) { create(:group) }
+      let(:project) { create(:project, group: group_1) }
+      let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+      let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+      before do
+        allow(Geo::ProjectSyncWorker).to receive(:perform_async)
+      end
+
+      it 'replays events for projects that belong to selected namespaces to replicate' do
+        secondary.update!(namespaces: [group_1])
+
+        expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+      end
+
+      it 'does not replay events for projects that do not belong to selected namespaces to replicate' do
+        secondary.update!(selective_sync_type: 'namespaces', namespaces: [group_2])
+
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+
+      it 'does not replay events for projects that do not belong to selected shards to replicate' do
+        secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['broken'])
+
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+    end
+
+    context 'when processing a repository renamed event' do
+      let(:event_log) { create(:geo_event_log, :renamed_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:repository_renamed_event) { event_log.repository_renamed_event }
+
+      it 'does not create a new project registry' do
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+
+      it 'schedules a Geo::RenameRepositoryWorker' do
+        project_id = repository_renamed_event.project_id
+        old_path_with_namespace = repository_renamed_event.old_path_with_namespace
+        new_path_with_namespace = repository_renamed_event.new_path_with_namespace
+
+        expect(::Geo::RenameRepositoryWorker).to receive(:perform_async)
+          .with(project_id, old_path_with_namespace, new_path_with_namespace)
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when processing a hashed storage migration event' do
+      let(:event_log) { create(:geo_event_log, :hashed_storage_migration_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:hashed_storage_migrated_event) { event_log.hashed_storage_migrated_event }
+
+      it 'does not create a new project registry' do
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+
+      it 'schedules a Geo::HashedStorageMigrationWorker' do
+        project = hashed_storage_migrated_event.project
+        old_disk_path = hashed_storage_migrated_event.old_disk_path
+        new_disk_path = hashed_storage_migrated_event.new_disk_path
+        old_storage_version = project.storage_version
+
+        expect(::Geo::HashedStorageMigrationWorker).to receive(:perform_async)
+          .with(project.id, old_disk_path, new_disk_path, old_storage_version)
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when processing an attachment migration event to hashed storage' do
+      let(:event_log) { create(:geo_event_log, :hashed_storage_attachments_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:hashed_storage_attachments_event) { event_log.hashed_storage_attachments_event }
+
+      it 'does not create a new project registry' do
+        expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+      end
+
+      it 'schedules a Geo::HashedStorageAttachmentsMigrationWorker' do
+        project = hashed_storage_attachments_event.project
+        old_attachments_path = hashed_storage_attachments_event.old_attachments_path
+        new_attachments_path = hashed_storage_attachments_event.new_attachments_path
+
+        expect(::Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+          .with(project.id, old_attachments_path, new_attachments_path)
+
+        daemon.run_once!
+      end
+    end
+
+    context 'when replaying an LFS object deleted event' do
+      let(:event_log) { create(:geo_event_log, :lfs_object_deleted_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:lfs_object_deleted_event) { event_log.lfs_object_deleted_event }
+      let(:lfs_object) { lfs_object_deleted_event.lfs_object }
+
+      it 'does not create a tracking database entry' do
+        expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+      end
+
+      it 'schedules a Geo::FileRemovalWorker' do
+        file_path = File.join(LfsObjectUploader.root, lfs_object_deleted_event.file_path)
+
+        expect(::Geo::FileRemovalWorker).to receive(:perform_async)
+          .with(file_path)
+
+        daemon.run_once!
+      end
+
+      it 'removes the tracking database entry if it exists' do
+        create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+        expect { daemon.run_once! }.to change(Geo::FileRegistry.lfs_objects, :count).by(-1)
+      end
+    end
+
+    context 'when replaying a job artifact deleted event' do
+      let(:event_log) { create(:geo_event_log, :job_artifact_deleted_event) }
+      let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+      let(:job_artifact_deleted_event) { event_log.job_artifact_deleted_event }
+      let(:job_artifact) { job_artifact_deleted_event.job_artifact }
+
+      context 'with a tracking database entry' do
+        before do
+          create(:geo_file_registry, :job_artifact, file_id: job_artifact.id)
+        end
+
+        context 'with a file' do
+          context 'when the delete succeeds' do
+            it 'removes the tracking database entry' do
+              expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+            end
+
+            it 'deletes the file' do
+              expect { daemon.run_once! }.to change { File.exist?(job_artifact.file.path) }.from(true).to(false)
+            end
+          end
+
+          context 'when the delete fails' do
+            before do
+              expect(daemon).to receive(:delete_file).and_return(false)
+            end
+
+            it 'does not remove the tracking database entry' do
+              expect { daemon.run_once! }.not_to change(Geo::FileRegistry.job_artifacts, :count)
+            end
+          end
+        end
+
+        context 'without a file' do
+          before do
+            FileUtils.rm(job_artifact.file.path)
+          end
+
+          it 'removes the tracking database entry' do
+            expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+          end
+        end
+      end
+
+      context 'without a tracking database entry' do
+        it 'does not create a tracking database entry' do
+          expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+        end
+
+        it 'does not delete the file (yet, due to possible race condition)' do
+          expect { daemon.run_once! }.not_to change { File.exist?(job_artifact.file.path) }.from(true)
+        end
+      end
+    end
+  end
+
+  describe '#delete_file' do
+    context 'when the file exists' do
+      let!(:file) { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+
+      context 'when the delete does not raise an exception' do
+        it 'returns true' do
+          expect(daemon.send(:delete_file, file.path)).to be_truthy
+        end
+
+        it 'does not log an error' do
+          expect(daemon).not_to receive(:logger)
+
+          daemon.send(:delete_file, file.path)
+        end
+      end
+
+      context 'when the delete raises an exception' do
+        before do
+          expect(File).to receive(:delete).and_raise('something went wrong')
+        end
+
+        it 'returns false' do
+          expect(daemon.send(:delete_file, file.path)).to be_falsey
+        end
+
+        it 'logs an error' do
+          logger = double('logger')
+          expect(daemon).to receive(:logger).and_return(logger)
+          expect(logger).to receive(:error).with('Failed to remove file', exception: 'RuntimeError', details: 'something went wrong', filename: file.path)
+
+          daemon.send(:delete_file, file.path)
+        end
+      end
+    end
+
+    context 'when the file does not exist' do
+      it 'returns false' do
+        expect(daemon.send(:delete_file, '/does/not/exist')).to be_falsey
+      end
+
+      it 'logs an error' do
+        logger = double('logger')
+        expect(daemon).to receive(:logger).and_return(logger)
+        expect(logger).to receive(:error).with('Failed to remove file', exception: 'Errno::ENOENT', details: 'No such file or directory @ unlink_internal - /does/not/exist', filename: '/does/not/exist')
+
+        daemon.send(:delete_file, '/does/not/exist')
+      end
+    end
+  end
+end
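
The `#delete_file` examples above pin down a small contract: unlink the file and return true; rescue any error, log the exception class, message, and filename, and return false. A minimal sketch consistent with those expectations (the daemon's actual method is not part of this patch, so the shape here is assumed):

    def delete_file(path)
      File.delete(path)
      true
    rescue => e
      # Matches the logged fields asserted above: exception class name,
      # error message, and the path we tried to unlink.
      logger.error('Failed to remove file',
                   exception: e.class.name,
                   details: e.message,
                   filename: path)
      false
    end
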
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
index b02327b4c734..e425f5bc112b 100644
--- a/spec/ee/spec/models/ee/lfs_object_spec.rb
+++ b/spec/ee/spec/models/ee/lfs_object_spec.rb
@@ -8,14 +8,14 @@
       expect(subject.local_store?).to eq true
     end
 
-    it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do
-      subject.file_store = LfsObjectUploader::LOCAL_STORE
+    it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
+      subject.file_store = LfsObjectUploader::Store::LOCAL
 
       expect(subject.local_store?).to eq true
     end
 
-    it 'returns false whe file_store is equal to LfsObjectUploader::REMOTE_STORE' do
-      subject.file_store = LfsObjectUploader::REMOTE_STORE
+    it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
+      subject.file_store = LfsObjectUploader::Store::REMOTE
 
       expect(subject.local_store?).to eq false
     end
diff --git a/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
new file mode 100644
index 000000000000..9fa618fdc474
--- /dev/null
+++ b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -0,0 +1,50 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateAttachmentsService do
+  let(:project) { create(:project, storage_version: 1) }
+  let(:service) { described_class.new(project) }
+  let(:legacy_storage) { Storage::LegacyProject.new(project) }
+  let(:hashed_storage) { Storage::HashedProject.new(project) }
+  let(:old_attachments_path) { legacy_storage.disk_path }
+  let(:new_attachments_path) { hashed_storage.disk_path }
+
+  describe '#execute' do
+    set(:primary) { create(:geo_node, :primary) }
+    set(:secondary) { create(:geo_node) }
+
+    context 'on success' do
+      before do
+        TestEnv.clean_test_path
+        FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
+      end
+
+      it 'returns true' do
+        expect(service.execute).to be_truthy
+      end
+
+      it 'creates a Geo::HashedStorageAttachmentsEvent' do
+        expect { service.execute }.to change(Geo::EventLog, :count).by(1)
+
+        event = Geo::EventLog.first.event
+
+        expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
+        expect(event).to have_attributes(
+          old_attachments_path: old_attachments_path,
+          new_attachments_path: new_attachments_path
+        )
+      end
+    end
+
+    context 'on failure' do
+      it 'does not create a Geo event when skipped' do
+        expect { service.execute }.not_to change { Geo::EventLog.count }
+      end
+
+      it 'does not create a Geo event on failure' do
+        expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
+        expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
+        expect(Geo::EventLog.count).to eq(0)
+      end
+    end
+  end
+end
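
Those assertions outline the EE service's shape: the Geo event is recorded only when the folder move actually happens, and nothing is logged when the move is skipped or raises. A hypothetical sketch, with names taken from the lets and factory attributes above rather than from the real implementation:

    def execute
      moved = move_folder!    # raises AttachmentMigrationError when the target exists
      log_geo_event if moved
      moved
    end

    def log_geo_event
      Geo::EventLog.create!(
        hashed_storage_attachments_event: Geo::HashedStorageAttachmentsEvent.new(
          project: project,
          old_attachments_path: legacy_storage.disk_path,
          new_attachments_path: hashed_storage.disk_path
        )
      )
    end
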
diff --git a/spec/ee/spec/services/geo/file_download_service_spec.rb b/spec/ee/spec/services/geo/file_download_service_spec.rb
new file mode 100644
index 000000000000..4fb0d89dbde8
--- /dev/null
+++ b/spec/ee/spec/services/geo/file_download_service_spec.rb
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadService do
+  include ::EE::GeoHelpers
+
+  set(:primary)  { create(:geo_node, :primary) }
+  set(:secondary) { create(:geo_node) }
+
+  before do
+    stub_current_geo_node(secondary)
+
+    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
+  end
+
+  describe '#execute' do
+    context 'user avatar' do
+      let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+      let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+      subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+      it 'downloads a user avatar' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+        expect(Geo::FileRegistry.last.retry_count).to eq(1)
+        expect(Geo::FileRegistry.last.retry_at).to be_present
+      end
+
+      it 'registers when the download fails with some other error' do
+        stub_transfer(Gitlab::Geo::FileTransfer, nil)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'group avatar' do
+      let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+      let(:upload) { Upload.find_by(model: group, uploader: 'AvatarUploader') }
+
+      subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+      it 'downloads a group avatar' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'project avatar' do
+      let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+      let(:upload) { Upload.find_by(model: project, uploader: 'AvatarUploader') }
+
+      subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+      it 'downloads a project avatar' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'with an attachment' do
+      let(:note) { create(:note, :with_attachment) }
+      let(:upload) { Upload.find_by(model: note, uploader: 'AttachmentUploader') }
+
+      subject(:execute!) { described_class.new(:attachment, upload.id).execute }
+
+      it 'downloads the attachment' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'with a snippet' do
+      let(:upload) { create(:upload, :personal_snippet_upload) }
+
+      subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
+
+      it 'downloads the file' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'with file upload' do
+      let(:project) { create(:project) }
+      let(:upload) { Upload.find_by(model: project, uploader: 'FileUploader') }
+
+      subject { described_class.new(:file, upload.id) }
+
+      before do
+        FileUploader.new(project).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+      end
+
+      it 'downloads the file' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'with namespace file upload' do
+      let(:group) { create(:group) }
+      let(:upload) { Upload.find_by(model: group, uploader: 'NamespaceFileUploader') }
+
+      subject { described_class.new(:file, upload.id) }
+
+      before do
+        NamespaceFileUploader.new(group).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+      end
+
+      it 'downloads the file' do
+        stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+    end
+
+    context 'LFS object' do
+      let(:lfs_object) { create(:lfs_object) }
+
+      subject { described_class.new(:lfs, lfs_object.id) }
+
+      it 'downloads an LFS object' do
+        stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::LfsTransfer, -1)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+
+      it 'logs a message' do
+        stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+        expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+        subject.execute
+      end
+    end
+
+    context 'job artifacts' do
+      let(:job_artifact) { create(:ci_job_artifact) }
+
+      subject { described_class.new(:job_artifact, job_artifact.id) }
+
+      it 'downloads a job artifact' do
+        stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+      end
+
+      it 'registers when the download fails' do
+        stub_transfer(Gitlab::Geo::JobArtifactTransfer, -1)
+
+        expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+      end
+
+      it 'logs a message' do
+        stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+        expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+        subject.execute
+      end
+    end
+
+    context 'bad object type' do
+      it 'raises an error' do
+        expect { described_class.new(:bad, 1).execute }.to raise_error(NameError)
+      end
+    end
+
+    def stub_transfer(kls, result)
+      instance = double("(instance of #{kls})", download_from_primary: result)
+      allow(kls).to receive(:new).and_return(instance)
+    end
+  end
+end
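
Across every object type above, `stub_transfer` makes the transfer report 100 bytes on success, -1 on a failed download, and nil on any other error; the service turns that result into a synced or failed `Geo::FileRegistry` row, bumping `retry_count` and `retry_at` on failure. A rough sketch of that bookkeeping, using assumed names (`object_type`, `object_db_id`, and `next_retry_time` are illustrative, not the service's API):

    def execute
      bytes_downloaded = transfer.download_from_primary
      success = !bytes_downloaded.nil? && bytes_downloaded >= 0

      registry = Geo::FileRegistry.find_or_initialize_by(
        file_type: object_type, file_id: object_db_id)
      registry.bytes = success ? bytes_downloaded : 0
      registry.success = success

      unless success
        registry.retry_count = (registry.retry_count || 0) + 1
        registry.retry_at = next_retry_time(registry.retry_count) # assumed backoff helper
      end

      registry.save!
    end

The `NameError` expectation in the 'bad object type' context also suggests the transfer class is resolved by constantizing the object type, so an unknown type blows up before any registry work happens.
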
diff --git a/spec/ee/spec/services/geo/files_expire_service_spec.rb b/spec/ee/spec/services/geo/files_expire_service_spec.rb
new file mode 100644
index 000000000000..09b0b386ed10
--- /dev/null
+++ b/spec/ee/spec/services/geo/files_expire_service_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+# Disable transactions via :delete method because a foreign table
+# can't see changes inside a transaction of a different connection.
+describe Geo::FilesExpireService, :geo, :delete do
+  let(:project) { create(:project) }
+  let!(:old_full_path) { project.full_path }
+  subject { described_class.new(project, old_full_path) }
+
+  describe '#execute' do
+    let(:file_uploader) { build(:file_uploader, project: project) }
+    let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+    let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
+
+    before do
+      project.update(path: "#{project.path}_renamed")
+    end
+
+    context 'when in Geo secondary node' do
+      before do
+        allow(Gitlab::Geo).to receive(:secondary?) { true }
+      end
+
+      it 'removes the file from disk' do
+        file_path = File.join(subject.base_dir, upload.path)
+        expect(File.exist?(file_path)).to be_truthy
+
+        Sidekiq::Testing.inline! { subject.execute }
+
+        expect(File.exist?(file_path)).to be_falsey
+      end
+
+      it 'removes the file_registry associated with the upload' do
+        expect(file_registry.success).to be_truthy
+
+        subject.execute
+
+        expect { file_registry.reload }.to raise_error(ActiveRecord::RecordNotFound)
+      end
+    end
+
+    context 'when not in Geo secondary node' do
+      it 'does not schedule file removal or mark for resync' do
+        expect(subject).not_to receive(:schedule_file_removal)
+        expect(subject).not_to receive(:mark_for_resync!)
+
+        subject.execute
+      end
+    end
+  end
+end
diff --git a/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
new file mode 100644
index 000000000000..40e06705cf50
--- /dev/null
+++ b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
@@ -0,0 +1,83 @@
+require 'spec_helper'
+
+def base_path(storage)
+  File.join(FileUploader.root, storage.disk_path)
+end
+
+describe Geo::HashedStorageAttachmentsMigrationService do
+  let!(:project) { create(:project) }
+
+  let(:legacy_storage) { Storage::LegacyProject.new(project) }
+  let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+  let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+  let(:file_uploader) { build(:file_uploader, project: project) }
+  let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
+  let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
+
+  subject(:service) do
+    described_class.new(project.id,
+                        old_attachments_path: legacy_storage.disk_path,
+                        new_attachments_path: hashed_storage.disk_path)
+  end
+
+  describe '#execute' do
+    context 'when it succeeds' do
+      it 'moves attachments to hashed storage layout' do
+        expect(File.file?(old_path)).to be_truthy
+        expect(File.file?(new_path)).to be_falsey
+        expect(File.exist?(base_path(legacy_storage))).to be_truthy
+        expect(File.exist?(base_path(hashed_storage))).to be_falsey
+        expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
+
+        service.execute
+
+        expect(File.exist?(base_path(hashed_storage))).to be_truthy
+        expect(File.exist?(base_path(legacy_storage))).to be_falsey
+        expect(File.file?(old_path)).to be_falsey
+        expect(File.file?(new_path)).to be_truthy
+      end
+    end
+
+    context 'when the original folder no longer exists' do
+      before do
+        FileUtils.rm_rf(base_path(legacy_storage))
+      end
+
+      it 'skips moving folders and continues' do
+        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+        service.execute
+
+        expect(File.exist?(base_path(hashed_storage))).to be_falsey
+        expect(File.file?(new_path)).to be_falsey
+      end
+    end
+
+    context 'when target folder already exists' do
+      before do
+        FileUtils.mkdir_p(base_path(hashed_storage))
+      end
+
+      it 'raises AttachmentMigrationError' do
+        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+        expect { service.execute }.to raise_error(::Geo::AttachmentMigrationError)
+      end
+    end
+  end
+
+  describe '#async_execute' do
+    it 'starts the worker' do
+      expect(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+
+      service.async_execute
+    end
+
+    it 'returns job id' do
+      allow(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async).and_return('foo')
+
+      expect(service.async_execute).to eq('foo')
+    end
+  end
+end
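
The `#async_execute` examples treat the service as a thin wrapper around the worker: schedule the job with the same arguments the service was built with, and surface the job id Sidekiq returns. A plausible sketch under those assumptions:

    def async_execute
      Geo::HashedStorageAttachmentsMigrationWorker.perform_async(
        project_id,
        old_attachments_path,
        new_attachments_path
      )
    end
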
diff --git a/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
new file mode 100644
index 000000000000..ad7cad3128a4
--- /dev/null
+++ b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
@@ -0,0 +1,291 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadDispatchWorker, :geo do
+  include ::EE::GeoHelpers
+
+  let(:primary)   { create(:geo_node, :primary, host: 'primary-geo-node') }
+  let(:secondary) { create(:geo_node) }
+
+  before do
+    stub_current_geo_node(secondary)
+    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
+    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:renew).and_return(true)
+    allow_any_instance_of(described_class).to receive(:over_time?).and_return(false)
+    WebMock.stub_request(:get, /primary-geo-node/).to_return(status: 200, body: "", headers: {})
+  end
+
+  subject { described_class.new }
+
+  shared_examples '#perform' do |skip_tests|
+    before do
+      skip('FDW is not configured') if skip_tests
+    end
+
+    it 'does not schedule anything when tracking database is not configured' do
+      create(:lfs_object, :with_file)
+
+      allow(Gitlab::Geo).to receive(:geo_database_configured?) { false }
+
+      expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+      subject.perform
+
+      # We need to unstub here or the DatabaseCleaner will have issues since it
+      # will appear as though the tracking DB were not available
+      allow(Gitlab::Geo).to receive(:geo_database_configured?).and_call_original
+    end
+
+    it 'does not schedule anything when node is disabled' do
+      create(:lfs_object, :with_file)
+
+      secondary.enabled = false
+      secondary.save
+
+      expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+      subject.perform
+    end
+
+    context 'with LFS objects' do
+      let!(:lfs_object_local_store) { create(:lfs_object, :with_file) }
+      let!(:lfs_object_remote_store) { create(:lfs_object, :with_file) }
+
+      before do
+        stub_lfs_object_storage
+        lfs_object_remote_store.file.migrate!(LfsObjectUploader::Store::REMOTE)
+      end
+
+      it 'filters S3-backed files' do
+        expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, lfs_object_local_store.id)
+        expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, lfs_object_remote_store.id)
+
+        subject.perform
+      end
+    end
+
+    context 'with job artifacts' do
+      it 'performs Geo::FileDownloadWorker for unsynced job artifacts' do
+        artifact = create(:ci_job_artifact)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with(:job_artifact, artifact.id).once.and_return(spy)
+
+        subject.perform
+      end
+
+      it 'performs Geo::FileDownloadWorker for failed-sync job artifacts' do
+        artifact = create(:ci_job_artifact)
+
+        Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: false)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with('job_artifact', artifact.id).once.and_return(spy)
+
+        subject.perform
+      end
+
+      it 'does not perform Geo::FileDownloadWorker for synced job artifacts' do
+        artifact = create(:ci_job_artifact)
+
+        Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 1234, success: true)
+
+        expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+        subject.perform
+      end
+
+      it 'does not perform Geo::FileDownloadWorker for synced job artifacts even with 0 bytes downloaded' do
+        artifact = create(:ci_job_artifact)
+
+        Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: true)
+
+        expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+        subject.perform
+      end
+    end
+
+    # Test the case where we have:
+    #
+    # 1. A total of 10 files in the queue, and we can load a maximum of 5 and send 2 at a time.
+    # 2. We send 2, wait for 1 to finish, and then send again.
+    it 'attempts to load a new batch without pending downloads' do
+      stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 5)
+      secondary.update!(files_max_capacity: 2)
+      allow_any_instance_of(::Gitlab::Geo::Transfer).to receive(:download_from_primary).and_return(100)
+
+      avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+      create_list(:lfs_object, 2, :with_file)
+      create_list(:user, 2, avatar: avatar)
+      create_list(:note, 2, :with_attachment)
+      create_list(:upload, 1, :personal_snippet_upload)
+      create_list(:ci_job_artifact, 1)
+      create(:appearance, logo: avatar, header_logo: avatar)
+
+      expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(10).times.and_call_original
+      # For 10 downloads, we expect four database reloads:
+      # 1. Load the first batch of 5.
+      # 2. 4 get sent out, 1 remains. This triggers another reload, which loads in the next 5.
+      # 3. Those 4 get sent out, and 1 remains.
+      # 4. Since the second reload filled the pipe with 4, we need to do a final reload to ensure
+      #    zero are left.
+      expect(subject).to receive(:load_pending_resources).exactly(4).times.and_call_original
+
+      Sidekiq::Testing.inline! do
+        subject.perform
+      end
+    end
+
+    context 'with a failed file' do
+      let(:failed_registry) { create(:geo_file_registry, :lfs, file_id: 999, success: false) }
+
+      it 'does not stall backfill' do
+        unsynced = create(:lfs_object, :with_file)
+
+        stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 1)
+
+        expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, failed_registry.file_id)
+        expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, unsynced.id)
+
+        subject.perform
+      end
+
+      it 'retries failed files' do
+        expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+        subject.perform
+      end
+
+      it 'does not retry failed files when retry_at is tomorrow' do
+        failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.tomorrow)
+
+        expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+        subject.perform
+      end
+
+      it 'retries failed files when retry_at is in the past' do
+        failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.yesterday)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+        subject.perform
+      end
+    end
+
+    context 'when node has namespace restrictions' do
+      let(:synced_group) { create(:group) }
+      let(:project_in_synced_group) { create(:project, group: synced_group) }
+      let(:unsynced_project) { create(:project) }
+
+      before do
+        allow(ProjectCacheWorker).to receive(:perform_async).and_return(true)
+
+        secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+      end
+
+      it 'does not perform Geo::FileDownloadWorker for LFS objects that do not belong to the selected namespaces' do
+        lfs_object_in_synced_group = create(:lfs_objects_project, project: project_in_synced_group)
+        create(:lfs_objects_project, project: unsynced_project)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with(:lfs, lfs_object_in_synced_group.lfs_object_id).once.and_return(spy)
+
+        subject.perform
+      end
+
+      it 'does not perform Geo::FileDownloadWorker for job artifacts that do not belong to the selected namespaces' do
+        create(:ci_job_artifact, project: unsynced_project)
+        job_artifact_in_synced_group = create(:ci_job_artifact, project: project_in_synced_group)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with(:job_artifact, job_artifact_in_synced_group.id).once.and_return(spy)
+
+        subject.perform
+      end
+
+      it 'does not perform Geo::FileDownloadWorker for uploads that do not belong to the selected namespaces' do
+        avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+        avatar_in_synced_group = create(:upload, model: synced_group, path: avatar)
+        create(:upload, model: create(:group), path: avatar)
+        avatar_in_project_in_synced_group = create(:upload, model: project_in_synced_group, path: avatar)
+        create(:upload, model: unsynced_project, path: avatar)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with('avatar', avatar_in_project_in_synced_group.id).once.and_return(spy)
+
+        expect(Geo::FileDownloadWorker).to receive(:perform_async)
+          .with('avatar', avatar_in_synced_group.id).once.and_return(spy)
+
+        subject.perform
+      end
+    end
+  end
+
+  # Disable transactions via :delete method because a foreign table
+  # can't see changes inside a transaction of a different connection.
+  describe 'when PostgreSQL FDW is available', :geo, :delete do
+    # Skip if FDW isn't activated on this database
+    it_behaves_like '#perform', Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+  end
+
+  describe 'when PostgreSQL FDW is not enabled', :geo do
+    before do
+      allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+    end
+
+    it_behaves_like '#perform', false
+  end
+
+  describe '#take_batch' do
+    it 'returns a batch of jobs' do
+      a = [[2, :lfs], [3, :lfs]]
+      b = []
+      c = [[3, :job_artifact], [8, :job_artifact], [9, :job_artifact]]
+      expect(subject).to receive(:db_retrieve_batch_size).and_return(4)
+
+      expect(subject.send(:take_batch, a, b, c)).to eq([
+        [3, :job_artifact],
+        [2, :lfs],
+        [8, :job_artifact],
+        [3, :lfs]
+      ])
+    end
+  end
+
+  describe '#interleave' do
+    # Notice ties are resolved by taking the "first" tied element
+    it 'interleaves 2 arrays' do
+      a = %w{1 2 3}
+      b = %w{A B C}
+      expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3 C})
+    end
+
+    # Notice there are no ties in this call
+    it 'interleaves 2 arrays with a longer second array' do
+      a = %w{1 2}
+      b = %w{A B C}
+      expect(subject.send(:interleave, a, b)).to eq(%w{A 1 B 2 C})
+    end
+
+    it 'interleaves 2 arrays with a longer first array' do
+      a = %w{1 2 3}
+      b = %w{A B}
+      expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3})
+    end
+
+    it 'interleaves 3 arrays' do
+      a = %w{1 2 3}
+      b = %w{A B C}
+      c = %w{i ii iii}
+      expect(subject.send(:interleave, a, b, c)).to eq(%w{1 A i 2 B ii 3 C iii})
+    end
+
+    it 'interleaves 3 arrays of unequal length' do
+      a = %w{1 2}
+      b = %w{A}
+      c = %w{i ii iii iiii}
+      expect(subject.send(:interleave, a, b, c)).to eq(%w{i 1 ii A iii 2 iiii})
+    end
+  end
+end
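
The `#take_batch` and `#interleave` examples fully specify the scheduling order: each element is weighted by its relative position within its own array, ties go to the array listed first, and `take_batch` caps the interleaved result at `db_retrieve_batch_size`. A sketch that satisfies every expectation above (a stand-in for, not a copy of, the worker's private methods):

    def take_batch(*arrays)
      interleave(*arrays).uniq.compact.take(db_retrieve_batch_size)
    end

    def interleave(*arrays)
      weighted = arrays.each_with_index.flat_map do |array, array_index|
        array.each_with_index.map do |element, position|
          # Weight by the element's centered position within its own array;
          # the array index breaks ties in favor of the earlier array.
          [element, [(position + 0.5) / array.size, array_index]]
        end
      end

      weighted.sort_by(&:last).map(&:first)
    end

Under this weighting, a batch drawn from arrays of different lengths stays evenly spread across file types, which is exactly what the `take_batch` example asserts.
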
diff --git a/spec/ee/workers/object_storage_upload_worker_spec.rb b/spec/ee/workers/object_storage_upload_worker_spec.rb
index d421fdf95a95..32ddcbe97577 100644
--- a/spec/ee/workers/object_storage_upload_worker_spec.rb
+++ b/spec/ee/workers/object_storage_upload_worker_spec.rb
@@ -1,8 +1,8 @@
 require 'spec_helper'
 
 describe ObjectStorageUploadWorker do
-  let(:local) { ObjectStoreUploader::LOCAL_STORE }
-  let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+  let(:local) { ObjectStorage::Store::LOCAL }
+  let(:remote) { ObjectStorage::Store::REMOTE }
 
   def perform
     described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 436735e7ed34..9bb456e89ff1 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -6,7 +6,7 @@
     file_type :archive
 
     trait :remote_store do
-      file_store JobArtifactUploader::REMOTE_STORE
+      file_store JobArtifactUploader::Store::REMOTE
     end
 
     after :build do |artifact|
diff --git a/spec/factories/geo/event_log.rb b/spec/factories/geo/event_log.rb
new file mode 100644
index 000000000000..dbe2f400f974
--- /dev/null
+++ b/spec/factories/geo/event_log.rb
@@ -0,0 +1,121 @@
+FactoryBot.define do
+  factory :geo_event_log, class: Geo::EventLog do
+    trait :created_event do
+      repository_created_event factory: :geo_repository_created_event
+    end
+
+    trait :updated_event do
+      repository_updated_event factory: :geo_repository_updated_event
+    end
+
+    trait :deleted_event do
+      repository_deleted_event factory: :geo_repository_deleted_event
+    end
+
+    trait :renamed_event do
+      repository_renamed_event factory: :geo_repository_renamed_event
+    end
+
+    trait :hashed_storage_migration_event do
+      hashed_storage_migrated_event factory: :geo_hashed_storage_migrated_event
+    end
+
+    trait :hashed_storage_attachments_event do
+      hashed_storage_attachments_event factory: :geo_hashed_storage_attachments_event
+    end
+
+    trait :lfs_object_deleted_event do
+      lfs_object_deleted_event factory: :geo_lfs_object_deleted_event
+    end
+
+    trait :job_artifact_deleted_event do
+      job_artifact_deleted_event factory: :geo_job_artifact_deleted_event
+    end
+  end
+
+  factory :geo_repository_created_event, class: Geo::RepositoryCreatedEvent do
+    project
+
+    repository_storage_name { project.repository_storage }
+    repository_storage_path { project.repository_storage_path }
+    add_attribute(:repo_path) { project.disk_path }
+    project_name { project.name }
+    wiki_path { project.wiki.disk_path }
+  end
+
+  factory :geo_repository_updated_event, class: Geo::RepositoryUpdatedEvent do
+    project
+
+    source 0
+    branches_affected 0
+    tags_affected 0
+  end
+
+  factory :geo_repository_deleted_event, class: Geo::RepositoryDeletedEvent do
+    project
+
+    repository_storage_name { project.repository_storage }
+    repository_storage_path { project.repository_storage_path }
+    deleted_path { project.path_with_namespace }
+    deleted_project_name { project.name }
+  end
+
+  factory :geo_repositories_changed_event, class: Geo::RepositoriesChangedEvent do
+    geo_node
+  end
+
+  factory :geo_repository_renamed_event, class: Geo::RepositoryRenamedEvent do
+    project { create(:project, :repository) }
+
+    repository_storage_name { project.repository_storage }
+    repository_storage_path { project.repository_storage_path }
+    old_path_with_namespace { project.path_with_namespace }
+    new_path_with_namespace { project.path_with_namespace + '_new' }
+    old_wiki_path_with_namespace { project.wiki.path_with_namespace }
+    new_wiki_path_with_namespace { project.wiki.path_with_namespace + '_new' }
+    old_path { project.path }
+    new_path { project.path + '_new' }
+  end
+
+  factory :geo_hashed_storage_migrated_event, class: Geo::HashedStorageMigratedEvent do
+    project { create(:project, :repository) }
+
+    repository_storage_name { project.repository_storage }
+    repository_storage_path { project.repository_storage_path }
+    old_disk_path { project.path_with_namespace }
+    new_disk_path { project.path_with_namespace + '_new' }
+    old_wiki_disk_path { project.wiki.path_with_namespace }
+    new_wiki_disk_path { project.wiki.path_with_namespace + '_new' }
+    new_storage_version { Project::HASHED_STORAGE_FEATURES[:repository] }
+  end
+
+  factory :geo_hashed_storage_attachments_event, class: Geo::HashedStorageAttachmentsEvent do
+    project { create(:project, :repository) }
+
+    old_attachments_path { Storage::LegacyProject.new(project).disk_path }
+    new_attachments_path { Storage::HashedProject.new(project).disk_path }
+  end
+
+  factory :geo_lfs_object_deleted_event, class: Geo::LfsObjectDeletedEvent do
+    lfs_object { create(:lfs_object, :with_file) }
+
+    after(:build, :stub) do |event, _|
+      local_store_path = Pathname.new(LfsObjectUploader.root)
+      relative_path = Pathname.new(event.lfs_object.file.path).relative_path_from(local_store_path)
+
+      event.oid = event.lfs_object.oid
+      event.file_path = relative_path
+    end
+  end
+
+  factory :geo_job_artifact_deleted_event, class: Geo::JobArtifactDeletedEvent do
+    job_artifact { create(:ci_job_artifact, :archive) }
+
+    after(:build, :stub) do |event, _|
+      local_store_path = Pathname.new(JobArtifactUploader.root)
+      relative_path = Pathname.new(event.job_artifact.file.path).relative_path_from(local_store_path)
+
+      event.file_path = relative_path
+    end
+  end
+end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 1512f5a0e58e..8c531cf59096 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,7 +18,7 @@
     end
 
     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
     end
 
     trait :access_requestable do
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 707ecbd6be55..0889c5090fb5 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -122,11 +122,11 @@
     end
 
     trait :with_attachment do
-      attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+      attachment { fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png") }
     end
 
     trait :with_svg_attachment do
-      attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
+      attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
     end
 
     transient do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index d0f3911f7301..16d328a5bc27 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -90,7 +90,7 @@
     end
 
     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
     end
 
     trait :broken_storage do
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index c39500faea1a..9e8a55eaedb9 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -1,24 +1,43 @@
 FactoryBot.define do
   factory :upload do
     model { build(:project) }
-    path { "uploads/-/system/project/avatar/avatar.jpg" }
     size 100.kilobytes
     uploader "AvatarUploader"
+    store ObjectStorage::Store::LOCAL
 
-    trait :personal_snippet do
+    # we should build a mount-agnostic upload by default
+    transient do
+      mounted_as :avatar
+      secret SecureRandom.hex
+    end
+
+    # this needs to comply with RecordsUpload::Concern#upload_path
+    path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
+
+    trait :personal_snippet_upload do
       model { build(:personal_snippet) }
+      path { File.join(secret, 'myfile.jpg') }
       uploader "PersonalFileUploader"
     end
 
     trait :issuable_upload do
-      path { "#{SecureRandom.hex}/myfile.jpg" }
+      path { File.join(secret, 'myfile.jpg') }
       uploader "FileUploader"
     end
 
     trait :namespace_upload do
-      path { "#{SecureRandom.hex}/myfile.jpg" }
       model { build(:group) }
+      path { File.join(secret, 'myfile.jpg') }
       uploader "NamespaceFileUploader"
     end
+
+    trait :attachment_upload do
+      transient do
+        mounted_as :attachment
+      end
+
+      model { build(:note) }
+      uploader "AttachmentUploader"
+    end
   end
 end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index e62e0b263ca0..769fd656e7a4 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -38,7 +38,7 @@
     end
 
     trait :with_avatar do
-      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+      avatar { fixture_file_upload('spec/fixtures/dk.png') }
     end
 
     trait :two_factor_via_otp do
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 8bb9ebe0419b..370c2490b97a 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -23,6 +23,27 @@
     end
   end
 
+  # E.g. The installation is in use at the time of migration, and someone has
+  # just uploaded a file
+  shared_examples 'does not add files in /uploads/tmp' do
+    let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+    before do
+      FileUtils.mkdir(File.dirname(tmp_file))
+      FileUtils.touch(tmp_file)
+    end
+
+    after do
+      FileUtils.rm(tmp_file)
+    end
+
+    it 'does not add files from /uploads/tmp' do
+      described_class.new.perform
+
+      expect(untracked_files_for_uploads.count).to eq(5)
+    end
+  end
+
   it 'ensures the untracked_files_for_uploads table exists' do
     expect do
       described_class.new.perform
@@ -109,24 +130,8 @@
         end
       end
 
-      # E.g. The installation is in use at the time of migration, and someone has
-      # just uploaded a file
       context 'when there are files in /uploads/tmp' do
-        let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
-        before do
-          FileUtils.touch(tmp_file)
-        end
-
-        after do
-          FileUtils.rm(tmp_file)
-        end
-
-        it 'does not add files from /uploads/tmp' do
-          described_class.new.perform
-
-          expect(untracked_files_for_uploads.count).to eq(5)
-        end
+        it_behaves_like 'does not add files in /uploads/tmp'
       end
     end
   end
@@ -197,24 +202,8 @@
         end
       end
 
-      # E.g. The installation is in use at the time of migration, and someone has
-      # just uploaded a file
       context 'when there are files in /uploads/tmp' do
-        let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
-        before do
-          FileUtils.touch(tmp_file)
-        end
-
-        after do
-          FileUtils.rm(tmp_file)
-        end
-
-        it 'does not add files from /uploads/tmp' do
-          described_class.new.perform
-
-          expect(untracked_files_for_uploads.count).to eq(5)
-        end
+        it_behaves_like 'does not add files in /uploads/tmp'
       end
     end
   end
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 39e3b875c49f..326ed2f2ecfa 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -17,7 +17,7 @@
     end
 
     let(:text) do
-      "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}"
+      "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
     end
 
     describe '#rewrite' do
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
index 63992ea8ab8f..a685521cbf09 100644
--- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -4,7 +4,6 @@
   describe 'bundle a project Git repo' do
     let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
     let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
-    let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
 
     before do
       allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
@@ -26,9 +25,9 @@
       end
 
       it 'copies the uploads to the project path' do
-        restorer.restore
+        subject.restore
 
-        uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('dummy.txt')
       end
@@ -44,9 +43,9 @@
       end
 
       it 'copies the uploads to the project path' do
-        restorer.restore
+        subject.restore
 
-        uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('dummy.txt')
       end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index e8948de1f3af..959779523f49 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -30,7 +30,7 @@
       it 'copies the uploads to the export path' do
         saver.save
 
-        uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('banana_sample.gif')
       end
@@ -52,7 +52,7 @@
       it 'copies the uploads to the export path' do
         saver.save
 
-        uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('banana_sample.gif')
       end
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
index cf6ae5cda742..ca9086a84d03 100644
--- a/spec/migrations/remove_empty_fork_networks_spec.rb
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -12,6 +12,10 @@
     deleted_project.destroy!
   end
 
+  after do
+    Upload.reset_column_information
+  end
+
   it 'deletes only the fork network without members' do
     expect(fork_networks.count).to eq(2)
 
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index b3f160f3119a..138b2a4935f6 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -204,7 +204,7 @@
       let(:parent) { create(:group, name: 'parent', path: 'parent') }
       let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
       let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
-      let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
+      let(:uploads_dir) { FileUploader.root }
       let(:pages_dir) { File.join(TestEnv.pages_path) }
 
       before do
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 345382ea8c7d..42f3d6097702 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -45,51 +45,6 @@
     end
   end
 
-  describe '.remove_path' do
-    it 'removes all records at the given path' do
-      described_class.create!(
-        size: File.size(__FILE__),
-        path: __FILE__,
-        model: build_stubbed(:user),
-        uploader: 'AvatarUploader'
-      )
-
-      expect { described_class.remove_path(__FILE__) }
-        .to change { described_class.count }.from(1).to(0)
-    end
-  end
-
-  describe '.record' do
-    let(:fake_uploader) do
-      double(
-        file: double(size: 12_345),
-        relative_path: 'foo/bar.jpg',
-        model: build_stubbed(:user),
-        class: 'AvatarUploader'
-      )
-    end
-
-    it 'removes existing paths before creation' do
-      expect(described_class).to receive(:remove_path)
-        .with(fake_uploader.relative_path)
-
-      described_class.record(fake_uploader)
-    end
-
-    it 'creates a new record and assigns size, path, model, and uploader' do
-      upload = described_class.record(fake_uploader)
-
-      aggregate_failures do
-        expect(upload).to be_persisted
-        expect(upload.size).to eq fake_uploader.file.size
-        expect(upload.path).to eq fake_uploader.relative_path
-        expect(upload.model_id).to eq fake_uploader.model.id
-        expect(upload.model_type).to eq fake_uploader.model.class.to_s
-        expect(upload.uploader).to eq fake_uploader.class
-      end
-    end
-  end
-
   describe '#absolute_path' do
     it 'returns the path directly when already absolute' do
       path = '/path/to/namespace/project/secret/file.jpg'
@@ -111,27 +66,27 @@
     end
   end
 
-  describe '#calculate_checksum' do
-    it 'calculates the SHA256 sum' do
-      upload = described_class.new(
-        path: __FILE__,
-        size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
-      )
+  describe '#calculate_checksum!' do
+    let(:upload) do
+      described_class.new(path: __FILE__,
+                          size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
+    end
+
+    it 'sets `checksum` to SHA256 sum of the file' do
       expected = Digest::SHA256.file(__FILE__).hexdigest
 
-      expect { upload.calculate_checksum }
+      expect { upload.calculate_checksum! }
         .to change { upload.checksum }.from(nil).to(expected)
     end
 
-    it 'returns nil for a non-existant file' do
-      upload = described_class.new(
-        path: __FILE__,
-        size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
-      )
-
+    it 'sets `checksum` to nil for a non-existent file' do
       expect(upload).to receive(:exist?).and_return(false)
 
-      expect(upload.calculate_checksum).to be_nil
+      checksum = Digest::SHA256.file(__FILE__).hexdigest
+      upload.checksum = checksum
+
+      expect { upload.calculate_checksum! }
+        .to change { upload.checksum }.from(checksum).to(nil)
     end
   end
 
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 5c6eee09285f..8086b91a4887 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -947,7 +947,7 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
         context 'when artifacts are being stored inside of tmp path' do
           before do
             # by configuring this path we allow to pass temp file from any path
-            allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
+            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
           end
 
           context 'when job has been erased' do
@@ -1124,7 +1124,7 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
             # by configuring this path we allow to pass file from @tmpdir only
             # but all temporary files are stored in system tmp directory
             @tmpdir = Dir.mktmpdir
-            allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
+            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
           end
 
           after do
@@ -1153,7 +1153,7 @@ def upload_artifacts(file, headers = {}, accelerated = true)
 
         context 'when job has artifacts' do
           let(:job) { create(:ci_build) }
-          let(:store) { JobArtifactUploader::LOCAL_STORE }
+          let(:store) { JobArtifactUploader::Store::LOCAL }
 
           before do
             create(:ci_job_artifact, :archive, file_store: store, job: job)
@@ -1175,7 +1175,7 @@ def upload_artifacts(file, headers = {}, accelerated = true)
             end
 
             context 'when artifacts are stored remotely' do
-              let(:store) { JobArtifactUploader::REMOTE_STORE }
+              let(:store) { JobArtifactUploader::Store::REMOTE }
               let!(:job) { create(:ci_build) }
 
               it 'download artifacts' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 8bfc8693981d..0a8788fd57e2 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -245,7 +245,7 @@
             context 'when LFS uses object storage' do
               let(:before_get) do
                 stub_lfs_object_storage
-                lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+                lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
               end
 
               it 'responds with redirect' do
@@ -975,7 +975,7 @@
             end
 
             it 'responds with status 200, location of lfs store and object details' do
-              expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+              expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
               expect(json_response['LfsOid']).to eq(sample_oid)
               expect(json_response['LfsSize']).to eq(sample_size)
             end
@@ -1132,7 +1132,7 @@
             end
 
             it 'with location of lfs store and object details' do
-              expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+              expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
               expect(json_response['LfsOid']).to eq(sample_oid)
               expect(json_response['LfsSize']).to eq(sample_size)
             end
@@ -1246,7 +1246,7 @@ def lfs_tmp_file
     end
 
     def setup_tempfile(lfs_tmp)
-      upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
+      upload_path = LfsObjectUploader.workhorse_upload_path
 
       FileUtils.mkdir_p(upload_path)
       FileUtils.touch(File.join(upload_path, lfs_tmp))
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 53ea88332fb6..dfe9adbbcdc8 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -244,7 +244,7 @@
 
         context 'issue description with uploads' do
           let(:uploader) { build(:file_uploader, project: old_project) }
-          let(:description) { "Text and #{uploader.to_markdown}" }
+          let(:description) { "Text and #{uploader.markdown_link}" }
 
           include_context 'issue move executed'
 
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 50e59954f73b..15699574b3a9 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -6,7 +6,7 @@
   let(:legacy_storage) { Storage::LegacyProject.new(project) }
   let(:hashed_storage) { Storage::HashedProject.new(project) }
 
-  let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
+  let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
   let(:file_uploader) { build(:file_uploader, project: project) }
   let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
   let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
@@ -58,6 +58,6 @@
   end
 
   def base_path(storage)
-    FileUploader.dynamic_path_builder(storage.disk_path)
+    File.join(FileUploader.root, storage.disk_path)
   end
 end
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 935c08221e0c..7ce80c82439c 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -2,6 +2,8 @@
   let(:user)  { create(:user) }
   let(:jpg)   { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
   let(:txt)   { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+  let(:secret) { FileUploader.generate_secret }
+  let(:uploader_class) { FileUploader }
 
   describe "POST #create" do
     context 'when a user is not authorized to upload a file' do
@@ -65,7 +67,12 @@
 
   describe "GET #show" do
     let(:show_upload) do
-      get :show, params.merge(secret: "123456", filename: "image.jpg")
+      get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
+    end
+
+    before do
+      expect(FileUploader).to receive(:generate_secret).and_return(secret)
+      UploadService.new(model, jpg, uploader_class).execute
     end
 
     context "when the model is public" do
@@ -75,11 +82,6 @@
 
       context "when not signed in" do
         context "when the file exists" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-            allow(jpg).to receive(:exists?).and_return(true)
-          end
-
           it "responds with status 200" do
             show_upload
 
@@ -88,6 +90,10 @@
         end
 
         context "when the file doesn't exist" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+          end
+
           it "responds with status 404" do
             show_upload
 
@@ -102,11 +108,6 @@
         end
 
         context "when the file exists" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-            allow(jpg).to receive(:exists?).and_return(true)
-          end
-
           it "responds with status 200" do
             show_upload
 
@@ -115,6 +116,10 @@
         end
 
         context "when the file doesn't exist" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+          end
+
           it "responds with status 404" do
             show_upload
 
@@ -131,11 +136,6 @@
 
       context "when not signed in" do
         context "when the file exists" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-            allow(jpg).to receive(:exists?).and_return(true)
-          end
-
           context "when the file is an image" do
             before do
               allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -149,6 +149,10 @@
           end
 
           context "when the file is not an image" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+            end
+
             it "redirects to the sign in page" do
               show_upload
 
@@ -158,6 +162,10 @@
         end
 
         context "when the file doesn't exist" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+          end
+
           it "redirects to the sign in page" do
             show_upload
 
@@ -177,11 +185,6 @@
           end
 
           context "when the file exists" do
-            before do
-              allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-              allow(jpg).to receive(:exists?).and_return(true)
-            end
-
             it "responds with status 200" do
               show_upload
 
@@ -190,6 +193,10 @@
           end
 
           context "when the file doesn't exist" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+            end
+
             it "responds with status 404" do
               show_upload
 
@@ -200,11 +207,6 @@
 
         context "when the user doesn't have access to the model" do
           context "when the file exists" do
-            before do
-              allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
-              allow(jpg).to receive(:exists?).and_return(true)
-            end
-
             context "when the file is an image" do
               before do
                 allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -218,6 +220,10 @@
             end
 
             context "when the file is not an image" do
+              before do
+                allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+              end
+
               it "responds with status 404" do
                 show_upload
 
@@ -227,6 +233,10 @@
           end
 
           context "when the file doesn't exist" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+            end
+
             it "responds with status 404" do
               show_upload
 
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
new file mode 100644
index 000000000000..0022b2f803fb
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -0,0 +1,126 @@
+shared_context 'with storage' do |store, **stub_params|
+  before do
+    subject.object_store = store
+  end
+end
+
+shared_examples "migrates" do |to_store:, from_store: nil|
+  let(:to) { to_store }
+  let(:from) { from_store || subject.object_store }
+
+  def migrate(to)
+    subject.migrate!(to)
+  end
+
+  def checksum
+    Digest::SHA256.hexdigest(subject.read)
+  end
+
+  before do
+    migrate(from)
+  end
+
+  it 'does nothing when migrating to the current store' do
+    expect { migrate(from) }.not_to change { subject.object_store }.from(from)
+  end
+
+  it 'migrates to the specified store' do
+    from_checksum = checksum
+
+    expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
+    expect(checksum).to eq(from_checksum)
+  end
+
+  it 'removes the original file after the migration' do
+    original_file = subject.file.path
+    migrate(to)
+
+    expect(File.exist?(original_file)).to be_falsey
+  end
+
+  context 'when migration is unsuccessful' do
+    shared_examples "handles gracefully" do |error:|
+      it 'does not update the object_store' do
+        expect { migrate(to) }.to raise_error(error)
+        expect(subject.object_store).to eq(from)
+      end
+
+      it 'does not delete the original file' do
+        expect { migrate(to) }.to raise_error(error)
+        expect(subject.exists?).to be_truthy
+      end
+    end
+
+    context 'when the store is not supported' do
+      let(:to) { -1 } # not a valid store
+
+      include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
+    end
+
+    context 'upon a fog failure' do
+      before do
+        storage_class = subject.send(:storage_for, to).class
+        expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
+      end
+
+      include_examples "handles gracefully", error: "Store failure."
+    end
+
+    context 'upon a database failure' do
+      before do
+        expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
+      end
+
+      include_examples "handles gracefully", error: "ActiveRecord failure."
+    end
+  end
+end
+
+shared_examples "matches the method pattern" do |method|
+  let(:target) { subject }
+  let(:args) { nil }
+  let(:pattern) { patterns[method] }
+
+  it do
+    return skip "No pattern provided, skipping." unless pattern
+
+    expect(target.method(method).call(*args)).to match(pattern)
+  end
+end
+
+shared_examples "builds correct paths" do |**patterns|
+  let(:patterns) { patterns }
+
+  before do
+    allow(subject).to receive(:filename).and_return('<filename>')
+  end
+
+  describe "#store_dir" do
+    it_behaves_like "matches the method pattern", :store_dir
+  end
+
+  describe "#cache_dir" do
+    it_behaves_like "matches the method pattern", :cache_dir
+  end
+
+  describe "#work_dir" do
+    it_behaves_like "matches the method pattern", :work_dir
+  end
+
+  describe "#upload_path" do
+    it_behaves_like "matches the method pattern", :upload_path
+  end
+
+  describe ".absolute_path" do
+    it_behaves_like "matches the method pattern", :absolute_path do
+      let(:target) { subject.class }
+      let(:args) { [upload] }
+    end
+  end
+
+  describe ".base_dir" do
+    it_behaves_like "matches the method pattern", :base_dir do
+      let(:target) { subject.class }
+    end
+  end
+end
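
For orientation, the uploader specs further down consume this file roughly as follows (a minimal sketch; `MyUploader`, `model`, and the example path pattern are placeholders, not part of this change):

    describe MyUploader do
      let(:uploader) { described_class.new(model, :file) }

      subject { uploader }

      context 'object store is REMOTE' do
        before do
          stub_uploads_object_storage
        end

        include_context 'with storage', described_class::Store::REMOTE

        it_behaves_like 'builds correct paths', store_dir: %r[some/expected/prefix/]
      end

      describe '#migrate!' do
        before do
          uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
          stub_uploads_object_storage
        end

        # Exercise the migration round trip in both directions.
        it_behaves_like 'migrates', to_store: described_class::Store::REMOTE
        it_behaves_like 'migrates', from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
      end
    end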
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
index 4f469648d5ce..93477e513f25 100644
--- a/spec/support/stub_object_storage.rb
+++ b/spec/support/stub_object_storage.rb
@@ -30,4 +30,11 @@ def stub_lfs_object_storage(**params)
                                  remote_directory: 'lfs-objects',
                                  **params)
   end
+
+  def stub_uploads_object_storage(uploader = described_class, **params)
+    stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
+                                 uploader: uploader,
+                                 remote_directory: 'uploads',
+                                 **params)
+  end
 end
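
Like the existing stub_lfs_object_storage above it, the new helper is meant to be called from a before block; keyword params are forwarded to stub_object_storage_uploader, so specs can also simulate disabled or unlicensed storage (a sketch; AvatarUploader is just an example argument):

    before do
      stub_uploads_object_storage                      # stubs storage for described_class
      stub_uploads_object_storage(AvatarUploader)      # or for an explicit uploader class
      stub_uploads_object_storage(enabled: false)      # simulate disabled object storage
      stub_uploads_object_storage(licensed: false)     # simulate a missing license feature
    end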
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 664698fcbaf1..3b79d769e02d 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -239,7 +239,7 @@ def pages_path
   end
 
   def artifacts_path
-    Gitlab.config.artifacts.path
+    Gitlab.config.artifacts.storage_path
   end
 
   # When no cached assets exist, manually hit the root path to create them
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index d05eda082016..5752078d2a0a 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -1,6 +1,6 @@
 module TrackUntrackedUploadsHelpers
   def uploaded_file
-    fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+    fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
     fixture_file_upload(fixture_path)
   end
 
diff --git a/spec/tasks/gitlab/artifacts_rake_spec.rb b/spec/tasks/gitlab/artifacts_rake_spec.rb
index a30823b8875c..570c7fa75033 100644
--- a/spec/tasks/gitlab/artifacts_rake_spec.rb
+++ b/spec/tasks/gitlab/artifacts_rake_spec.rb
@@ -18,7 +18,7 @@
       let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
 
       context 'when local storage is used' do
-        let(:store) { ObjectStoreUploader::LOCAL_STORE }
+        let(:store) { ObjectStorage::Store::LOCAL }
 
         context 'and job does not have file store defined' do
           let(:object_storage_enabled) { true }
@@ -27,8 +27,8 @@
           it "migrates file to remote storage" do
             subject
 
-            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
           end
         end
 
@@ -38,8 +38,8 @@
           it "migrates file to remote storage" do
             subject
 
-            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
           end
         end
 
@@ -47,8 +47,8 @@
           it "fails to migrate to remote storage" do
             subject
 
-            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
-            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
           end
         end
       end
@@ -56,13 +56,13 @@
       context 'when remote storage is used' do
         let(:object_storage_enabled) { true }
 
-        let(:store) { ObjectStoreUploader::REMOTE_STORE }
+        let(:store) { ObjectStorage::Store::REMOTE }
 
         it "file stays on remote storage" do
           subject
 
-          expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-          expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+          expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
         end
       end
     end
@@ -72,7 +72,7 @@
     let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
 
     context 'when local storage is used' do
-      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+      let(:store) { ObjectStorage::Store::LOCAL }
 
       context 'and job does not have file store defined' do
         let(:object_storage_enabled) { true }
@@ -81,7 +81,7 @@
         it "migrates file to remote storage" do
           subject
 
-          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
         end
       end
 
@@ -91,7 +91,7 @@
         it "migrates file to remote storage" do
           subject
 
-          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
         end
       end
 
@@ -99,19 +99,19 @@
         it "fails to migrate to remote storage" do
           subject
 
-          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+          expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
         end
       end
     end
 
     context 'when remote storage is used' do
       let(:object_storage_enabled) { true }
-      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+      let(:store) { ObjectStorage::Store::REMOTE }
 
       it "file stays on remote storage" do
         subject
 
-        expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
       end
     end
   end
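
The renamed constants referenced throughout these assertions live under the new ObjectStorage namespace; for reference, a sketch of their shape (integer values as stored in the <mounted_as>_store columns; illustrative, the accompanying uploader change is authoritative):

    module ObjectStorage
      module Store
        LOCAL  = 1 # file on the local filesystem
        REMOTE = 2 # file in the configured object store (fog)
      end
    end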
diff --git a/spec/tasks/gitlab/lfs_rake_spec.rb b/spec/tasks/gitlab/lfs_rake_spec.rb
index faed24f2010c..f1b677bd6eeb 100644
--- a/spec/tasks/gitlab/lfs_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs_rake_spec.rb
@@ -6,8 +6,8 @@
   end
 
   describe 'migrate' do
-    let(:local) { ObjectStoreUploader::LOCAL_STORE }
-    let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+    let(:local) { ObjectStorage::Store::LOCAL }
+    let(:remote) { ObjectStorage::Store::REMOTE }
     let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
 
     def lfs_migrate
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 04ee6e9bfadb..70618f6bc192 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,28 +1,37 @@
 require 'spec_helper'
 
 describe AttachmentUploader do
-  let(:uploader) { described_class.new(build_stubbed(:user)) }
+  let(:note) { create(:note, :with_attachment) }
+  let(:uploader) { note.attachment }
+  let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
 
-  describe "#store_dir" do
-    it "stores in the system dir" do
-      expect(uploader.store_dir).to start_with("uploads/-/system/user")
-    end
+  subject { uploader }
 
-    it "uses the old path when using object storage" do
-      expect(described_class).to receive(:file_storage?).and_return(false)
-      expect(uploader.store_dir).to start_with("uploads/user")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/note/attachment/],
+                  upload_path: %r[uploads/-/system/note/attachment/],
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
+  # EE-specific
+  context "object_store is REMOTE" do
+    before do
+      stub_uploads_object_storage
     end
+
+    include_context 'with storage', described_class::Store::REMOTE
+
+    it_behaves_like 'builds correct paths',
+                    store_dir: %r[note/attachment/],
+                    upload_path: %r[note/attachment/]
   end
 
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
+  describe "#migrate!" do
+    before do
+      uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+      stub_uploads_object_storage
     end
+
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
   end
 end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1dc574699d8d..6f4dbae26abe 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,28 +1,40 @@
 require 'spec_helper'
 
 describe AvatarUploader do
-  let(:uploader) { described_class.new(build_stubbed(:user)) }
+  let(:model) { build_stubbed(:user) }
+  let(:uploader) { described_class.new(model, :avatar) }
+  let(:upload) { create(:upload, model: model) }
 
-  describe "#store_dir" do
-    it "stores in the system dir" do
-      expect(uploader.store_dir).to start_with("uploads/-/system/user")
-    end
+  subject { uploader }
 
-    it "uses the old path when using object storage" do
-      expect(described_class).to receive(:file_storage?).and_return(false)
-      expect(uploader.store_dir).to start_with("uploads/user")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/user/avatar/],
+                  upload_path: %r[uploads/-/system/user/avatar/],
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
 
-  describe '#move_to_cache' do
-    it 'is false' do
-      expect(uploader.move_to_cache).to eq(false)
+  # EE-specific
+  context "object_store is REMOTE" do
+    before do
+      stub_uploads_object_storage
     end
+
+    include_context 'with storage', described_class::Store::REMOTE
+
+    it_behaves_like 'builds correct paths',
+                    store_dir: %r[user/avatar/],
+                    upload_path: %r[user/avatar/]
   end
 
-  describe '#move_to_store' do
-    it 'is false' do
-      expect(uploader.move_to_store).to eq(false)
+  context "with a file" do
+    let(:project) { create(:project, :with_avatar) }
+    let(:uploader) { project.avatar }
+    let(:upload) { uploader.upload }
+
+    before do
+      stub_uploads_object_storage
     end
+
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
   end
 end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index 0cf462e95538..bc024cd307c7 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -3,13 +3,13 @@
 describe FileMover do
   let(:filename) { 'banana_sample.gif' }
   let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
+  let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
+
   let(:temp_description) do
-    'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
-    '(/uploads/-/system/temp/secret55/banana_sample.gif)'
+    "test ![banana_sample](/#{temp_file_path}) "\
+    "same ![banana_sample](/#{temp_file_path}) "
   end
-  let(:temp_file_path) { File.join('secret55', filename).to_s }
-  let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
-
+  let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
   let(:snippet) { create(:personal_snippet, description: temp_description) }
 
   subject { described_class.new(file_path, snippet).execute }
@@ -28,8 +28,8 @@
 
         expect(snippet.reload.description)
           .to eq(
-            "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
-            " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
+            "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
+            "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
           )
       end
 
@@ -50,8 +50,8 @@
 
         expect(snippet.reload.description)
           .to eq(
-            "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
-            " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
+            "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
+            "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
           )
       end
 
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index fd195d6f9b85..b92d52727c10 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,118 +1,78 @@
 require 'spec_helper'
 
 describe FileUploader do
-  let(:uploader) { described_class.new(build_stubbed(:project)) }
+  let(:group) { create(:group, name: 'awesome') }
+  let(:project) { create(:project, namespace: group, name: 'project') }
+  let(:uploader) { described_class.new(project) }
+  let(:upload)  { double(model: project, path: 'secret/foo.jpg') }
 
-  context 'legacy storage' do
-    let(:project) { build_stubbed(:project) }
-
-    describe '.absolute_path' do
-      it 'returns the correct absolute path by building it dynamically' do
-        upload = double(model: project, path: 'secret/foo.jpg')
-
-        dynamic_segment = project.full_path
-
-        expect(described_class.absolute_path(upload))
-          .to end_with("#{dynamic_segment}/secret/foo.jpg")
-      end
-    end
+  subject { uploader }
 
-    describe "#store_dir" do
-      it "stores in the namespace path" do
-        uploader = described_class.new(project)
-
-        expect(uploader.store_dir).to include(project.full_path)
-        expect(uploader.store_dir).not_to include("system")
-      end
-    end
+  shared_examples 'builds correct legacy storage paths' do
+    include_examples 'builds correct paths',
+                     store_dir: %r{awesome/project/\h+},
+                     absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
   end
 
-  context 'hashed storage' do
+  shared_examples 'uses hashed storage' do
     context 'when rolled out attachments' do
-      let(:project) { build_stubbed(:project, :hashed) }
-
-      describe '.absolute_path' do
-        it 'returns the correct absolute path by building it dynamically' do
-          upload = double(model: project, path: 'secret/foo.jpg')
-
-          dynamic_segment = project.disk_path
-
-          expect(described_class.absolute_path(upload))
-            .to end_with("#{dynamic_segment}/secret/foo.jpg")
-        end
+      before do
+        allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
       end
 
-      describe "#store_dir" do
-        it "stores in the namespace path" do
-          uploader = described_class.new(project)
+      let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
 
-          expect(uploader.store_dir).to include(project.disk_path)
-          expect(uploader.store_dir).not_to include("system")
-        end
-      end
+      it_behaves_like 'builds correct paths',
+                      store_dir: %r{ca/fe/fe/ed/\h+},
+                      absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
     end
 
     context 'when only repositories are rolled out' do
-      let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+      let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
 
-      describe '.absolute_path' do
-        it 'returns the correct absolute path by building it dynamically' do
-          upload = double(model: project, path: 'secret/foo.jpg')
+      it_behaves_like 'builds correct legacy storage paths'
+    end
+  end
 
-          dynamic_segment = project.full_path
+  context 'legacy storage' do
+    it_behaves_like 'builds correct legacy storage paths'
+    include_examples 'uses hashed storage'
+  end
 
-          expect(described_class.absolute_path(upload))
-            .to end_with("#{dynamic_segment}/secret/foo.jpg")
-        end
-      end
+  context 'object store is remote' do
+    before do
+      stub_uploads_object_storage
+    end
 
-      describe "#store_dir" do
-        it "stores in the namespace path" do
-          uploader = described_class.new(project)
+    include_context 'with storage', described_class::Store::REMOTE
 
-          expect(uploader.store_dir).to include(project.full_path)
-          expect(uploader.store_dir).not_to include("system")
-        end
-      end
-    end
+    it_behaves_like 'builds correct legacy storage paths'
+    include_examples 'uses hashed storage'
   end
 
   describe 'initialize' do
-    it 'generates a secret if none is provided' do
-      expect(SecureRandom).to receive(:hex).and_return('secret')
-
-      uploader = described_class.new(double)
-
-      expect(uploader.secret).to eq 'secret'
-    end
+    let(:uploader) { described_class.new(double, 'secret') }
 
     it 'accepts a secret parameter' do
-      expect(SecureRandom).not_to receive(:hex)
-
-      uploader = described_class.new(double, 'secret')
-
-      expect(uploader.secret).to eq 'secret'
+      expect(described_class).not_to receive(:generate_secret)
+      expect(uploader.secret).to eq('secret')
     end
   end
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
+  describe '#secret' do
+    it 'generates a secret if none is provided' do
+      expect(described_class).to receive(:generate_secret).and_return('secret')
+      expect(uploader.secret).to eq('secret')
     end
   end
 
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
+  describe "#migrate!" do
+    before do
+      uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+      stub_uploads_object_storage
     end
-  end
-
-  describe '#relative_path' do
-    it 'removes the leading dynamic path segment' do
-      fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
-      uploader.store!(fixture_file_upload(fixture))
 
-      expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
-    end
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
   end
 end
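
These specs stub FileUploader.generate_secret instead of SecureRandom directly. Given that the removed test stubbed SecureRandom.hex, the class method they assume is presumably along these lines (a sketch, not the patched implementation):

    class FileUploader < GitlabUploader
      # Random hex token used as the dynamic path segment of an upload.
      def self.generate_secret
        SecureRandom.hex
      end
    end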
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index decea35c86de..fda70a8441bc 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,46 +1,26 @@
 require 'spec_helper'
 
 describe JobArtifactUploader do
-  let(:store) { described_class::LOCAL_STORE }
+  let(:store) { described_class::Store::LOCAL }
   let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
   let(:uploader) { described_class.new(job_artifact, :file) }
-  let(:local_path) { Gitlab.config.artifacts.path }
 
-  describe '#store_dir' do
-    subject { uploader.store_dir }
+  subject { uploader }
 
-    let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
+                  cache_dir: %r[artifacts/tmp/cache],
+                  work_dir: %r[artifacts/tmp/work]
 
-    context 'when using local storage' do
-      it { is_expected.to start_with(local_path) }
-      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
-      it { is_expected.to end_with(path) }
-    end
-
-    context 'when using remote storage' do
-      let(:store) { described_class::REMOTE_STORE }
-
-      before do
-        stub_artifacts_object_storage
-      end
-
-      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
-      it { is_expected.to end_with(path) }
+  context "object store is REMOTE" do
+    before do
+      stub_artifacts_object_storage
     end
-  end
-
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
 
-  describe '#work_dir' do
-    subject { uploader.work_dir }
+    include_context 'with storage', described_class::Store::REMOTE
 
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/work') }
+    it_behaves_like "builds correct paths",
+                    store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
   end
 
   context 'file is stored in valid local_path' do
@@ -55,7 +35,7 @@
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with(local_path) }
+    it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
     it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
     it { is_expected.to include("/#{job_artifact.project_id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
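
The store_dir pattern asserted above encodes a hashed layout: two 2-hex-digit fan-out directories, a 64-hex digest derived from the project, then date, project id, and artifact id. A hypothetical reconstruction of a matching path (placeholder values, not the uploader's actual code):

    require 'digest'

    project_id  = 42          # placeholder values
    artifact_id = 7
    created_at  = Time.now.utc

    hash = Digest::SHA2.hexdigest(project_id.to_s) # assumption: digest of the project id
    date = created_at.strftime('%Y_%m_%d')
    path = File.join(hash[0, 2], hash[2, 2], hash, date, project_id.to_s, artifact_id.to_s)
    # => matches %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]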
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 7b316072f471..eeb6fd90c9d2 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,51 +1,35 @@
 require 'rails_helper'
 
 describe LegacyArtifactUploader do
-  let(:store) { described_class::LOCAL_STORE }
+  let(:store) { described_class::Store::LOCAL }
   let(:job) { create(:ci_build, artifacts_file_store: store) }
   let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
-  let(:local_path) { Gitlab.config.artifacts.path }
+  let(:local_path) { described_class.root }
 
-  describe '.local_store_path' do
-    subject { described_class.local_store_path }
+  subject { uploader }
 
-    it "delegate to artifacts path" do
-      expect(Gitlab.config.artifacts).to receive(:path)
-
-      subject
-    end
-  end
-
-  describe '.artifacts_upload_path' do
-    subject { described_class.artifacts_upload_path }
+  # TODO: move to Workhorse::UploadPath
+  describe '.workhorse_upload_path' do
+    subject { described_class.workhorse_upload_path }
 
     it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('tmp/uploads/') }
+    it { is_expected.to end_with('tmp/uploads') }
   end
 
-  describe '#store_dir' do
-    subject { uploader.store_dir }
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
+                  cache_dir: %r[artifacts/tmp/cache],
+                  work_dir: %r[artifacts/tmp/work]
 
-    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
-
-    context 'when using local storage' do
-      it { is_expected.to start_with(local_path) }
-      it { is_expected.to end_with(path) }
+  context 'object store is remote' do
+    before do
+      stub_artifacts_object_storage
     end
-  end
 
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
+    include_context 'with storage', described_class::Store::REMOTE
 
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
-
-  describe '#work_dir' do
-    subject { uploader.work_dir }
-
-    it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('/tmp/work') }
+    it_behaves_like "builds correct paths",
+                    store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
   end
 
   describe '#filename' do
@@ -70,7 +54,7 @@
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with(local_path) }
+    it { is_expected.to start_with("#{uploader.root}") }
     it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
     it { is_expected.to include("/#{job.project_id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 9b8e2835ebc0..2e4bd008afea 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -5,37 +5,22 @@
   let(:uploader) { described_class.new(lfs_object, :file) }
   let(:path) { Gitlab.config.lfs.storage_path }
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
-    end
-  end
-
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
-    end
-  end
+  subject { uploader }
 
-  describe '#store_dir' do
-    subject { uploader.store_dir }
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\h{2}/\h{2}],
+                  cache_dir: %r[/lfs-objects/tmp/cache],
+                  work_dir: %r[/lfs-objects/tmp/work]
 
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
-  end
-
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
+  context "object store is REMOTE" do
+    before do
+      stub_lfs_object_storage
+    end
 
-  describe '#work_dir' do
-    subject { uploader.work_dir }
+    include_context 'with storage', described_class::Store::REMOTE
 
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/work') }
+    it_behaves_like "builds correct paths",
+                    store_dir: %r[\h{2}/\h{2}]
   end
 
   describe 'migration to object storage' do
@@ -73,7 +58,7 @@
   end
 
   describe 'remote file' do
-    let(:remote) { described_class::REMOTE_STORE }
+    let(:remote) { described_class::Store::REMOTE }
     let(:lfs_object) { create(:lfs_object, file_store: remote) }
 
     context 'with object storage enabled' do
@@ -103,7 +88,7 @@
   end
 
   def store_file(lfs_object)
-    lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+    lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
     lfs_object.save!
   end
 end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179d..2f2c27127fcc 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,39 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe NamespaceFileUploader do
   let(:group) { build_stubbed(:group) }
   let(:uploader) { described_class.new(group) }
+  let(:upload) { create(:upload, :namespace_upload, model: group) }
+
+  subject { uploader }
 
-  describe "#store_dir" do
-    it "stores in the namespace id directory" do
-      expect(uploader.store_dir).to include(group.id.to_s)
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/namespace/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+  # EE-specific
+  context "object_store is REMOTE" do
+    before do
+      stub_uploads_object_storage
     end
-  end
 
-  describe ".absolute_path" do
-    it "stores in thecorrect directory" do
-      upload_record = create(:upload, :namespace_upload, model: group)
+    include_context 'with storage', described_class::Store::REMOTE
 
-      expect(described_class.absolute_path(upload_record))
-        .to include("-/system/namespace/#{group.id}")
+    it_behaves_like 'builds correct paths',
+                    store_dir: %r[namespace/\d+/\h+],
+                    upload_path: IDENTIFIER
+  end
+
+  describe "#migrate!" do
+    before do
+      uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+      stub_uploads_object_storage
     end
+
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
   end
 end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 000000000000..e01ad9af1dcd
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,350 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+class Implementation < GitlabUploader
+  include ObjectStorage::Concern
+  include ::RecordsUploads::Concern
+  prepend ::ObjectStorage::Extension::RecordsUploads
+
+  storage_options Gitlab.config.uploads
+
+  private
+
+  # user/:id
+  def dynamic_segment
+    File.join(model.class.to_s.underscore, model.id.to_s)
+  end
+end
+
+describe ObjectStorage do
+  let(:uploader_class) { Implementation }
+  let(:object) { build_stubbed(:user) }
+  let(:uploader) { uploader_class.new(object, :file) }
+
+  before do
+    allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+  end
+
+  describe '#object_store=' do
+    it "reload the local storage" do
+      uploader.object_store = described_class::Store::LOCAL
+      expect(uploader.file_storage?).to be_truthy
+    end
+
+    it "reload the REMOTE storage" do
+      uploader.object_store = described_class::Store::REMOTE
+      expect(uploader.file_storage?).to be_falsey
+    end
+  end
+
+  context 'object_store is Store::LOCAL' do
+    before do
+      uploader.object_store = described_class::Store::LOCAL
+    end
+
+    describe '#store_dir' do
+      it 'is the composition of (base_dir, dynamic_segment)' do
+        expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+      end
+    end
+  end
+
+  context 'object_store is Store::REMOTE' do
+    before do
+      uploader.object_store = described_class::Store::REMOTE
+    end
+
+    describe '#store_dir' do
+      it 'is the composition of (dynamic_segment)' do
+        expect(uploader.store_dir).to start_with("user/")
+      end
+    end
+  end
+
+  describe '#object_store' do
+    it "delegates to <mount>_store on model" do
+      expect(object).to receive(:file_store)
+
+      uploader.object_store
+    end
+
+    context 'when store is null' do
+      before do
+        expect(object).to receive(:file_store).and_return(nil)
+      end
+
+      it "returns Store::LOCAL" do
+        expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+      end
+    end
+
+    context 'when value is set' do
+      before do
+        expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+      end
+
+      it "returns the given value" do
+        expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+      end
+    end
+  end
+
+  describe '#file_cache_storage?' do
+    context 'when file storage is used' do
+      before do
+        uploader_class.cache_storage(:file)
+      end
+
+      it { expect(uploader).to be_file_cache_storage }
+    end
+
+    context 'when is remote storage' do
+      before do
+        uploader_class.cache_storage(:fog)
+      end
+
+      it { expect(uploader).not_to be_file_cache_storage }
+    end
+  end
+
+  # This means the uploader class includes
+  #   include RecordsUploads::Concern
+  #   prepend ObjectStorage::Extension::RecordsUploads
+  # and the object_store persistence is delegated to the `Upload` model.
+  #
+  context 'when persist_object_store? is false' do
+    let(:object) { create(:project, :with_avatar) }
+    let(:uploader) { object.avatar }
+
+    it { expect(object).to be_a(Avatarable) }
+    it { expect(uploader.persist_object_store?).to be_falsey }
+
+    describe 'delegates the object_store logic to the `Upload` model' do
+      it 'sets @upload to the found `upload`' do
+        expect(uploader.upload).to eq(Upload.find_by(model: object, uploader: uploader.class.to_s))
+      end
+
+      it 'sets @object_store to the `Upload` value' do
+        expect(uploader.object_store).to eq(uploader.upload.store)
+      end
+    end
+  end
+
+  # This means the model holds an <mounted_as>_store attribute directly
+  # and does not delegate the object_store persistence to the `Upload` model.
+  #
+  context 'persist_object_store? is true' do
+    context 'when using JobArtifactsUploader' do
+      let(:store) { described_class::Store::LOCAL }
+      let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+      let(:uploader) { object.file }
+
+      context 'checking described_class' do
+        it "uploader include described_class::Concern" do
+          expect(uploader).to be_a(described_class::Concern)
+        end
+      end
+
+      describe '#use_file' do
+        context 'when file is stored locally' do
+          it "calls a regular path" do
+            expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+          end
+        end
+
+        context 'when file is stored remotely' do
+          let(:store) { described_class::Store::REMOTE }
+
+          before do
+            stub_artifacts_object_storage
+          end
+
+          it "calls a cache path" do
+            expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+          end
+        end
+      end
+
+      describe '#migrate!' do
+        subject { uploader.migrate!(new_store) }
+
+        shared_examples "updates the underlying <mounted>_store" do
+          it do
+            subject
+
+            expect(object.file_store).to eq(new_store)
+          end
+        end
+
+        context 'when using the same storage' do
+          let(:new_store) { store }
+
+          it "to not migrate the storage" do
+            subject
+
+            expect(uploader).not_to receive(:store!)
+            expect(uploader.object_store).to eq(store)
+          end
+        end
+
+        context 'when migrating to local storage' do
+          let(:store) { described_class::Store::REMOTE }
+          let(:new_store) { described_class::Store::LOCAL }
+
+          before do
+            stub_artifacts_object_storage
+          end
+
+          include_examples "updates the underlying <mounted>_store"
+
+          it "local file does not exist" do
+            expect(File.exist?(uploader.path)).to eq(false)
+          end
+
+          it "remote file exist" do
+            expect(uploader.file.exists?).to be_truthy
+          end
+
+          it "does migrate the file" do
+            subject
+
+            expect(uploader.object_store).to eq(new_store)
+            expect(File.exist?(uploader.path)).to eq(true)
+          end
+        end
+
+        context 'when migrating to remote storage' do
+          let(:new_store) { described_class::Store::REMOTE }
+          let!(:current_path) { uploader.path }
+
+          it "file does exist" do
+            expect(File.exist?(current_path)).to eq(true)
+          end
+
+          context 'when storage is disabled' do
+            before do
+              stub_artifacts_object_storage(enabled: false)
+            end
+
+            it "to raise an error" do
+              expect { subject }.to raise_error(/Object Storage is not enabled/)
+            end
+          end
+
+          context 'when storage is unlicensed' do
+            before do
+              stub_artifacts_object_storage(licensed: false)
+            end
+
+            it "raises an error" do
+              expect { subject }.to raise_error(/Object Storage feature is missing/)
+            end
+          end
+
+          context 'when credentials are set' do
+            before do
+              stub_artifacts_object_storage
+            end
+
+            include_examples "updates the underlying <mounted>_store"
+
+            it "does migrate the file" do
+              subject
+
+              expect(uploader.object_store).to eq(new_store)
+            end
+
+            it "does delete original file" do
+              subject
+
+              expect(File.exist?(current_path)).to eq(false)
+            end
+
+            context 'when subject save fails' do
+              before do
+                expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+              end
+
+              it "original file is not removed" do
+                expect { subject }.to raise_error(/exception/)
+
+                expect(File.exist?(current_path)).to eq(true)
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+
+  describe '#fog_directory' do
+    let(:remote_directory) { 'directory' }
+
+    before do
+      uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+    end
+
+    subject { uploader.fog_directory }
+
+    it { is_expected.to eq(remote_directory) }
+  end
+
+  describe '#fog_credentials' do
+    let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+    before do
+      uploader_class.storage_options double(object_store: double(connection: connection))
+    end
+
+    subject { uploader.fog_credentials }
+
+    it { is_expected.to eq(provider: 'AWS') }
+  end
+
+  describe '#fog_public' do
+    subject { uploader.fog_public }
+
+    it { is_expected.to eq(false) }
+  end
+
+  describe '#verify_license!' do
+    subject { uploader.verify_license!(nil) }
+
+    context 'when using local storage' do
+      before do
+        expect(object).to receive(:file_store) { described_class::Store::LOCAL }
+      end
+
+      it "does not raise an error" do
+        expect { subject }.not_to raise_error
+      end
+    end
+
+    context 'when using remote storage' do
+      before do
+        uploader_class.storage_options double(object_store: double(enabled: true))
+        expect(object).to receive(:file_store) { described_class::Store::REMOTE }
+      end
+
+      context 'feature is not available' do
+        before do
+          expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
+        end
+
+        it "does raise an error" do
+          expect { subject }.to raise_error(/Object Storage feature is missing/)
+        end
+      end
+
+      context 'feature is available' do
+        before do
+          expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
+        end
+
+        it "does not raise an error" do
+          expect { subject }.not_to raise_error
+        end
+      end
+    end
+  end
+end
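
The Implementation class at the top of this spec doubles as a usage reference: a production uploader opts into object storage with the same includes plus storage_options. A condensed sketch (ExampleUploader is illustrative):

    class ExampleUploader < GitlabUploader
      include ObjectStorage::Concern                     # adds #object_store, #migrate!, #use_file
      include ::RecordsUploads::Concern                  # records an Upload row per stored file
      prepend ::ObjectStorage::Extension::RecordsUploads # persists the store on the Upload row

      storage_options Gitlab.config.uploads

      private

      # Dynamic part of the storage path, e.g. "user/42".
      def dynamic_segment
        File.join(model.class.to_s.underscore, model.id.to_s)
      end
    end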
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
deleted file mode 100644
index 2f52867bb91e..000000000000
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ /dev/null
@@ -1,315 +0,0 @@
-require 'rails_helper'
-require 'carrierwave/storage/fog'
-
-describe ObjectStoreUploader do
-  let(:uploader_class) { Class.new(described_class) }
-  let(:object) { double }
-  let(:uploader) { uploader_class.new(object, :file) }
-
-  before do
-    allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
-  end
-
-  describe '#object_store' do
-    it "calls artifacts_file_store on object" do
-      expect(object).to receive(:file_store)
-
-      uploader.object_store
-    end
-
-    context 'when store is null' do
-      before do
-        expect(object).to receive(:file_store).twice.and_return(nil)
-      end
-
-      it "returns LOCAL_STORE" do
-        expect(uploader.real_object_store).to be_nil
-        expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
-      end
-    end
-
-    context 'when value is set' do
-      before do
-        expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
-      end
-
-      it "returns given value" do
-        expect(uploader.real_object_store).not_to be_nil
-        expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
-      end
-    end
-  end
-
-  describe '#object_store=' do
-    it "calls artifacts_file_store= on object" do
-      expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
-
-      uploader.object_store = described_class::REMOTE_STORE
-    end
-  end
-
-  describe '#file_storage?' do
-    context 'when file storage is used' do
-      before do
-        expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
-      end
-
-      it { expect(uploader).to be_file_storage }
-    end
-
-    context 'when is remote storage' do
-      before do
-        uploader_class.storage_options double(
-          object_store: double(enabled: true))
-        expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
-      end
-
-      it { expect(uploader).not_to be_file_storage }
-    end
-  end
-
-  describe '#file_cache_storage?' do
-    context 'when file storage is used' do
-      before do
-        uploader_class.cache_storage(:file)
-      end
-
-      it { expect(uploader).to be_file_cache_storage }
-    end
-
-    context 'when is remote storage' do
-      before do
-        uploader_class.cache_storage(:fog)
-      end
-
-      it { expect(uploader).not_to be_file_cache_storage }
-    end
-  end
-
-  context 'when using JobArtifactsUploader' do
-    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
-    let(:uploader) { artifact.file }
-
-    context 'checking described_class' do
-      let(:store) { described_class::LOCAL_STORE }
-
-      it "uploader is of a described_class" do
-        expect(uploader).to be_a(described_class)
-      end
-
-      it 'moves files locally' do
-        expect(uploader.move_to_store).to be(true)
-        expect(uploader.move_to_cache).to be(true)
-      end
-    end
-
-    context 'when store is null' do
-      let(:store) { nil }
-
-      it "sets the store to LOCAL_STORE" do
-        expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
-      end
-    end
-
-    describe '#use_file' do
-      context 'when file is stored locally' do
-        let(:store) { described_class::LOCAL_STORE }
-
-        it "calls a regular path" do
-          expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
-        end
-      end
-
-      context 'when file is stored remotely' do
-        let(:store) { described_class::REMOTE_STORE }
-
-        before do
-          stub_artifacts_object_storage
-        end
-
-        it "calls a cache path" do
-          expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
-        end
-      end
-    end
-
-    describe '#migrate!' do
-      let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
-      let(:uploader) { artifact.file }
-      let(:store) { described_class::LOCAL_STORE }
-      
-      subject { uploader.migrate!(new_store) }
-
-      context 'when using the same storage' do
-        let(:new_store) { store }
-
-        it "to not migrate the storage" do
-          subject
-
-          expect(uploader.object_store).to eq(store)
-        end
-      end
-
-      context 'when migrating to local storage' do
-        let(:store) { described_class::REMOTE_STORE }
-        let(:new_store) { described_class::LOCAL_STORE }
-        
-        before do
-          stub_artifacts_object_storage
-        end
-
-        it "local file does not exist" do
-          expect(File.exist?(uploader.path)).to eq(false)
-        end
-
-        it "does migrate the file" do
-          subject
-
-          expect(uploader.object_store).to eq(new_store)
-          expect(File.exist?(uploader.path)).to eq(true)
-        end
-      end
-
-      context 'when migrating to remote storage' do
-        let(:new_store) { described_class::REMOTE_STORE }
-        let!(:current_path) { uploader.path }
-
-        it "file does exist" do
-          expect(File.exist?(current_path)).to eq(true)
-        end
-        
-        context 'when storage is disabled' do
-          before do
-            stub_artifacts_object_storage(enabled: false) 
-          end
-
-          it "to raise an error" do
-            expect { subject }.to raise_error(/Object Storage is not enabled/)
-          end
-        end
-
-        context 'when storage is unlicensed' do
-          before do
-            stub_artifacts_object_storage(licensed: false)
-          end
-
-          it "raises an error" do
-            expect { subject }.to raise_error(/Object Storage feature is missing/)
-          end
-        end
-
-        context 'when credentials are set' do
-          before do
-            stub_artifacts_object_storage
-          end
-
-          it "does migrate the file" do
-            subject
-
-            expect(uploader.object_store).to eq(new_store)
-            expect(File.exist?(current_path)).to eq(false)
-          end
-
-          it "does delete original file" do
-            subject
-    
-            expect(File.exist?(current_path)).to eq(false)
-          end
-
-          context 'when subject save fails' do
-            before do
-              expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
-            end
-
-            it "does catch an error" do
-              expect { subject }.to raise_error(/exception/)
-            end
-
-            it "original file is not removed" do
-              begin
-                subject
-              rescue
-              end
-
-              expect(File.exist?(current_path)).to eq(true)
-            end
-          end
-        end
-      end
-    end
-  end
-
-  describe '#fog_directory' do
-    let(:remote_directory) { 'directory' }
-
-    before do
-      uploader_class.storage_options double(
-        object_store: double(remote_directory: remote_directory))
-    end
-
-    subject { uploader.fog_directory }
-
-    it { is_expected.to eq(remote_directory) }
-  end
-
-  describe '#fog_credentials' do
-    let(:connection) { 'connection' }
-
-    before do
-      uploader_class.storage_options double(
-        object_store: double(connection: connection))
-    end
-
-    subject { uploader.fog_credentials }
-
-    it { is_expected.to eq(connection) }
-  end
-
-  describe '#fog_public' do
-    subject { uploader.fog_public }
-
-    it { is_expected.to eq(false) }
-  end
-
-  describe '#verify_license!' do
-    subject { uploader.verify_license!(nil) }
-
-    context 'when using local storage' do
-      before do
-        expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
-      end
-
-      it "does not raise an error" do
-        expect { subject }.not_to raise_error
-      end
-    end
-
-    context 'when using remote storage' do
-      before do
-        uploader_class.storage_options double(
-          object_store: double(enabled: true))
-        expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
-      end
-
-      context 'feature is not available' do
-        before do
-          expect(License).to receive(:feature_available?).with(:object_storage) { false }
-        end
-
-        it "does raise an error" do
-          expect { subject }.to raise_error(/Object Storage feature is missing/)
-        end
-      end
-
-      context 'feature is available' do
-        before do
-          expect(License).to receive(:feature_available?).with(:object_storage) { true }
-        end
-
-        it "does not raise an error" do
-          expect { subject }.not_to raise_error
-        end
-      end
-    end
-  end
-end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478dc..ef5a70f668bf 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,40 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe PersonalFileUploader do
-  let(:uploader) { described_class.new(build_stubbed(:project)) }
-  let(:snippet) { create(:personal_snippet) }
+  let(:model) { create(:personal_snippet) }
+  let(:uploader) { described_class.new(model) }
+  let(:upload) { create(:upload, :personal_snippet_upload) }
 
-  describe '.absolute_path' do
-    it 'returns the correct absolute path by building it dynamically' do
-      upload = double(model: snippet, path: 'secret/foo.jpg')
+  subject { uploader }
 
-      dynamic_segment = "personal_snippet/#{snippet.id}"
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/personal_snippet/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
 
-      expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
+  # EE-specific
+  context "object_store is REMOTE" do
+    before do
+      stub_uploads_object_storage
     end
+
+    include_context 'with storage', described_class::Store::REMOTE
+
+    it_behaves_like 'builds correct paths',
+                    store_dir: %r[\d+/\h+],
+                    upload_path: IDENTIFIER
   end
 
   describe '#to_h' do
-    it 'returns the hass' do
-      uploader = described_class.new(snippet, 'secret')
+    before do
+      subject.instance_variable_set(:@secret, 'secret')
+    end
 
+    it 'returns the hash of upload attributes' do
       allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
-      expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+      expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
 
       expect(uploader.to_h).to eq(
         alt: 'file_name',
@@ -28,4 +43,14 @@
       )
     end
   end
+
+  describe "#migrate!" do
+    before do
+      uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+      stub_uploads_object_storage
+    end
+
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+  end
 end
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758b..9a3e5d83e01f 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@
 describe RecordsUploads do
   let!(:uploader) do
     class RecordsUploadsExampleUploader < GitlabUploader
-      include RecordsUploads
+      include RecordsUploads::Concern
 
       storage :file
 
-      def model
-        FactoryBot.build_stubbed(:user)
+      def dynamic_segment
+        'co/fe/ee'
       end
     end
 
-    RecordsUploadsExampleUploader.new
+    RecordsUploadsExampleUploader.new(build_stubbed(:user))
   end
 
   def upload_fixture(filename)
@@ -20,48 +20,55 @@ def upload_fixture(filename)
   end
 
   describe 'callbacks' do
-    it 'calls `record_upload` after `store`' do
+    let(:upload) { create(:upload) }
+
+    before do
+      uploader.upload = upload
+    end
+
+    it 'calls #record_upload after `store`' do
       expect(uploader).to receive(:record_upload).once
 
       uploader.store!(upload_fixture('doc_sample.txt'))
     end
 
-    it 'calls `destroy_upload` after `remove`' do
-      expect(uploader).to receive(:destroy_upload).once
-
+    it '#destroy_upload after `remove`' do
       uploader.store!(upload_fixture('doc_sample.txt'))
 
+      expect(uploader).to receive(:destroy_upload).once
       uploader.remove!
     end
   end
 
   describe '#record_upload callback' do
-    it 'returns early when not using file storage' do
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:record)
-
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+    it 'creates an Upload record after store' do
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
     end
 
-    it "returns early when the file doesn't exist" do
-      allow(uploader).to receive(:file).and_return(double(exists?: false))
-      expect(Upload).not_to receive(:record)
-
+    it 'creates a new record and assigns size, path, model, and uploader' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
+
+      upload = uploader.upload
+      aggregate_failures do
+        expect(upload).to be_persisted
+        expect(upload.size).to eq uploader.file.size
+        expect(upload.path).to eq uploader.upload_path
+        expect(upload.model_id).to eq uploader.model.id
+        expect(upload.model_type).to eq uploader.model.class.to_s
+        expect(upload.uploader).to eq uploader.class.to_s
+      end
     end
 
-    it 'creates an Upload record after store' do
-      expect(Upload).to receive(:record)
-        .with(uploader)
+    it "does not create an Upload record when the file doesn't exist" do
+      allow(uploader).to receive(:file).and_return(double(exists?: false))
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'does not create an Upload record if model is missing' do
-      expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
-      expect(Upload).not_to receive(:record).with(uploader)
+      allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ def upload_fixture(filename)
         uploader: uploader.class.to_s
       )
 
+      uploader.upload = existing
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
       expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
-      expect(Upload.count).to eq 1
+      expect(Upload.count).to eq(1)
     end
   end
 
   describe '#destroy_upload callback' do
-    it 'returns early when not using file storage' do
-      uploader.store!(upload_fixture('rails_sample.jpg'))
-
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
-    it 'returns early when file is nil' do
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
     it 'destroys Upload records at the same path after removal' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 911360da66ca..9e50ce15871a 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,18 +2,31 @@
 
 describe UploadChecksumWorker do
   describe '#perform' do
-    it 'rescues ActiveRecord::RecordNotFound' do
-      expect { described_class.new.perform(999_999) }.not_to raise_error
+    subject { described_class.new }
+
+    context 'without a valid record' do
+      it 'rescues ActiveRecord::RecordNotFound' do
+        expect { subject.perform(999_999) }.not_to raise_error
+      end
     end
 
-    it 'calls calculate_checksum_without_delay and save!' do
-      upload = spy
-      expect(Upload).to receive(:find).with(999_999).and_return(upload)
+    context 'with a valid record' do
+      let(:upload) { create(:user, :with_avatar).avatar.upload }
+
+      before do
+        expect(Upload).to receive(:find).and_return(upload)
+        allow(upload).to receive(:foreground_checksumable?).and_return(false)
+      end
 
-      described_class.new.perform(999_999)
+      it 'calls calculate_checksum!' do
+        expect(upload).to receive(:calculate_checksum!)
+        subject.perform(upload.id)
+      end
 
-      expect(upload).to have_received(:calculate_checksum)
-      expect(upload).to have_received(:save!)
+      it 'calls save!' do
+        expect(upload).to receive(:save!)
+        subject.perform(upload.id)
+      end
     end
   end
 end
-- 
GitLab


From b14c484bb1d174c7c355de2258be1a4414b2cf78 Mon Sep 17 00:00:00 2001
From: Grzegorz Bizon <grzegorz@gitlab.com>
Date: Tue, 6 Feb 2018 15:39:20 +0000
Subject: [PATCH 09/14] Merge branch 'use-send-url-for-incompatible-runners'
 into 'master'
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Support SendURL for performing indirect download of artifacts if the client does…

See merge request gitlab-org/gitlab-ee!4401
---
 GITLAB_WORKHORSE_VERSION                      |  2 +-
 .../use-send-url-for-incompatible-runners.yml |  6 ++++
 lib/api/helpers.rb                            |  8 +++--
 lib/api/runner.rb                             |  3 +-
 lib/gitlab/workhorse.rb                       | 12 +++++++
 spec/lib/gitlab/workhorse_spec.rb             | 17 ++++++++++
 spec/requests/api/runner_spec.rb              | 33 ++++++++++++++++---
 7 files changed, 73 insertions(+), 8 deletions(-)
 create mode 100644 changelogs/unreleased-ee/use-send-url-for-incompatible-runners.yml

diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION
index bea438e9ade7..40c341bdcdbe 100644
--- a/GITLAB_WORKHORSE_VERSION
+++ b/GITLAB_WORKHORSE_VERSION
@@ -1 +1 @@
-3.3.1
+3.6.0
diff --git a/changelogs/unreleased-ee/use-send-url-for-incompatible-runners.yml b/changelogs/unreleased-ee/use-send-url-for-incompatible-runners.yml
new file mode 100644
index 000000000000..6e924a9ee0b1
--- /dev/null
+++ b/changelogs/unreleased-ee/use-send-url-for-incompatible-runners.yml
@@ -0,0 +1,6 @@
+---
+title: Support SendURL for performing indirect download of artifacts if the client
+  does not specify that it supports direct download
+merge_request:
+author:
+type: fixed
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index cc81e4d3595a..d4ca945873c6 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -418,13 +418,17 @@ def present_file!(path, filename, content_type = 'application/octet-stream')
       end
     end
 
-    def present_artifacts!(artifacts_file)
+    def present_artifacts!(artifacts_file, direct_download: true)
       return not_found! unless artifacts_file.exists?
 
       if artifacts_file.file_storage?
         present_file!(artifacts_file.path, artifacts_file.filename)
-      else
+      elsif direct_download
         redirect(artifacts_file.url)
+      else
+        header(*Gitlab::Workhorse.send_url(artifacts_file.url))
+        status :ok
+        body
       end
     end
 
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 1f80646a2ea5..e6e85d41806c 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -244,11 +244,12 @@ class Runner < Grape::API
       params do
         requires :id, type: Integer, desc: %q(Job's ID)
         optional :token, type: String, desc: %q(Job's authentication token)
+        optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
       end
       get '/:id/artifacts' do
         job = authenticate_job!
 
-        present_artifacts!(job.artifacts_file)
+        present_artifacts!(job.artifacts_file, direct_download: params[:direct_download])
       end
     end
   end
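
For context, a minimal sketch of how a client that cannot follow redirects might call this endpoint, assuming the route above; the host, job ID, and token values are illustrative:

    require 'net/http'
    require 'uri'

    # Illustrative values; the endpoint shape follows the route added above.
    uri = URI('https://gitlab.example.com/api/v4/jobs/42/artifacts')
    uri.query = URI.encode_www_form(token: 'JOB_TOKEN', direct_download: false)

    response = Net::HTTP.get_response(uri)
    # With direct_download=false the API answers 200 and delegates the body to
    # Workhorse via the Gitlab-Workhorse-Send-Data header, instead of a 302.
    puts response.code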
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index dfe8acd4833c..990a6b1d80d2 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -151,6 +151,18 @@ def send_artifacts_entry(build, entry)
         ]
       end
 
+      def send_url(url, allow_redirects: false)
+        params = {
+          'URL' => url,
+          'AllowRedirects' => allow_redirects
+        }
+
+        [
+          SEND_DATA_HEADER,
+          "send-url:#{encode(params)}"
+        ]
+      end
+
       def terminal_websocket(terminal)
         details = {
           'Terminal' => {
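
A minimal standalone sketch of the header this helper emits, assuming the payload is Base64-encoded JSON (the real encoding lives in Gitlab::Workhorse.encode):

    require 'base64'
    require 'json'

    SEND_DATA_HEADER = 'Gitlab-Workhorse-Send-Data'.freeze

    def send_url(url, allow_redirects: false)
      params = { 'URL' => url, 'AllowRedirects' => allow_redirects }
      # Workhorse dispatches on the command prefix before the colon.
      [SEND_DATA_HEADER, "send-url:#{Base64.urlsafe_encode64(JSON.dump(params))}"]
    end

    p send_url('https://objects.example.com/artifacts.zip')
    # => ["Gitlab-Workhorse-Send-Data", "send-url:eyJVUkwiOi..."]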
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 249c77dc636d..0b34d71bfb22 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -451,4 +451,21 @@ def call_verify(headers)
       end
     end
   end
+
+  describe '.send_url' do
+    let(:url) { 'http://example.com' }
+
+    subject { described_class.send_url(url) }
+
+    it 'sets the header correctly' do
+      key, command, params = decode_workhorse_header(subject)
+
+      expect(key).to eq("Gitlab-Workhorse-Send-Data")
+      expect(command).to eq("send-url")
+      expect(params).to eq({
+        'URL' => url,
+        'AllowRedirects' => false
+      }.deep_stringify_keys)
+    end
+  end
 end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 8086b91a4887..c6366ffec62e 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1157,8 +1157,6 @@ def upload_artifacts(file, headers = {}, accelerated = true)
 
           before do
             create(:ci_job_artifact, :archive, file_store: store, job: job)
-
-            download_artifact
           end
 
           context 'when using job token' do
@@ -1168,6 +1166,10 @@ def upload_artifacts(file, headers = {}, accelerated = true)
                   'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
               end
 
+              before do
+                download_artifact
+              end
+
               it 'downloads artifacts' do
                 expect(response).to have_http_status(200)
                 expect(response.headers).to include download_headers
@@ -1178,8 +1180,27 @@ def upload_artifacts(file, headers = {}, accelerated = true)
               let(:store) { JobArtifactUploader::Store::REMOTE }
               let!(:job) { create(:ci_build) }
 
-              it 'download artifacts' do
-                expect(response).to have_http_status(302)
+              context 'when proxy download is being used' do
+                before do
+                  download_artifact(direct_download: false)
+                end
+
+                it 'uses workhorse send-url' do
+                  expect(response).to have_gitlab_http_status(200)
+                  expect(response.headers).to include(
+                    'Gitlab-Workhorse-Send-Data' => /send-url:/)
+                end
+              end
+
+              context 'when direct download is being used' do
+                before do
+                  download_artifact(direct_download: true)
+                end
+
+                it 'receives a redirect for downloading artifacts' do
+                  expect(response).to have_gitlab_http_status(302)
+                  expect(response.headers).to include('Location')
+                end
               end
             end
           end
@@ -1187,6 +1208,10 @@ def upload_artifacts(file, headers = {}, accelerated = true)
           context 'when using runners token' do
             let(:token) { job.project.runners_token }
 
+            before do
+              download_artifact
+            end
+
             it 'responds with forbidden' do
               expect(response).to have_gitlab_http_status(403)
             end
-- 
GitLab


From 999118f0ec6edabc9e13c089381ad664970a080a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Tue, 6 Feb 2018 14:18:32 +0000
Subject: [PATCH 10/14] Merge branch 'feature/sm/artifacts-trace-ee' into
 'master'

EE: Trace as artifacts (FileStorage and ObjectStorage)

Closes #4171

See merge request gitlab-org/gitlab-ee!4258
---
 app/controllers/projects/jobs_controller.rb   |    2 +
 app/models/ci/build.rb                        |    1 +
 app/models/ci/job_artifact.rb                 |    5 +-
 app/models/concerns/artifact_migratable.rb    |    3 +-
 .../ci/create_trace_artifact_service.rb       |   16 +
 app/uploaders/job_artifact_uploader.rb        |    7 +
 app/workers/all_queues.yml                    |    1 +
 app/workers/build_finished_worker.rb          |    8 +-
 app/workers/create_trace_artifact_worker.rb   |   10 +
 .../unreleased/feature-sm-artifacts-trace.yml |    5 +
 doc/development/file_storage.md               |    4 +-
 .../ee/projects/jobs_controller.rb            |   32 +
 ee/app/models/ee/ci/job_artifact.rb           |    4 +-
 ee/app/uploaders/ee/job_artifact_uploader.rb  |   13 +
 ee/lib/gitlab/ci/trace/http_io.rb             |  183 +++
 lib/gitlab/ci/trace.rb                        |   12 +-
 .../projects/jobs_controller_spec.rb          |   31 +-
 .../ee/projects/jobs_controller_spec.rb       |   73 +
 .../spec/lib/gitlab/ci/trace/http_io_spec.rb  |  329 +++++
 .../ee/job_artifact_uploader_spec.rb          |   22 +
 spec/factories/ci/builds.rb                   |   10 +-
 spec/factories/ci/job_artifacts.rb            |    9 +
 spec/features/projects/jobs_spec.rb           |   36 +-
 spec/fixtures/trace/sample_trace              | 1185 +++++++++++++++++
 spec/javascripts/fixtures/jobs.rb             |    2 +-
 spec/lib/gitlab/ci/trace_spec.rb              |   87 ++
 spec/models/ci/build_spec.rb                  |    8 +-
 spec/models/ci/job_artifact_spec.rb           |    3 +
 spec/requests/api/jobs_spec.rb                |   49 +-
 spec/requests/api/runner_spec.rb              |   18 +-
 spec/requests/api/v3/builds_spec.rb           |    8 +-
 .../ci/create_trace_artifact_service_spec.rb  |   43 +
 spec/services/ci/retry_build_service_spec.rb  |    5 +-
 spec/support/http_io/http_io_helpers.rb       |   64 +
 spec/uploaders/job_artifact_uploader_spec.rb  |   27 +
 spec/workers/build_finished_worker_spec.rb    |   14 +-
 .../create_trace_artifact_worker_spec.rb      |   29 +
 37 files changed, 2295 insertions(+), 63 deletions(-)
 create mode 100644 app/services/ci/create_trace_artifact_service.rb
 create mode 100644 app/workers/create_trace_artifact_worker.rb
 create mode 100644 changelogs/unreleased/feature-sm-artifacts-trace.yml
 create mode 100644 ee/app/controllers/ee/projects/jobs_controller.rb
 create mode 100644 ee/app/uploaders/ee/job_artifact_uploader.rb
 create mode 100644 ee/lib/gitlab/ci/trace/http_io.rb
 create mode 100644 spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb
 create mode 100644 spec/ee/spec/lib/gitlab/ci/trace/http_io_spec.rb
 create mode 100644 spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb
 create mode 100644 spec/fixtures/trace/sample_trace
 create mode 100644 spec/services/ci/create_trace_artifact_service_spec.rb
 create mode 100644 spec/support/http_io/http_io_helpers.rb
 create mode 100644 spec/workers/create_trace_artifact_worker_spec.rb

diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb
index 4865ec3dfe50..221b31430d81 100644
--- a/app/controllers/projects/jobs_controller.rb
+++ b/app/controllers/projects/jobs_controller.rb
@@ -1,4 +1,6 @@
 class Projects::JobsController < Projects::ApplicationController
+  prepend EE::Projects::JobsController
+
   before_action :build, except: [:index, :cancel_all]
 
   before_action :authorize_read_build!,
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 4eeccd4d934d..47eb4460320b 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -20,6 +20,7 @@ class Build < CommitStatus
     has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
     has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
     has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+    has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
 
     # The "environment" field for builds is a String, and is the unexpanded name
     def persisted_environment
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 1aea897aacac..2dfd8d4ef584 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -16,9 +16,12 @@ class JobArtifact < ActiveRecord::Base
       end
     end
 
+    delegate :open, :exists?, to: :file
+
     enum file_type: {
       archive: 1,
-      metadata: 2
+      metadata: 2,
+      trace: 3
     }
 
     def self.artifacts_size_for(project)
diff --git a/app/models/concerns/artifact_migratable.rb b/app/models/concerns/artifact_migratable.rb
index 0460439e9e6b..ff52ca644590 100644
--- a/app/models/concerns/artifact_migratable.rb
+++ b/app/models/concerns/artifact_migratable.rb
@@ -39,7 +39,6 @@ def remove_artifacts_metadata!
   end
 
   def artifacts_size
-    read_attribute(:artifacts_size).to_i +
-      job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
+    read_attribute(:artifacts_size).to_i + job_artifacts.sum(:size).to_i
   end
 end
diff --git a/app/services/ci/create_trace_artifact_service.rb b/app/services/ci/create_trace_artifact_service.rb
new file mode 100644
index 000000000000..280a2c3afa4c
--- /dev/null
+++ b/app/services/ci/create_trace_artifact_service.rb
@@ -0,0 +1,16 @@
+module Ci
+  class CreateTraceArtifactService < BaseService
+    def execute(job)
+      return if job.job_artifacts_trace
+
+      job.trace.read do |stream|
+        if stream.file?
+          job.create_job_artifacts_trace!(
+            project: job.project,
+            file_type: :trace,
+            file: stream)
+        end
+      end
+    end
+  end
+end
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index 3ad3e6ea32b0..d80221144b4f 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,4 +1,5 @@
 class JobArtifactUploader < GitlabUploader
+  prepend EE::JobArtifactUploader
   extend Workhorse::UploadPath
   include ObjectStorage::Concern
 
@@ -14,6 +15,12 @@ def store_dir
     dynamic_segment
   end
 
+  def open
+    raise 'Only File System is supported' unless file_storage?
+
+    File.open(path, "rb") if path
+  end
+
   private
 
   def dynamic_segment
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 50e876b1d193..f2c201145347 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -43,6 +43,7 @@
 - pipeline_creation:run_pipeline_schedule
 - pipeline_default:build_coverage
 - pipeline_default:build_trace_sections
+- pipeline_default:create_trace_artifact
 - pipeline_default:pipeline_metrics
 - pipeline_default:pipeline_notification
 - pipeline_default:update_head_pipeline_for_merge_request
diff --git a/app/workers/build_finished_worker.rb b/app/workers/build_finished_worker.rb
index 97d80305bec9..b5ed8d607b35 100644
--- a/app/workers/build_finished_worker.rb
+++ b/app/workers/build_finished_worker.rb
@@ -6,9 +6,13 @@ class BuildFinishedWorker
 
   def perform(build_id)
     Ci::Build.find_by(id: build_id).try do |build|
-      BuildTraceSectionsWorker.perform_async(build.id)
+      # We execute these in sync, as they access the trace while it is still a local file, which reduces IO
+      BuildTraceSectionsWorker.new.perform(build.id)
       BuildCoverageWorker.new.perform(build.id)
-      BuildHooksWorker.new.perform(build.id)
+
+      # We execute these async, as they are two independent operations that can run after TraceSections and Coverage
+      BuildHooksWorker.perform_async(build.id)
+      CreateTraceArtifactWorker.perform_async(build.id)
     end
   end
 end
diff --git a/app/workers/create_trace_artifact_worker.rb b/app/workers/create_trace_artifact_worker.rb
new file mode 100644
index 000000000000..11cda58021e9
--- /dev/null
+++ b/app/workers/create_trace_artifact_worker.rb
@@ -0,0 +1,10 @@
+class CreateTraceArtifactWorker
+  include ApplicationWorker
+  include PipelineQueue
+
+  def perform(job_id)
+    Ci::Build.preload(:project, :user).find_by(id: job_id).try do |job|
+      Ci::CreateTraceArtifactService.new(job.project, job.user).execute(job)
+    end
+  end
+end
diff --git a/changelogs/unreleased/feature-sm-artifacts-trace.yml b/changelogs/unreleased/feature-sm-artifacts-trace.yml
new file mode 100644
index 000000000000..7654ce58aeb3
--- /dev/null
+++ b/changelogs/unreleased/feature-sm-artifacts-trace.yml
@@ -0,0 +1,5 @@
+---
+title: Save traces as artifacts
+merge_request: 16702
+author:
+type: changed
diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md
index 76354b928203..34a02bd2c3cd 100644
--- a/doc/development/file_storage.md
+++ b/doc/development/file_storage.md
@@ -16,7 +16,7 @@ There are many places where file uploading is used, according to contexts:
   - Project avatars
   - Issues/MR/Notes Markdown attachments
   - Issues/MR/Notes Legacy Markdown attachments
-  - CI Build Artifacts
+  - CI Artifacts (archive, metadata, trace)
   - LFS Objects
 
 
@@ -35,7 +35,7 @@ they are still not 100% standardized. You can see them below:
 | Project avatars                       | yes    | uploads/-/system/project/avatar/:id/:filename               | `AvatarUploader`       | Project    |
 | Issues/MR/Notes Markdown attachments        | yes    | uploads/:project_path_with_namespace/:random_hex/:filename  | `FileUploader`         | Project    |
 | Issues/MR/Notes Legacy Markdown attachments | no     | uploads/-/system/note/attachment/:id/:filename              | `AttachmentUploader`   | Note       |
-| CI Artifacts (CE)                     | yes    | shared/artifacts/:year_:month/:project_id/:id               | `ArtifactUploader`     | Ci::Build  |
+| CI Artifacts (CE)                     | yes    | shared/artifacts/:disk_hash[0..1]/:disk_hash[2..3]/:disk_hash/:year_:month_:date/:job_id/:job_artifact_id (:disk_hash is SHA256 digest of project_id) | `JobArtifactUploader`  | Ci::JobArtifact  |
 | LFS Objects  (CE)                     | yes    | shared/lfs-objects/:hex/:hex/:object_hash                   | `LfsObjectUploader`    | LfsObject  |
 
 CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
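
For illustration, a short Ruby sketch of how the hashed EE artifact path in the table above is assembled, assuming :disk_hash is the SHA256 hex digest of the project ID and the date segment is formatted year_month_date; all values are made up:

    require 'digest'

    project_id, job_id, artifact_id = 13083, 50_551_658, 7
    created_at = Time.utc(2018, 2, 6)

    disk_hash = Digest::SHA2.hexdigest(project_id.to_s)
    path = File.join(
      'shared/artifacts',
      disk_hash[0..1], disk_hash[2..3], disk_hash,
      created_at.strftime('%Y_%m_%d'),
      job_id.to_s, artifact_id.to_s
    )
    puts path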
diff --git a/ee/app/controllers/ee/projects/jobs_controller.rb b/ee/app/controllers/ee/projects/jobs_controller.rb
new file mode 100644
index 000000000000..03e67d3b549f
--- /dev/null
+++ b/ee/app/controllers/ee/projects/jobs_controller.rb
@@ -0,0 +1,32 @@
+module EE
+  module Projects
+    module JobsController
+      extend ActiveSupport::Concern
+      include SendFileUpload
+
+      def raw
+        if trace_artifact_file
+          send_upload(trace_artifact_file,
+                      send_params: raw_send_params,
+                      redirect_params: raw_redirect_params)
+        else
+          super
+        end
+      end
+
+      private
+
+      def raw_send_params
+        { type: 'text/plain; charset=utf-8', disposition: 'inline' }
+      end
+
+      def raw_redirect_params
+        { query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
+      end
+
+      def trace_artifact_file
+        @trace_artifact_file ||= build.job_artifacts_trace&.file
+      end
+    end
+  end
+end
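
An illustrative sketch of the query string raw_redirect_params appends to the object-storage redirect, assuming the provider honors response-content-* overrides (the base URL is made up):

    require 'uri'

    query = {
      'response-content-type' => 'text/plain; charset=utf-8',
      'response-content-disposition' => 'inline'
    }
    base = 'https://objects.example.com/trace.log?signature=abc' # illustrative
    puts "#{base}&#{URI.encode_www_form(query)}"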
diff --git a/ee/app/models/ee/ci/job_artifact.rb b/ee/app/models/ee/ci/job_artifact.rb
index 02c6715f4471..7dd5925bfc9f 100644
--- a/ee/app/models/ee/ci/job_artifact.rb
+++ b/ee/app/models/ee/ci/job_artifact.rb
@@ -9,11 +9,11 @@ module Ci::JobArtifact
     prepended do
       after_destroy :log_geo_event
 
-      scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
+      scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
     end
 
     def local_store?
-      [nil, JobArtifactUploader::Store::LOCAL].include?(self.file_store)
+      [nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
     end
 
     private
diff --git a/ee/app/uploaders/ee/job_artifact_uploader.rb b/ee/app/uploaders/ee/job_artifact_uploader.rb
new file mode 100644
index 000000000000..e54419fe683e
--- /dev/null
+++ b/ee/app/uploaders/ee/job_artifact_uploader.rb
@@ -0,0 +1,13 @@
+module EE
+  module JobArtifactUploader
+    extend ActiveSupport::Concern
+
+    def open
+      if file_storage?
+        super
+      else
+        ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
+      end
+    end
+  end
+end
diff --git a/ee/lib/gitlab/ci/trace/http_io.rb b/ee/lib/gitlab/ci/trace/http_io.rb
new file mode 100644
index 000000000000..5256f7999c1e
--- /dev/null
+++ b/ee/lib/gitlab/ci/trace/http_io.rb
@@ -0,0 +1,183 @@
+##
+# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
+# source: https://gitlab.com/snippets/1685610
+module Gitlab
+  module Ci
+    class Trace
+      class HttpIO
+        BUFFER_SIZE = 128.kilobytes
+
+        InvalidURLError = Class.new(StandardError)
+        FailedToGetChunkError = Class.new(StandardError)
+
+        attr_reader :uri, :size
+        attr_reader :tell
+        attr_reader :chunk, :chunk_range
+
+        alias_method :pos, :tell
+
+        def initialize(url, size)
+          raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
+
+          @uri = URI(url)
+          @size = size
+          @tell = 0
+        end
+
+        def close
+          # no-op
+        end
+
+        def binmode
+          # no-op
+        end
+
+        def binmode?
+          true
+        end
+
+        def path
+          @uri.to_s
+        end
+
+        def seek(pos, where = IO::SEEK_SET)
+          new_pos =
+            case where
+            when IO::SEEK_END
+              size + pos
+            when IO::SEEK_SET
+              pos
+            when IO::SEEK_CUR
+              tell + pos
+            else
+              -1
+            end
+
+          raise 'new position is outside of file' if new_pos < 0 || new_pos > size
+
+          @tell = new_pos
+        end
+
+        def eof?
+          tell == size
+        end
+
+        def each_line
+          until eof?
+            line = readline
+            break if line.nil?
+
+            yield(line)
+          end
+        end
+
+        def read(length = nil)
+          out = ""
+
+          until eof? || (length && out.length >= length)
+            data = get_chunk
+            break if data.empty?
+
+            out << data
+            @tell += data.bytesize
+          end
+
+          out = out[0, length] if length && out.length > length
+
+          out
+        end
+
+        def readline
+          out = ""
+
+          until eof?
+            data = get_chunk
+            new_line = data.index("\n")
+
+            if !new_line.nil?
+              out << data[0..new_line]
+              @tell += new_line + 1
+              break
+            else
+              out << data
+              @tell += data.bytesize
+            end
+          end
+
+          out
+        end
+
+        def write(data)
+          raise NotImplementedError
+        end
+
+        def truncate(offset)
+          raise NotImplementedError
+        end
+
+        def flush
+          raise NotImplementedError
+        end
+
+        def present?
+          true
+        end
+
+        private
+
+        ##
+        # The below methods are not implemented in IO class
+        #
+        def in_range?
+          @chunk_range&.include?(tell)
+        end
+
+        def get_chunk
+          unless in_range?
+            response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
+              http.request(request)
+            end
+
+            raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
+
+            @chunk = response.body.force_encoding(Encoding::BINARY)
+            @chunk_range = response.content_range
+
+            ##
+            # Note: If the provider does not return Content-Range, we set it to the range we requested.
+            # Provider: minio
+            # - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
+            # - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
+            # Provider: AWS
+            # - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
+            # - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
+            # Provider: GCS
+            # - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
+            # - When the file size is smaller than the requested Content-Range, the Content-Range is not included in responses with Net::HTTPOK 200
+            @chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
+          end
+
+          @chunk[chunk_offset..BUFFER_SIZE]
+        end
+
+        def request
+          Net::HTTP::Get.new(uri).tap do |request|
+            request.set_range(chunk_start, BUFFER_SIZE)
+          end
+        end
+
+        def chunk_offset
+          tell % BUFFER_SIZE
+        end
+
+        def chunk_start
+          (tell / BUFFER_SIZE) * BUFFER_SIZE
+        end
+
+        def chunk_end
+          [chunk_start + BUFFER_SIZE, size].min
+        end
+      end
+    end
+  end
+end
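
A hypothetical usage sketch of the class above; the URL and size are illustrative, and in practice instances come from JobArtifactUploader#open:

    # Streams a remote trace in BUFFER_SIZE (128 KB) chunks over HTTP
    # range requests, exposing an IO-like interface to callers.
    io = Gitlab::Ci::Trace::HttpIO.new('https://storage.example.com/trace.log', 1_000_000)

    io.seek(0, IO::SEEK_SET)
    first = io.read(100)          # fetches one chunk, returns 100 bytes
    io.each_line { |line| line }  # continues from pos, chunk by chunk

    # Chunk arithmetic: at pos 300_000 with BUFFER_SIZE = 131_072,
    # chunk_start is 262_144 and chunk_offset is 37_856.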
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index baf55b1fa070..f2e5124c8a81 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -52,12 +52,14 @@ def append(data, offset)
       end
 
       def exist?
-        current_path.present? || old_trace.present?
+        trace_artifact&.exists? || current_path.present? || old_trace.present?
       end
 
       def read
         stream = Gitlab::Ci::Trace::Stream.new do
-          if current_path
+          if trace_artifact
+            trace_artifact.open
+          elsif current_path
             File.open(current_path, "rb")
           elsif old_trace
             StringIO.new(old_trace)
@@ -82,6 +84,8 @@ def write
       end
 
       def erase!
+        trace_artifact&.destroy
+
         paths.each do |trace_path|
           FileUtils.rm(trace_path, force: true)
         end
@@ -137,6 +141,10 @@ def deprecated_path
           "#{job.id}.log"
         ) if job.project&.ci_id
       end
+
+      def trace_artifact
+        job.job_artifacts_trace
+      end
     end
   end
 end
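
A minimal caller-side sketch mirroring how Ci::CreateTraceArtifactService consumes the read API above; the build lookup is illustrative:

    build = Ci::Build.find(build_id)

    # The stream now comes from the trace artifact when one exists
    # (a local file, or an HttpIO for object storage), and only then
    # falls back to the live trace path or the legacy DB column.
    build.trace.read do |stream|
      head = stream.read(200)
    end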
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index e6a4e7c8257e..01f69e811034 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -159,8 +159,19 @@ def get_show(**extra_params)
       get_trace
     end
 
+    context 'when job has a trace artifact' do
+      let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      it 'returns a trace' do
+        expect(response).to have_gitlab_http_status(:ok)
+        expect(json_response['id']).to eq job.id
+        expect(json_response['status']).to eq job.status
+        expect(json_response['html']).to eq(job.trace.html)
+      end
+    end
+
     context 'when job has a trace' do
-      let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
 
       it 'returns a trace' do
         expect(response).to have_gitlab_http_status(:ok)
@@ -182,7 +193,7 @@ def get_show(**extra_params)
     end
 
     context 'when job has a trace with ANSI sequence and Unicode' do
-      let(:job) { create(:ci_build, :unicode_trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :unicode_trace_live, pipeline: pipeline) }
 
       it 'returns a trace with Unicode' do
         expect(response).to have_gitlab_http_status(:ok)
@@ -381,7 +392,7 @@ def post_cancel_all
     end
 
     context 'when job is erasable' do
-      let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
 
       it 'redirects to the erased job page' do
         expect(response).to have_gitlab_http_status(:found)
@@ -408,7 +419,7 @@ def post_cancel_all
 
     context 'when user is developer' do
       let(:role) { :developer }
-      let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline, user: triggered_by) }
+      let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline, user: triggered_by) }
 
       context 'when triggered by same user' do
         let(:triggered_by) { user }
@@ -439,8 +450,18 @@ def post_erase
       get_raw
     end
 
+    context 'when job has a trace artifact' do
+      let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      it 'returns a trace' do
+        expect(response).to have_gitlab_http_status(:ok)
+        expect(response.content_type).to eq 'text/plain; charset=utf-8'
+        expect(response.body).to eq job.job_artifacts_trace.open.read
+      end
+    end
+
     context 'when job has a trace file' do
-      let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
 
       it 'send a trace file' do
         expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb b/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb
new file mode 100644
index 000000000000..2bd3a64e3e88
--- /dev/null
+++ b/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb
@@ -0,0 +1,73 @@
+require 'spec_helper'
+
+describe Projects::JobsController do
+  include ApiHelpers
+  include HttpIOHelpers
+
+  let(:project) { create(:project, :public) }
+  let(:pipeline) { create(:ci_pipeline, project: project) }
+
+  describe 'GET trace.json' do
+    context 'when trace artifact is in ObjectStorage' do
+      let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+      before do
+        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+        allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+        allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+      end
+
+      context 'when there are no network issues' do
+        before do
+          stub_remote_trace_206
+
+          get_trace
+        end
+
+        it 'returns a trace' do
+          expect(response).to have_gitlab_http_status(:ok)
+          expect(json_response['id']).to eq job.id
+          expect(json_response['status']).to eq job.status
+          expect(json_response['html']).to eq(job.trace.html)
+        end
+      end
+
+      context 'when there is a network issue' do
+        before do
+          stub_remote_trace_500
+        end
+
+        it 'raises an error' do
+          expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+        end
+      end
+    end
+
+    def get_trace
+      get :trace, namespace_id: project.namespace,
+                  project_id: project,
+                  id: job.id,
+                  format: :json
+    end
+  end
+
+  describe 'GET raw' do
+    subject do
+      post :raw, namespace_id: project.namespace,
+                 project_id: project,
+                 id: job.id
+    end
+
+    context 'when the trace artifact is in ObjectStorage' do
+      let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      before do
+        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+      end
+
+      it 'redirects to the trace file url' do
+        expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
+      end
+    end
+  end
+end
diff --git a/spec/ee/spec/lib/gitlab/ci/trace/http_io_spec.rb b/spec/ee/spec/lib/gitlab/ci/trace/http_io_spec.rb
new file mode 100644
index 000000000000..b839ef7ce36d
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/ci/trace/http_io_spec.rb
@@ -0,0 +1,329 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::HttpIO do
+  include HttpIOHelpers
+
+  let(:http_io) { described_class.new(url, size) }
+  let(:url) { remote_trace_url }
+  let(:size) { remote_trace_size }
+
+  describe 'Interchangeability between IO and HttpIO' do
+    EXCEPT_METHODS = %i[read_nonblock raw raw! cooked cooked! getch echo= echo?
+                        winsize winsize= iflush oflush ioflush beep goto cursor cursor= pressed?
+                        getpass write_nonblock stat pathconf wait_readable wait_writable getbyte <<
+                        wait lines bytes chars codepoints getc readpartial set_encoding printf print
+                        putc puts readlines gets each each_byte each_char each_codepoint to_io reopen
+                        syswrite to_i fileno sysread fdatasync fsync sync= sync lineno= lineno readchar
+                        ungetbyte readbyte ungetc nonblock= nread rewind pos= eof close_on_exec?
+                        close_on_exec= closed? close_read close_write isatty tty? binmode? sysseek
+                        advise ioctl fcntl pid external_encoding internal_encoding autoclose? autoclose=
+                        posix_fileno nonblock? ready? noecho nonblock].freeze
+
+    it 'HttpIO covers core interfaces in IO' do
+      expected_interfaces = ::IO.instance_methods(false)
+      expected_interfaces -= EXCEPT_METHODS
+
+      expect(expected_interfaces - described_class.instance_methods).to be_empty
+    end
+  end
+
+  describe '#close' do
+    subject { http_io.close }
+
+    it { is_expected.to be_nil }
+  end
+
+  describe '#binmode' do
+    subject { http_io.binmode }
+
+    it { is_expected.to be_nil }
+  end
+
+  describe '#binmode?' do
+    subject { http_io.binmode? }
+
+    it { is_expected.to be_truthy }
+  end
+
+  describe '#path' do
+    subject { http_io.path }
+
+    it { is_expected.to eq(url) }
+  end
+
+  describe '#seek' do
+    subject { http_io.seek(pos, where) }
+
+    context 'when pos moves to the end of the file' do
+      let(:pos) { 0 }
+      let(:where) { IO::SEEK_END }
+
+      it { is_expected.to eq(size) }
+    end
+
+    context 'when pos moves to the middle of the file' do
+      let(:pos) { size / 2 }
+      let(:where) { IO::SEEK_SET }
+
+      it { is_expected.to eq(size / 2) }
+    end
+
+    context 'when pos moves around' do
+      it 'matches the result' do
+        expect(http_io.seek(0)).to eq(0)
+        expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
+        expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
+      end
+    end
+  end
+
+  describe '#eof?' do
+    subject { http_io.eof? }
+
+    context 'when current pos is at end of the file' do
+      before do
+        http_io.seek(size, IO::SEEK_SET)
+      end
+
+      it { is_expected.to be_truthy }
+    end
+
+    context 'when current pos is not at end of the file' do
+      before do
+        http_io.seek(0, IO::SEEK_SET)
+      end
+
+      it { is_expected.to be_falsey }
+    end
+  end
+
+  describe '#each_line' do
+    subject { http_io.each_line }
+
+    let(:string_io) { StringIO.new(remote_trace_body) }
+
+    before do
+      stub_remote_trace_206
+    end
+
+    it 'yields lines' do
+      expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
+    end
+
+    context 'when buckets on GCS' do
+      context 'when BUFFER_SIZE is larger than file size' do
+        before do
+          stub_remote_trace_200
+          set_larger_buffer_size_than(size)
+        end
+
+        it 'calls get_chunk only once' do
+          expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
+
+          http_io.each_line { |line| }
+        end
+      end
+    end
+  end
+
+  describe '#read' do
+    subject { http_io.read(length) }
+
+    context 'when there are no network issues' do
+      before do
+        stub_remote_trace_206
+      end
+
+      context 'when read whole size' do
+        let(:length) { nil }
+
+        context 'when BUFFER_SIZE is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body)
+          end
+        end
+
+        context 'when BUFFER_SIZE is larger than file size' do
+          before do
+            set_larger_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body)
+          end
+        end
+      end
+
+      context 'when read only first 100 bytes' do
+        let(:length) { 100 }
+
+        context 'when BUFFER_SIZE is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body[0, length])
+          end
+        end
+
+        context 'when BUFFER_SIZE is larger than file size' do
+          before do
+            set_larger_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body[0, length])
+          end
+        end
+      end
+
+      context 'when tries to read oversize' do
+        let(:length) { size + 1000 }
+
+        context 'when BUFFER_SIZE is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body)
+          end
+        end
+
+        context 'when BUFFER_SIZE is larger than file size' do
+          before do
+            set_larger_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to eq(remote_trace_body)
+          end
+        end
+      end
+
+      context 'when tries to read 0 bytes' do
+        let(:length) { 0 }
+
+        context 'when BUFFER_SIZE is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to be_empty
+          end
+        end
+
+        context 'when BUFFER_SIZE is larger than file size' do
+          before do
+            set_larger_buffer_size_than(size)
+          end
+
+          it 'reads a trace' do
+            is_expected.to be_empty
+          end
+        end
+      end
+    end
+
+    context 'when there is a network issue' do
+      let(:length) { nil }
+
+      before do
+        stub_remote_trace_500
+      end
+
+      it 'raises an error' do
+        expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+      end
+    end
+  end
+
+  describe '#readline' do
+    subject { http_io.readline }
+
+    let(:string_io) { StringIO.new(remote_trace_body) }
+
+    before do
+      stub_remote_trace_206
+    end
+
+    shared_examples 'all line matching' do
+      it 'reads a line' do
+        (0...remote_trace_body.lines.count).each do
+          expect(http_io.readline).to eq(string_io.readline)
+        end
+      end
+    end
+
+    context 'when there is a network issue' do
+      let(:length) { nil }
+
+      before do
+        stub_remote_trace_500
+      end
+
+      it 'raises an error' do
+        expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+      end
+    end
+
+    context 'when BUFFER_SIZE is smaller than file size' do
+      before do
+        set_smaller_buffer_size_than(size)
+      end
+
+      it_behaves_like 'all line matching'
+    end
+
+    context 'when BUFFER_SIZE is larger than file size' do
+      before do
+        set_larger_buffer_size_than(size)
+      end
+
+      it_behaves_like 'all line matching'
+    end
+
+    context 'when pos is at middle of the file' do
+      before do
+        set_smaller_buffer_size_than(size)
+
+        http_io.seek(size / 2)
+        string_io.seek(size / 2)
+      end
+
+      it 'reads from pos' do
+        expect(http_io.readline).to eq(string_io.readline)
+      end
+    end
+  end
+
+  describe '#write' do
+    subject { http_io.write(nil) }
+
+    it { expect { subject }.to raise_error(NotImplementedError) }
+  end
+
+  describe '#truncate' do
+    subject { http_io.truncate(nil) }
+
+    it { expect { subject }.to raise_error(NotImplementedError) }
+  end
+
+  describe '#flush' do
+    subject { http_io.flush }
+
+    it { expect { subject }.to raise_error(NotImplementedError) }
+  end
+
+  describe '#present?' do
+    subject { http_io.present? }
+
+    it { is_expected.to be_truthy }
+  end
+end
diff --git a/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb b/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb
new file mode 100644
index 000000000000..043c597c9e05
--- /dev/null
+++ b/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe JobArtifactUploader do
+  let(:store) { ObjectStorage::Store::LOCAL }
+  let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
+  let(:uploader) { described_class.new(job_artifact, :file) }
+
+  describe '#open' do
+    subject { uploader.open }
+
+    context 'when trace is stored in Object storage' do
+      before do
+        allow(uploader).to receive(:file_storage?) { false }
+        allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
+      end
+
+      it 'returns http io stream' do
+        is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
+      end
+    end
+  end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 6f66468570f3..6ba599cdf83d 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -135,13 +135,19 @@
       coverage_regex '/(d+)/'
     end
 
-    trait :trace do
+    trait :trace_live do
       after(:create) do |build, evaluator|
         build.trace.set('BUILD TRACE')
       end
     end
 
-    trait :unicode_trace do
+    trait :trace_artifact do
+      after(:create) do |build, evaluator|
+        create(:ci_job_artifact, :trace, job: build)
+      end
+    end
+
+    trait :unicode_trace_live do
       after(:create) do |build, evaluator|
         trace = File.binread(
           File.expand_path(
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 9bb456e89ff1..7ada3b904d37 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -30,5 +30,14 @@
           Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
       end
     end
+
+    trait :trace do
+      file_type :trace
+
+      after(:build) do |artifact, evaluator|
+        artifact.file = fixture_file_upload(
+          Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
+      end
+    end
   end
 end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 9a6b27c00f86..6bbbafaabfd6 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -7,7 +7,7 @@
   let(:project) { create(:project, :repository) }
   let(:pipeline) { create(:ci_pipeline, project: project) }
 
-  let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
+  let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
   let(:job2) { create(:ci_build) }
 
   let(:artifacts_file) do
@@ -468,18 +468,34 @@
   describe 'GET /:project/jobs/:id/raw', :js do
     context 'access source' do
       context 'job from project' do
-        before do
-          job.run!
-        end
+        context 'when job is running' do
+          before do
+            job.run!
+          end
 
-        it 'sends the right headers' do
-          requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do
-            visit raw_project_job_path(project, job)
+          it 'sends the right headers' do
+            requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do
+              visit raw_project_job_path(project, job)
+            end
+
+            expect(requests.first.status_code).to eq(200)
+            expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
+            expect(requests.first.response_headers['X-Sendfile']).to eq(job.trace.send(:current_path))
           end
+        end
 
-          expect(requests.first.status_code).to eq(200)
-          expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
-          expect(requests.first.response_headers['X-Sendfile']).to eq(job.trace.send(:current_path))
+        context 'when job is complete' do
+          let(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+          it 'sends the right headers' do
+            requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do
+              visit raw_project_job_path(project, job)
+            end
+
+            expect(requests.first.status_code).to eq(200)
+            expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
+            expect(requests.first.response_headers['X-Sendfile']).to eq(job.job_artifacts_trace.file.path)
+          end
         end
       end
 
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
new file mode 100644
index 000000000000..55fcb9d27568
--- /dev/null
+++ b/spec/fixtures/trace/sample_trace
@@ -0,0 +1,1185 @@
+Running with gitlab-runner 10.4.0 (857480b6)
+  on docker-auto-scale-com (9a6801bd)
+Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ...
+Starting service postgres:9.2 ...
+Pulling docker image postgres:9.2 ...
+Using docker image postgres:9.2 ID=sha256:18cdbca56093c841d28e629eb8acd4224afe0aa4c57c839351fc181888b8a470 for postgres service...
+Starting service redis:alpine ...
+Pulling docker image redis:alpine ...
+Using docker image redis:alpine ID=sha256:cb1ec54b370d4a91dff57d00f91fd880dc710160a58440adaa133e0f84ae999d for redis service...
+Waiting for services to be up and running...
+Using docker image sha256:3006a02a5a6f0a116358a13bbc46ee46fb2471175efd5b7f9b1c22345ec2a8e9 for predefined container...
+Pulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ...
+Using docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ID=sha256:1f59be408f12738509ffe4177d65e9de6391f32461de83d9d45f58517b30af99 for build container...
+section_start:1517486886:prepare_script
+Running on runner-9a6801bd-project-13083-concurrent-0 via runner-9a6801bd-gsrm-1517484168-a8449153...
+section_end:1517486887:prepare_script
+section_start:1517486887:get_sources
+Fetching changes for 42624-gitaly-bundle-isolation-not-working-in-ci with git depth set to 20...
+Removing .gitlab_shell_secret
+Removing .gitlab_workhorse_secret
+Removing .yarn-cache/
+Removing config/database.yml
+Removing config/gitlab.yml
+Removing config/redis.cache.yml
+Removing config/redis.queues.yml
+Removing config/redis.shared_state.yml
+Removing config/resque.yml
+Removing config/secrets.yml
+Removing coverage/
+Removing knapsack/
+Removing log/api_json.log
+Removing log/application.log
+Removing log/gitaly-test.log
+Removing log/githost.log
+Removing log/grpc.log
+Removing log/test_json.log
+Removing node_modules/
+Removing public/assets/
+Removing rspec_flaky/
+Removing shared/tmp/
+Removing tmp/tests/
+Removing vendor/ruby/
+HEAD is now at 4cea24f Converted todos.js to axios
+From https://gitlab.com/gitlab-org/gitlab-ce
+ * [new branch]      42624-gitaly-bundle-isolation-not-working-in-ci -> origin/42624-gitaly-bundle-isolation-not-working-in-ci
+Checking out f42a5e24 as 42624-gitaly-bundle-isolation-not-working-in-ci...
+Skipping Git submodules setup
+section_end:1517486896:get_sources
+section_start:1517486896:restore_cache
+Checking cache for ruby-2.3.6-with-yarn...
+Downloading cache.zip from http://runners-cache-5-internal.gitlab.com:444/runner/project/13083/ruby-2.3.6-with-yarn 
+Successfully extracted cache
+section_end:1517486919:restore_cache
+section_start:1517486919:download_artifacts
+Downloading artifacts for retrieve-tests-metadata (50551658)...
+Downloading artifacts from coordinator... ok        id=50551658 responseStatus=200 OK token=HhF7y_1X
+Downloading artifacts for compile-assets (50551659)...
+Downloading artifacts from coordinator... ok        id=50551659 responseStatus=200 OK token=wTz6JrCP
+Downloading artifacts for setup-test-env (50551660)...
+Downloading artifacts from coordinator... ok        id=50551660 responseStatus=200 OK token=DTGgeVF5
+WARNING: tmp/tests/gitlab-shell/.gitlab_shell_secret: chmod tmp/tests/gitlab-shell/.gitlab_shell_secret: no such file or directory (suppressing repeats) 
+section_end:1517486934:download_artifacts
+section_start:1517486934:build_script
+$ bundle --version
+Bundler version 1.16.1
+$ source scripts/utils.sh
+$ source scripts/prepare_build.sh
+The Gemfile's dependencies are satisfied
+Successfully installed knapsack-1.15.0
+1 gem installed
+NOTICE:  database "gitlabhq_test" does not exist, skipping
+DROP DATABASE
+CREATE DATABASE
+CREATE ROLE
+GRANT
+-- enable_extension("plpgsql")
+   -> 0.0156s
+-- enable_extension("pg_trgm")
+   -> 0.0156s
+-- create_table("abuse_reports", {:force=>:cascade})
+   -> 0.0119s
+-- create_table("appearances", {:force=>:cascade})
+   -> 0.0065s
+-- create_table("application_settings", {:force=>:cascade})
+   -> 0.0382s
+-- create_table("audit_events", {:force=>:cascade})
+   -> 0.0056s
+-- add_index("audit_events", ["entity_id", "entity_type"], {:name=>"index_audit_events_on_entity_id_and_entity_type", :using=>:btree})
+   -> 0.0040s
+-- create_table("award_emoji", {:force=>:cascade})
+   -> 0.0058s
+-- add_index("award_emoji", ["awardable_type", "awardable_id"], {:name=>"index_award_emoji_on_awardable_type_and_awardable_id", :using=>:btree})
+   -> 0.0068s
+-- add_index("award_emoji", ["user_id", "name"], {:name=>"index_award_emoji_on_user_id_and_name", :using=>:btree})
+   -> 0.0043s
+-- create_table("boards", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("boards", ["project_id"], {:name=>"index_boards_on_project_id", :using=>:btree})
+   -> 0.0056s
+-- create_table("broadcast_messages", {:force=>:cascade})
+   -> 0.0056s
+-- add_index("broadcast_messages", ["starts_at", "ends_at", "id"], {:name=>"index_broadcast_messages_on_starts_at_and_ends_at_and_id", :using=>:btree})
+   -> 0.0041s
+-- create_table("chat_names", {:force=>:cascade})
+   -> 0.0056s
+-- add_index("chat_names", ["service_id", "team_id", "chat_id"], {:name=>"index_chat_names_on_service_id_and_team_id_and_chat_id", :unique=>true, :using=>:btree})
+   -> 0.0039s
+-- add_index("chat_names", ["user_id", "service_id"], {:name=>"index_chat_names_on_user_id_and_service_id", :unique=>true, :using=>:btree})
+   -> 0.0036s
+-- create_table("chat_teams", {:force=>:cascade})
+   -> 0.0068s
+-- add_index("chat_teams", ["namespace_id"], {:name=>"index_chat_teams_on_namespace_id", :unique=>true, :using=>:btree})
+   -> 0.0098s
+-- create_table("ci_build_trace_section_names", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("ci_build_trace_section_names", ["project_id", "name"], {:name=>"index_ci_build_trace_section_names_on_project_id_and_name", :unique=>true, :using=>:btree})
+   -> 0.0035s
+-- create_table("ci_build_trace_sections", {:force=>:cascade})
+   -> 0.0040s
+-- add_index("ci_build_trace_sections", ["build_id", "section_name_id"], {:name=>"index_ci_build_trace_sections_on_build_id_and_section_name_id", :unique=>true, :using=>:btree})
+   -> 0.0035s
+-- add_index("ci_build_trace_sections", ["project_id"], {:name=>"index_ci_build_trace_sections_on_project_id", :using=>:btree})
+   -> 0.0033s
+-- create_table("ci_builds", {:force=>:cascade})
+   -> 0.0062s
+-- add_index("ci_builds", ["auto_canceled_by_id"], {:name=>"index_ci_builds_on_auto_canceled_by_id", :using=>:btree})
+   -> 0.0035s
+-- add_index("ci_builds", ["commit_id", "stage_idx", "created_at"], {:name=>"index_ci_builds_on_commit_id_and_stage_idx_and_created_at", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_builds", ["commit_id", "status", "type"], {:name=>"index_ci_builds_on_commit_id_and_status_and_type", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_builds", ["commit_id", "type", "name", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_name_and_ref", :using=>:btree})
+   -> 0.0035s
+-- add_index("ci_builds", ["commit_id", "type", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_ref", :using=>:btree})
+   -> 0.0042s
+-- add_index("ci_builds", ["project_id", "id"], {:name=>"index_ci_builds_on_project_id_and_id", :using=>:btree})
+   -> 0.0031s
+-- add_index("ci_builds", ["protected"], {:name=>"index_ci_builds_on_protected", :using=>:btree})
+   -> 0.0031s
+-- add_index("ci_builds", ["runner_id"], {:name=>"index_ci_builds_on_runner_id", :using=>:btree})
+   -> 0.0033s
+-- add_index("ci_builds", ["stage_id"], {:name=>"index_ci_builds_on_stage_id", :using=>:btree})
+   -> 0.0035s
+-- add_index("ci_builds", ["status", "type", "runner_id"], {:name=>"index_ci_builds_on_status_and_type_and_runner_id", :using=>:btree})
+   -> 0.0031s
+-- add_index("ci_builds", ["status"], {:name=>"index_ci_builds_on_status", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_builds", ["token"], {:name=>"index_ci_builds_on_token", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("ci_builds", ["updated_at"], {:name=>"index_ci_builds_on_updated_at", :using=>:btree})
+   -> 0.0047s
+-- add_index("ci_builds", ["user_id"], {:name=>"index_ci_builds_on_user_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("ci_group_variables", {:force=>:cascade})
+   -> 0.0055s
+-- add_index("ci_group_variables", ["group_id", "key"], {:name=>"index_ci_group_variables_on_group_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- create_table("ci_job_artifacts", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("ci_job_artifacts", ["job_id", "file_type"], {:name=>"index_ci_job_artifacts_on_job_id_and_file_type", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- add_index("ci_job_artifacts", ["project_id"], {:name=>"index_ci_job_artifacts_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("ci_pipeline_schedule_variables", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], {:name=>"index_ci_pipeline_schedule_variables_on_schedule_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- create_table("ci_pipeline_schedules", {:force=>:cascade})
+   -> 0.0047s
+-- add_index("ci_pipeline_schedules", ["next_run_at", "active"], {:name=>"index_ci_pipeline_schedules_on_next_run_at_and_active", :using=>:btree})
+   -> 0.0029s
+-- add_index("ci_pipeline_schedules", ["project_id"], {:name=>"index_ci_pipeline_schedules_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("ci_pipeline_variables", {:force=>:cascade})
+   -> 0.0045s
+-- add_index("ci_pipeline_variables", ["pipeline_id", "key"], {:name=>"index_ci_pipeline_variables_on_pipeline_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- create_table("ci_pipelines", {:force=>:cascade})
+   -> 0.0057s
+-- add_index("ci_pipelines", ["auto_canceled_by_id"], {:name=>"index_ci_pipelines_on_auto_canceled_by_id", :using=>:btree})
+   -> 0.0030s
+-- add_index("ci_pipelines", ["pipeline_schedule_id"], {:name=>"index_ci_pipelines_on_pipeline_schedule_id", :using=>:btree})
+   -> 0.0031s
+-- add_index("ci_pipelines", ["project_id", "ref", "status", "id"], {:name=>"index_ci_pipelines_on_project_id_and_ref_and_status_and_id", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_pipelines", ["project_id", "sha"], {:name=>"index_ci_pipelines_on_project_id_and_sha", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_pipelines", ["project_id"], {:name=>"index_ci_pipelines_on_project_id", :using=>:btree})
+   -> 0.0035s
+-- add_index("ci_pipelines", ["status"], {:name=>"index_ci_pipelines_on_status", :using=>:btree})
+   -> 0.0032s
+-- add_index("ci_pipelines", ["user_id"], {:name=>"index_ci_pipelines_on_user_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("ci_runner_projects", {:force=>:cascade})
+   -> 0.0035s
+-- add_index("ci_runner_projects", ["project_id"], {:name=>"index_ci_runner_projects_on_project_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("ci_runner_projects", ["runner_id"], {:name=>"index_ci_runner_projects_on_runner_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("ci_runners", {:force=>:cascade})
+   -> 0.0059s
+-- add_index("ci_runners", ["contacted_at"], {:name=>"index_ci_runners_on_contacted_at", :using=>:btree})
+   -> 0.0030s
+-- add_index("ci_runners", ["is_shared"], {:name=>"index_ci_runners_on_is_shared", :using=>:btree})
+   -> 0.0030s
+-- add_index("ci_runners", ["locked"], {:name=>"index_ci_runners_on_locked", :using=>:btree})
+   -> 0.0030s
+-- add_index("ci_runners", ["token"], {:name=>"index_ci_runners_on_token", :using=>:btree})
+   -> 0.0029s
+-- create_table("ci_stages", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :using=>:btree})
+   -> 0.0031s
+-- add_index("ci_stages", ["pipeline_id"], {:name=>"index_ci_stages_on_pipeline_id", :using=>:btree})
+   -> 0.0030s
+-- add_index("ci_stages", ["project_id"], {:name=>"index_ci_stages_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("ci_trigger_requests", {:force=>:cascade})
+   -> 0.0058s
+-- add_index("ci_trigger_requests", ["commit_id"], {:name=>"index_ci_trigger_requests_on_commit_id", :using=>:btree})
+   -> 0.0031s
+-- create_table("ci_triggers", {:force=>:cascade})
+   -> 0.0043s
+-- add_index("ci_triggers", ["project_id"], {:name=>"index_ci_triggers_on_project_id", :using=>:btree})
+   -> 0.0033s
+-- create_table("ci_variables", {:force=>:cascade})
+   -> 0.0059s
+-- add_index("ci_variables", ["project_id", "key", "environment_scope"], {:name=>"index_ci_variables_on_project_id_and_key_and_environment_scope", :unique=>true, :using=>:btree})
+   -> 0.0031s
+-- create_table("cluster_platforms_kubernetes", {:force=>:cascade})
+   -> 0.0053s
+-- add_index("cluster_platforms_kubernetes", ["cluster_id"], {:name=>"index_cluster_platforms_kubernetes_on_cluster_id", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- create_table("cluster_projects", {:force=>:cascade})
+   -> 0.0032s
+-- add_index("cluster_projects", ["cluster_id"], {:name=>"index_cluster_projects_on_cluster_id", :using=>:btree})
+   -> 0.0035s
+-- add_index("cluster_projects", ["project_id"], {:name=>"index_cluster_projects_on_project_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("cluster_providers_gcp", {:force=>:cascade})
+   -> 0.0051s
+-- add_index("cluster_providers_gcp", ["cluster_id"], {:name=>"index_cluster_providers_gcp_on_cluster_id", :unique=>true, :using=>:btree})
+   -> 0.0034s
+-- create_table("clusters", {:force=>:cascade})
+   -> 0.0052s
+-- add_index("clusters", ["enabled"], {:name=>"index_clusters_on_enabled", :using=>:btree})
+   -> 0.0031s
+-- add_index("clusters", ["user_id"], {:name=>"index_clusters_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("clusters_applications_helm", {:force=>:cascade})
+   -> 0.0045s
+-- create_table("clusters_applications_ingress", {:force=>:cascade})
+   -> 0.0044s
+-- create_table("clusters_applications_prometheus", {:force=>:cascade})
+   -> 0.0047s
+-- create_table("container_repositories", {:force=>:cascade})
+   -> 0.0050s
+-- add_index("container_repositories", ["project_id", "name"], {:name=>"index_container_repositories_on_project_id_and_name", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("container_repositories", ["project_id"], {:name=>"index_container_repositories_on_project_id", :using=>:btree})
+   -> 0.0032s
+-- create_table("conversational_development_index_metrics", {:force=>:cascade})
+   -> 0.0076s
+-- create_table("deploy_keys_projects", {:force=>:cascade})
+   -> 0.0037s
+-- add_index("deploy_keys_projects", ["project_id"], {:name=>"index_deploy_keys_projects_on_project_id", :using=>:btree})
+   -> 0.0032s
+-- create_table("deployments", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("deployments", ["created_at"], {:name=>"index_deployments_on_created_at", :using=>:btree})
+   -> 0.0034s
+-- add_index("deployments", ["environment_id", "id"], {:name=>"index_deployments_on_environment_id_and_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("deployments", ["environment_id", "iid", "project_id"], {:name=>"index_deployments_on_environment_id_and_iid_and_project_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("deployments", ["project_id", "iid"], {:name=>"index_deployments_on_project_id_and_iid", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- create_table("emails", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("emails", ["confirmation_token"], {:name=>"index_emails_on_confirmation_token", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("emails", ["email"], {:name=>"index_emails_on_email", :unique=>true, :using=>:btree})
+   -> 0.0035s
+-- add_index("emails", ["user_id"], {:name=>"index_emails_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("environments", {:force=>:cascade})
+   -> 0.0052s
+-- add_index("environments", ["project_id", "name"], {:name=>"index_environments_on_project_id_and_name", :unique=>true, :using=>:btree})
+   -> 0.0031s
+-- add_index("environments", ["project_id", "slug"], {:name=>"index_environments_on_project_id_and_slug", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- create_table("events", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("events", ["action"], {:name=>"index_events_on_action", :using=>:btree})
+   -> 0.0032s
+-- add_index("events", ["author_id"], {:name=>"index_events_on_author_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("events", ["project_id", "id"], {:name=>"index_events_on_project_id_and_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("events", ["target_type", "target_id"], {:name=>"index_events_on_target_type_and_target_id", :using=>:btree})
+   -> 0.0027s
+-- create_table("feature_gates", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("feature_gates", ["feature_key", "key", "value"], {:name=>"index_feature_gates_on_feature_key_and_key_and_value", :unique=>true, :using=>:btree})
+   -> 0.0031s
+-- create_table("features", {:force=>:cascade})
+   -> 0.0041s
+-- add_index("features", ["key"], {:name=>"index_features_on_key", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- create_table("fork_network_members", {:force=>:cascade})
+   -> 0.0033s
+-- add_index("fork_network_members", ["fork_network_id"], {:name=>"index_fork_network_members_on_fork_network_id", :using=>:btree})
+   -> 0.0033s
+-- add_index("fork_network_members", ["project_id"], {:name=>"index_fork_network_members_on_project_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("fork_networks", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("fork_networks", ["root_project_id"], {:name=>"index_fork_networks_on_root_project_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("forked_project_links", {:force=>:cascade})
+   -> 0.0032s
+-- add_index("forked_project_links", ["forked_to_project_id"], {:name=>"index_forked_project_links_on_forked_to_project_id", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- create_table("gcp_clusters", {:force=>:cascade})
+   -> 0.0074s
+-- add_index("gcp_clusters", ["project_id"], {:name=>"index_gcp_clusters_on_project_id", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- create_table("gpg_key_subkeys", {:force=>:cascade})
+   -> 0.0042s
+-- add_index("gpg_key_subkeys", ["fingerprint"], {:name=>"index_gpg_key_subkeys_on_fingerprint", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- add_index("gpg_key_subkeys", ["gpg_key_id"], {:name=>"index_gpg_key_subkeys_on_gpg_key_id", :using=>:btree})
+   -> 0.0032s
+-- add_index("gpg_key_subkeys", ["keyid"], {:name=>"index_gpg_key_subkeys_on_keyid", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- create_table("gpg_keys", {:force=>:cascade})
+   -> 0.0042s
+-- add_index("gpg_keys", ["fingerprint"], {:name=>"index_gpg_keys_on_fingerprint", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("gpg_keys", ["primary_keyid"], {:name=>"index_gpg_keys_on_primary_keyid", :unique=>true, :using=>:btree})
+   -> 0.0026s
+-- add_index("gpg_keys", ["user_id"], {:name=>"index_gpg_keys_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("gpg_signatures", {:force=>:cascade})
+   -> 0.0054s
+-- add_index("gpg_signatures", ["commit_sha"], {:name=>"index_gpg_signatures_on_commit_sha", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- add_index("gpg_signatures", ["gpg_key_id"], {:name=>"index_gpg_signatures_on_gpg_key_id", :using=>:btree})
+   -> 0.0026s
+-- add_index("gpg_signatures", ["gpg_key_primary_keyid"], {:name=>"index_gpg_signatures_on_gpg_key_primary_keyid", :using=>:btree})
+   -> 0.0029s
+-- add_index("gpg_signatures", ["gpg_key_subkey_id"], {:name=>"index_gpg_signatures_on_gpg_key_subkey_id", :using=>:btree})
+   -> 0.0032s
+-- add_index("gpg_signatures", ["project_id"], {:name=>"index_gpg_signatures_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("group_custom_attributes", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("group_custom_attributes", ["group_id", "key"], {:name=>"index_group_custom_attributes_on_group_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("group_custom_attributes", ["key", "value"], {:name=>"index_group_custom_attributes_on_key_and_value", :using=>:btree})
+   -> 0.0028s
+-- create_table("identities", {:force=>:cascade})
+   -> 0.0043s
+-- add_index("identities", ["user_id"], {:name=>"index_identities_on_user_id", :using=>:btree})
+   -> 0.0034s
+-- create_table("issue_assignees", {:id=>false, :force=>:cascade})
+   -> 0.0013s
+-- add_index("issue_assignees", ["issue_id", "user_id"], {:name=>"index_issue_assignees_on_issue_id_and_user_id", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("issue_assignees", ["user_id"], {:name=>"index_issue_assignees_on_user_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("issue_metrics", {:force=>:cascade})
+   -> 0.0032s
+-- add_index("issue_metrics", ["issue_id"], {:name=>"index_issue_metrics", :using=>:btree})
+   -> 0.0029s
+-- create_table("issues", {:force=>:cascade})
+   -> 0.0051s
+-- add_index("issues", ["author_id"], {:name=>"index_issues_on_author_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("issues", ["confidential"], {:name=>"index_issues_on_confidential", :using=>:btree})
+   -> 0.0029s
+-- add_index("issues", ["description"], {:name=>"index_issues_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}})
+   -> 0.0022s
+-- add_index("issues", ["milestone_id"], {:name=>"index_issues_on_milestone_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("issues", ["moved_to_id"], {:name=>"index_issues_on_moved_to_id", :where=>"(moved_to_id IS NOT NULL)", :using=>:btree})
+   -> 0.0030s
+-- add_index("issues", ["project_id", "created_at", "id", "state"], {:name=>"index_issues_on_project_id_and_created_at_and_id_and_state", :using=>:btree})
+   -> 0.0039s
+-- add_index("issues", ["project_id", "due_date", "id", "state"], {:name=>"idx_issues_on_project_id_and_due_date_and_id_and_state_partial", :where=>"(due_date IS NOT NULL)", :using=>:btree})
+   -> 0.0031s
+-- add_index("issues", ["project_id", "iid"], {:name=>"index_issues_on_project_id_and_iid", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("issues", ["project_id", "updated_at", "id", "state"], {:name=>"index_issues_on_project_id_and_updated_at_and_id_and_state", :using=>:btree})
+   -> 0.0035s
+-- add_index("issues", ["relative_position"], {:name=>"index_issues_on_relative_position", :using=>:btree})
+   -> 0.0030s
+-- add_index("issues", ["state"], {:name=>"index_issues_on_state", :using=>:btree})
+   -> 0.0027s
+-- add_index("issues", ["title"], {:name=>"index_issues_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}})
+   -> 0.0021s
+-- add_index("issues", ["updated_at"], {:name=>"index_issues_on_updated_at", :using=>:btree})
+   -> 0.0030s
+-- add_index("issues", ["updated_by_id"], {:name=>"index_issues_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree})
+   -> 0.0028s
+-- create_table("keys", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("keys", ["fingerprint"], {:name=>"index_keys_on_fingerprint", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("keys", ["user_id"], {:name=>"index_keys_on_user_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("label_links", {:force=>:cascade})
+   -> 0.0041s
+-- add_index("label_links", ["label_id"], {:name=>"index_label_links_on_label_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("label_links", ["target_id", "target_type"], {:name=>"index_label_links_on_target_id_and_target_type", :using=>:btree})
+   -> 0.0028s
+-- create_table("label_priorities", {:force=>:cascade})
+   -> 0.0031s
+-- add_index("label_priorities", ["priority"], {:name=>"index_label_priorities_on_priority", :using=>:btree})
+   -> 0.0028s
+-- add_index("label_priorities", ["project_id", "label_id"], {:name=>"index_label_priorities_on_project_id_and_label_id", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- create_table("labels", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("labels", ["group_id", "project_id", "title"], {:name=>"index_labels_on_group_id_and_project_id_and_title", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("labels", ["project_id"], {:name=>"index_labels_on_project_id", :using=>:btree})
+   -> 0.0032s
+-- add_index("labels", ["template"], {:name=>"index_labels_on_template", :where=>"template", :using=>:btree})
+   -> 0.0027s
+-- add_index("labels", ["title"], {:name=>"index_labels_on_title", :using=>:btree})
+   -> 0.0030s
+-- add_index("labels", ["type", "project_id"], {:name=>"index_labels_on_type_and_project_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("lfs_objects", {:force=>:cascade})
+   -> 0.0040s
+-- add_index("lfs_objects", ["oid"], {:name=>"index_lfs_objects_on_oid", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- create_table("lfs_objects_projects", {:force=>:cascade})
+   -> 0.0035s
+-- add_index("lfs_objects_projects", ["project_id"], {:name=>"index_lfs_objects_projects_on_project_id", :using=>:btree})
+   -> 0.0025s
+-- create_table("lists", {:force=>:cascade})
+   -> 0.0033s
+-- add_index("lists", ["board_id", "label_id"], {:name=>"index_lists_on_board_id_and_label_id", :unique=>true, :using=>:btree})
+   -> 0.0026s
+-- add_index("lists", ["label_id"], {:name=>"index_lists_on_label_id", :using=>:btree})
+   -> 0.0026s
+-- create_table("members", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("members", ["access_level"], {:name=>"index_members_on_access_level", :using=>:btree})
+   -> 0.0028s
+-- add_index("members", ["invite_token"], {:name=>"index_members_on_invite_token", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- add_index("members", ["requested_at"], {:name=>"index_members_on_requested_at", :using=>:btree})
+   -> 0.0025s
+-- add_index("members", ["source_id", "source_type"], {:name=>"index_members_on_source_id_and_source_type", :using=>:btree})
+   -> 0.0027s
+-- add_index("members", ["user_id"], {:name=>"index_members_on_user_id", :using=>:btree})
+   -> 0.0026s
+-- create_table("merge_request_diff_commits", {:id=>false, :force=>:cascade})
+   -> 0.0027s
+-- add_index("merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_commits_on_mr_diff_id_and_order", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("merge_request_diff_commits", ["sha"], {:name=>"index_merge_request_diff_commits_on_sha", :using=>:btree})
+   -> 0.0029s
+-- create_table("merge_request_diff_files", {:id=>false, :force=>:cascade})
+   -> 0.0027s
+-- add_index("merge_request_diff_files", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_files_on_mr_diff_id_and_order", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- create_table("merge_request_diffs", {:force=>:cascade})
+   -> 0.0042s
+-- add_index("merge_request_diffs", ["merge_request_id", "id"], {:name=>"index_merge_request_diffs_on_merge_request_id_and_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("merge_request_metrics", {:force=>:cascade})
+   -> 0.0034s
+-- add_index("merge_request_metrics", ["first_deployed_to_production_at"], {:name=>"index_merge_request_metrics_on_first_deployed_to_production_at", :using=>:btree})
+   -> 0.0028s
+-- add_index("merge_request_metrics", ["merge_request_id"], {:name=>"index_merge_request_metrics", :using=>:btree})
+   -> 0.0025s
+-- add_index("merge_request_metrics", ["pipeline_id"], {:name=>"index_merge_request_metrics_on_pipeline_id", :using=>:btree})
+   -> 0.0026s
+-- create_table("merge_requests", {:force=>:cascade})
+   -> 0.0066s
+-- add_index("merge_requests", ["assignee_id"], {:name=>"index_merge_requests_on_assignee_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("merge_requests", ["author_id"], {:name=>"index_merge_requests_on_author_id", :using=>:btree})
+   -> 0.0026s
+-- add_index("merge_requests", ["created_at"], {:name=>"index_merge_requests_on_created_at", :using=>:btree})
+   -> 0.0026s
+-- add_index("merge_requests", ["description"], {:name=>"index_merge_requests_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}})
+   -> 0.0020s
+-- add_index("merge_requests", ["head_pipeline_id"], {:name=>"index_merge_requests_on_head_pipeline_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("merge_requests", ["latest_merge_request_diff_id"], {:name=>"index_merge_requests_on_latest_merge_request_diff_id", :using=>:btree})
+   -> 0.0025s
+-- add_index("merge_requests", ["merge_user_id"], {:name=>"index_merge_requests_on_merge_user_id", :where=>"(merge_user_id IS NOT NULL)", :using=>:btree})
+   -> 0.0029s
+-- add_index("merge_requests", ["milestone_id"], {:name=>"index_merge_requests_on_milestone_id", :using=>:btree})
+   -> 0.0030s
+-- add_index("merge_requests", ["source_branch"], {:name=>"index_merge_requests_on_source_branch", :using=>:btree})
+   -> 0.0026s
+-- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_and_branch_state_opened", :where=>"((state)::text = 'opened'::text)", :using=>:btree})
+   -> 0.0029s
+-- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_id_and_source_branch", :using=>:btree})
+   -> 0.0031s
+-- add_index("merge_requests", ["target_branch"], {:name=>"index_merge_requests_on_target_branch", :using=>:btree})
+   -> 0.0028s
+-- add_index("merge_requests", ["target_project_id", "iid"], {:name=>"index_merge_requests_on_target_project_id_and_iid", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- add_index("merge_requests", ["target_project_id", "merge_commit_sha", "id"], {:name=>"index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title", :using=>:btree})
+   -> 0.0026s
+-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}})
+   -> 0.0020s
+-- add_index("merge_requests", ["updated_by_id"], {:name=>"index_merge_requests_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree})
+   -> 0.0029s
+-- create_table("merge_requests_closing_issues", {:force=>:cascade})
+   -> 0.0031s
+-- add_index("merge_requests_closing_issues", ["issue_id"], {:name=>"index_merge_requests_closing_issues_on_issue_id", :using=>:btree})
+   -> 0.0026s
+-- add_index("merge_requests_closing_issues", ["merge_request_id"], {:name=>"index_merge_requests_closing_issues_on_merge_request_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("milestones", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("milestones", ["description"], {:name=>"index_milestones_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}})
+   -> 0.0022s
+-- add_index("milestones", ["due_date"], {:name=>"index_milestones_on_due_date", :using=>:btree})
+   -> 0.0033s
+-- add_index("milestones", ["group_id"], {:name=>"index_milestones_on_group_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("milestones", ["project_id", "iid"], {:name=>"index_milestones_on_project_id_and_iid", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title", :using=>:btree})
+   -> 0.0026s
+-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}})
+   -> 0.0021s
+-- create_table("namespaces", {:force=>:cascade})
+   -> 0.0068s
+-- add_index("namespaces", ["created_at"], {:name=>"index_namespaces_on_created_at", :using=>:btree})
+   -> 0.0030s
+-- add_index("namespaces", ["name", "parent_id"], {:name=>"index_namespaces_on_name_and_parent_id", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("namespaces", ["name"], {:name=>"index_namespaces_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}})
+   -> 0.0020s
+-- add_index("namespaces", ["owner_id"], {:name=>"index_namespaces_on_owner_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("namespaces", ["parent_id", "id"], {:name=>"index_namespaces_on_parent_id_and_id", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path", :using=>:btree})
+   -> 0.0031s
+-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}})
+   -> 0.0019s
+-- add_index("namespaces", ["require_two_factor_authentication"], {:name=>"index_namespaces_on_require_two_factor_authentication", :using=>:btree})
+   -> 0.0029s
+-- add_index("namespaces", ["type"], {:name=>"index_namespaces_on_type", :using=>:btree})
+   -> 0.0032s
+-- create_table("notes", {:force=>:cascade})
+   -> 0.0055s
+-- add_index("notes", ["author_id"], {:name=>"index_notes_on_author_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("notes", ["commit_id"], {:name=>"index_notes_on_commit_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("notes", ["created_at"], {:name=>"index_notes_on_created_at", :using=>:btree})
+   -> 0.0029s
+-- add_index("notes", ["discussion_id"], {:name=>"index_notes_on_discussion_id", :using=>:btree})
+   -> 0.0029s
+-- add_index("notes", ["line_code"], {:name=>"index_notes_on_line_code", :using=>:btree})
+   -> 0.0029s
+-- add_index("notes", ["note"], {:name=>"index_notes_on_note_trigram", :using=>:gin, :opclasses=>{"note"=>"gin_trgm_ops"}})
+   -> 0.0024s
+-- add_index("notes", ["noteable_id", "noteable_type"], {:name=>"index_notes_on_noteable_id_and_noteable_type", :using=>:btree})
+   -> 0.0029s
+-- add_index("notes", ["noteable_type"], {:name=>"index_notes_on_noteable_type", :using=>:btree})
+   -> 0.0030s
+-- add_index("notes", ["project_id", "noteable_type"], {:name=>"index_notes_on_project_id_and_noteable_type", :using=>:btree})
+   -> 0.0027s
+-- add_index("notes", ["updated_at"], {:name=>"index_notes_on_updated_at", :using=>:btree})
+   -> 0.0026s
+-- create_table("notification_settings", {:force=>:cascade})
+   -> 0.0053s
+-- add_index("notification_settings", ["source_id", "source_type"], {:name=>"index_notification_settings_on_source_id_and_source_type", :using=>:btree})
+   -> 0.0028s
+-- add_index("notification_settings", ["user_id", "source_id", "source_type"], {:name=>"index_notifications_on_user_id_and_source_id_and_source_type", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("notification_settings", ["user_id"], {:name=>"index_notification_settings_on_user_id", :using=>:btree})
+   -> 0.0031s
+-- create_table("oauth_access_grants", {:force=>:cascade})
+   -> 0.0042s
+-- add_index("oauth_access_grants", ["token"], {:name=>"index_oauth_access_grants_on_token", :unique=>true, :using=>:btree})
+   -> 0.0031s
+-- create_table("oauth_access_tokens", {:force=>:cascade})
+   -> 0.0051s
+-- add_index("oauth_access_tokens", ["refresh_token"], {:name=>"index_oauth_access_tokens_on_refresh_token", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("oauth_access_tokens", ["resource_owner_id"], {:name=>"index_oauth_access_tokens_on_resource_owner_id", :using=>:btree})
+   -> 0.0025s
+-- add_index("oauth_access_tokens", ["token"], {:name=>"index_oauth_access_tokens_on_token", :unique=>true, :using=>:btree})
+   -> 0.0026s
+-- create_table("oauth_applications", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("oauth_applications", ["owner_id", "owner_type"], {:name=>"index_oauth_applications_on_owner_id_and_owner_type", :using=>:btree})
+   -> 0.0030s
+-- add_index("oauth_applications", ["uid"], {:name=>"index_oauth_applications_on_uid", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- create_table("oauth_openid_requests", {:force=>:cascade})
+   -> 0.0048s
+-- create_table("pages_domains", {:force=>:cascade})
+   -> 0.0052s
+-- add_index("pages_domains", ["domain"], {:name=>"index_pages_domains_on_domain", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- add_index("pages_domains", ["project_id"], {:name=>"index_pages_domains_on_project_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("personal_access_tokens", {:force=>:cascade})
+   -> 0.0056s
+-- add_index("personal_access_tokens", ["token"], {:name=>"index_personal_access_tokens_on_token", :unique=>true, :using=>:btree})
+   -> 0.0032s
+-- add_index("personal_access_tokens", ["user_id"], {:name=>"index_personal_access_tokens_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("project_authorizations", {:id=>false, :force=>:cascade})
+   -> 0.0018s
+-- add_index("project_authorizations", ["project_id"], {:name=>"index_project_authorizations_on_project_id", :using=>:btree})
+   -> 0.0033s
+-- add_index("project_authorizations", ["user_id", "project_id", "access_level"], {:name=>"index_project_authorizations_on_user_id_project_id_access_level", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("project_auto_devops", {:force=>:cascade})
+   -> 0.0043s
+-- add_index("project_auto_devops", ["project_id"], {:name=>"index_project_auto_devops_on_project_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("project_custom_attributes", {:force=>:cascade})
+   -> 0.0047s
+-- add_index("project_custom_attributes", ["key", "value"], {:name=>"index_project_custom_attributes_on_key_and_value", :using=>:btree})
+   -> 0.0030s
+-- add_index("project_custom_attributes", ["project_id", "key"], {:name=>"index_project_custom_attributes_on_project_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- create_table("project_features", {:force=>:cascade})
+   -> 0.0038s
+-- add_index("project_features", ["project_id"], {:name=>"index_project_features_on_project_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("project_group_links", {:force=>:cascade})
+   -> 0.0036s
+-- add_index("project_group_links", ["group_id"], {:name=>"index_project_group_links_on_group_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("project_group_links", ["project_id"], {:name=>"index_project_group_links_on_project_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("project_import_data", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("project_import_data", ["project_id"], {:name=>"index_project_import_data_on_project_id", :using=>:btree})
+   -> 0.0027s
+-- create_table("project_statistics", {:force=>:cascade})
+   -> 0.0046s
+-- add_index("project_statistics", ["namespace_id"], {:name=>"index_project_statistics_on_namespace_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("project_statistics", ["project_id"], {:name=>"index_project_statistics_on_project_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("projects", {:force=>:cascade})
+   -> 0.0090s
+-- add_index("projects", ["ci_id"], {:name=>"index_projects_on_ci_id", :using=>:btree})
+   -> 0.0033s
+-- add_index("projects", ["created_at"], {:name=>"index_projects_on_created_at", :using=>:btree})
+   -> 0.0030s
+-- add_index("projects", ["creator_id"], {:name=>"index_projects_on_creator_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("projects", ["description"], {:name=>"index_projects_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}})
+   -> 0.0022s
+-- add_index("projects", ["last_activity_at"], {:name=>"index_projects_on_last_activity_at", :using=>:btree})
+   -> 0.0032s
+-- add_index("projects", ["last_repository_check_failed"], {:name=>"index_projects_on_last_repository_check_failed", :using=>:btree})
+   -> 0.0030s
+-- add_index("projects", ["last_repository_updated_at"], {:name=>"index_projects_on_last_repository_updated_at", :using=>:btree})
+   -> 0.0031s
+-- add_index("projects", ["name"], {:name=>"index_projects_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}})
+   -> 0.0022s
+-- add_index("projects", ["namespace_id"], {:name=>"index_projects_on_namespace_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("projects", ["path"], {:name=>"index_projects_on_path", :using=>:btree})
+   -> 0.0028s
+-- add_index("projects", ["path"], {:name=>"index_projects_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}})
+   -> 0.0023s
+-- add_index("projects", ["pending_delete"], {:name=>"index_projects_on_pending_delete", :using=>:btree})
+   -> 0.0029s
+-- add_index("projects", ["repository_storage"], {:name=>"index_projects_on_repository_storage", :using=>:btree})
+   -> 0.0026s
+-- add_index("projects", ["runners_token"], {:name=>"index_projects_on_runners_token", :using=>:btree})
+   -> 0.0034s
+-- add_index("projects", ["star_count"], {:name=>"index_projects_on_star_count", :using=>:btree})
+   -> 0.0028s
+-- add_index("projects", ["visibility_level"], {:name=>"index_projects_on_visibility_level", :using=>:btree})
+   -> 0.0027s
+-- create_table("protected_branch_merge_access_levels", {:force=>:cascade})
+   -> 0.0042s
+-- add_index("protected_branch_merge_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_merge_access", :using=>:btree})
+   -> 0.0029s
+-- create_table("protected_branch_push_access_levels", {:force=>:cascade})
+   -> 0.0037s
+-- add_index("protected_branch_push_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_push_access", :using=>:btree})
+   -> 0.0030s
+-- create_table("protected_branches", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("protected_branches", ["project_id"], {:name=>"index_protected_branches_on_project_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("protected_tag_create_access_levels", {:force=>:cascade})
+   -> 0.0037s
+-- add_index("protected_tag_create_access_levels", ["protected_tag_id"], {:name=>"index_protected_tag_create_access", :using=>:btree})
+   -> 0.0029s
+-- add_index("protected_tag_create_access_levels", ["user_id"], {:name=>"index_protected_tag_create_access_levels_on_user_id", :using=>:btree})
+   -> 0.0029s
+-- create_table("protected_tags", {:force=>:cascade})
+   -> 0.0051s
+-- add_index("protected_tags", ["project_id"], {:name=>"index_protected_tags_on_project_id", :using=>:btree})
+   -> 0.0034s
+-- create_table("push_event_payloads", {:id=>false, :force=>:cascade})
+   -> 0.0030s
+-- add_index("push_event_payloads", ["event_id"], {:name=>"index_push_event_payloads_on_event_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("redirect_routes", {:force=>:cascade})
+   -> 0.0049s
+-- add_index("redirect_routes", ["path"], {:name=>"index_redirect_routes_on_path", :unique=>true, :using=>:btree})
+   -> 0.0031s
+-- add_index("redirect_routes", ["source_type", "source_id"], {:name=>"index_redirect_routes_on_source_type_and_source_id", :using=>:btree})
+   -> 0.0034s
+-- create_table("releases", {:force=>:cascade})
+   -> 0.0043s
+-- add_index("releases", ["project_id", "tag"], {:name=>"index_releases_on_project_id_and_tag", :using=>:btree})
+   -> 0.0032s
+-- add_index("releases", ["project_id"], {:name=>"index_releases_on_project_id", :using=>:btree})
+   -> 0.0030s
+-- create_table("routes", {:force=>:cascade})
+   -> 0.0055s
+-- add_index("routes", ["path"], {:name=>"index_routes_on_path", :unique=>true, :using=>:btree})
+   -> 0.0028s
+-- add_index("routes", ["path"], {:name=>"index_routes_on_path_text_pattern_ops", :using=>:btree, :opclasses=>{"path"=>"varchar_pattern_ops"}})
+   -> 0.0026s
+-- add_index("routes", ["source_type", "source_id"], {:name=>"index_routes_on_source_type_and_source_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("sent_notifications", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("sent_notifications", ["reply_key"], {:name=>"index_sent_notifications_on_reply_key", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("services", {:force=>:cascade})
+   -> 0.0091s
+-- add_index("services", ["project_id"], {:name=>"index_services_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("services", ["template"], {:name=>"index_services_on_template", :using=>:btree})
+   -> 0.0031s
+-- create_table("snippets", {:force=>:cascade})
+   -> 0.0050s
+-- add_index("snippets", ["author_id"], {:name=>"index_snippets_on_author_id", :using=>:btree})
+   -> 0.0030s
+-- add_index("snippets", ["file_name"], {:name=>"index_snippets_on_file_name_trigram", :using=>:gin, :opclasses=>{"file_name"=>"gin_trgm_ops"}})
+   -> 0.0020s
+-- add_index("snippets", ["project_id"], {:name=>"index_snippets_on_project_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("snippets", ["title"], {:name=>"index_snippets_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}})
+   -> 0.0020s
+-- add_index("snippets", ["updated_at"], {:name=>"index_snippets_on_updated_at", :using=>:btree})
+   -> 0.0026s
+-- add_index("snippets", ["visibility_level"], {:name=>"index_snippets_on_visibility_level", :using=>:btree})
+   -> 0.0026s
+-- create_table("spam_logs", {:force=>:cascade})
+   -> 0.0048s
+-- create_table("subscriptions", {:force=>:cascade})
+   -> 0.0041s
+-- add_index("subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], {:name=>"index_subscriptions_on_subscribable_and_user_id_and_project_id", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- create_table("system_note_metadata", {:force=>:cascade})
+   -> 0.0040s
+-- add_index("system_note_metadata", ["note_id"], {:name=>"index_system_note_metadata_on_note_id", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- create_table("taggings", {:force=>:cascade})
+   -> 0.0047s
+-- add_index("taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], {:name=>"taggings_idx", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("taggings", ["taggable_id", "taggable_type", "context"], {:name=>"index_taggings_on_taggable_id_and_taggable_type_and_context", :using=>:btree})
+   -> 0.0025s
+-- create_table("tags", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("tags", ["name"], {:name=>"index_tags_on_name", :unique=>true, :using=>:btree})
+   -> 0.0026s
+-- create_table("timelogs", {:force=>:cascade})
+   -> 0.0033s
+-- add_index("timelogs", ["issue_id"], {:name=>"index_timelogs_on_issue_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("timelogs", ["merge_request_id"], {:name=>"index_timelogs_on_merge_request_id", :using=>:btree})
+   -> 0.0033s
+-- add_index("timelogs", ["user_id"], {:name=>"index_timelogs_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("todos", {:force=>:cascade})
+   -> 0.0043s
+-- add_index("todos", ["author_id"], {:name=>"index_todos_on_author_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("todos", ["commit_id"], {:name=>"index_todos_on_commit_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("todos", ["note_id"], {:name=>"index_todos_on_note_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("todos", ["project_id"], {:name=>"index_todos_on_project_id", :using=>:btree})
+   -> 0.0027s
+-- add_index("todos", ["target_type", "target_id"], {:name=>"index_todos_on_target_type_and_target_id", :using=>:btree})
+   -> 0.0028s
+-- add_index("todos", ["user_id"], {:name=>"index_todos_on_user_id", :using=>:btree})
+   -> 0.0026s
+-- create_table("trending_projects", {:force=>:cascade})
+   -> 0.0030s
+-- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :using=>:btree})
+   -> 0.0027s
+-- create_table("u2f_registrations", {:force=>:cascade})
+   -> 0.0048s
+-- add_index("u2f_registrations", ["key_handle"], {:name=>"index_u2f_registrations_on_key_handle", :using=>:btree})
+   -> 0.0029s
+-- add_index("u2f_registrations", ["user_id"], {:name=>"index_u2f_registrations_on_user_id", :using=>:btree})
+   -> 0.0028s
+-- create_table("uploads", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("uploads", ["checksum"], {:name=>"index_uploads_on_checksum", :using=>:btree})
+   -> 0.0028s
+-- add_index("uploads", ["model_id", "model_type"], {:name=>"index_uploads_on_model_id_and_model_type", :using=>:btree})
+   -> 0.0027s
+-- add_index("uploads", ["path"], {:name=>"index_uploads_on_path", :using=>:btree})
+   -> 0.0028s
+-- create_table("user_agent_details", {:force=>:cascade})
+   -> 0.0051s
+-- add_index("user_agent_details", ["subject_id", "subject_type"], {:name=>"index_user_agent_details_on_subject_id_and_subject_type", :using=>:btree})
+   -> 0.0028s
+-- create_table("user_custom_attributes", {:force=>:cascade})
+   -> 0.0044s
+-- add_index("user_custom_attributes", ["key", "value"], {:name=>"index_user_custom_attributes_on_key_and_value", :using=>:btree})
+   -> 0.0027s
+-- add_index("user_custom_attributes", ["user_id", "key"], {:name=>"index_user_custom_attributes_on_user_id_and_key", :unique=>true, :using=>:btree})
+   -> 0.0026s
+-- create_table("user_synced_attributes_metadata", {:force=>:cascade})
+   -> 0.0056s
+-- add_index("user_synced_attributes_metadata", ["user_id"], {:name=>"index_user_synced_attributes_metadata_on_user_id", :unique=>true, :using=>:btree})
+   -> 0.0027s
+-- create_table("users", {:force=>:cascade})
+   -> 0.0134s
+-- add_index("users", ["admin"], {:name=>"index_users_on_admin", :using=>:btree})
+   -> 0.0030s
+-- add_index("users", ["confirmation_token"], {:name=>"index_users_on_confirmation_token", :unique=>true, :using=>:btree})
+   -> 0.0029s
+-- add_index("users", ["created_at"], {:name=>"index_users_on_created_at", :using=>:btree})
+   -> 0.0034s
+-- add_index("users", ["email"], {:name=>"index_users_on_email", :unique=>true, :using=>:btree})
+   -> 0.0030s
+-- add_index("users", ["email"], {:name=>"index_users_on_email_trigram", :using=>:gin, :opclasses=>{"email"=>"gin_trgm_ops"}})
+   -> 0.0431s
+-- add_index("users", ["ghost"], {:name=>"index_users_on_ghost", :using=>:btree})
+   -> 0.0051s
+-- add_index("users", ["incoming_email_token"], {:name=>"index_users_on_incoming_email_token", :using=>:btree})
+   -> 0.0044s
+-- add_index("users", ["name"], {:name=>"index_users_on_name", :using=>:btree})
+   -> 0.0044s
+-- add_index("users", ["name"], {:name=>"index_users_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}})
+   -> 0.0034s
+-- add_index("users", ["reset_password_token"], {:name=>"index_users_on_reset_password_token", :unique=>true, :using=>:btree})
+   -> 0.0044s
+-- add_index("users", ["rss_token"], {:name=>"index_users_on_rss_token", :using=>:btree})
+   -> 0.0046s
+-- add_index("users", ["state"], {:name=>"index_users_on_state", :using=>:btree})
+   -> 0.0040s
+-- add_index("users", ["username"], {:name=>"index_users_on_username", :using=>:btree})
+   -> 0.0046s
+-- add_index("users", ["username"], {:name=>"index_users_on_username_trigram", :using=>:gin, :opclasses=>{"username"=>"gin_trgm_ops"}})
+   -> 0.0044s
+-- create_table("users_star_projects", {:force=>:cascade})
+   -> 0.0055s
+-- add_index("users_star_projects", ["project_id"], {:name=>"index_users_star_projects_on_project_id", :using=>:btree})
+   -> 0.0037s
+-- add_index("users_star_projects", ["user_id", "project_id"], {:name=>"index_users_star_projects_on_user_id_and_project_id", :unique=>true, :using=>:btree})
+   -> 0.0044s
+-- create_table("web_hook_logs", {:force=>:cascade})
+   -> 0.0060s
+-- add_index("web_hook_logs", ["web_hook_id"], {:name=>"index_web_hook_logs_on_web_hook_id", :using=>:btree})
+   -> 0.0034s
+-- create_table("web_hooks", {:force=>:cascade})
+   -> 0.0120s
+-- add_index("web_hooks", ["project_id"], {:name=>"index_web_hooks_on_project_id", :using=>:btree})
+   -> 0.0038s
+-- add_index("web_hooks", ["type"], {:name=>"index_web_hooks_on_type", :using=>:btree})
+   -> 0.0036s
+-- add_foreign_key("boards", "projects", {:name=>"fk_f15266b5f9", :on_delete=>:cascade})
+   -> 0.0030s
+-- add_foreign_key("chat_teams", "namespaces", {:on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("ci_build_trace_section_names", "projects", {:on_delete=>:cascade})
+   -> 0.0022s
+-- add_foreign_key("ci_build_trace_sections", "ci_build_trace_section_names", {:column=>"section_name_id", :name=>"fk_264e112c66", :on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("ci_build_trace_sections", "ci_builds", {:column=>"build_id", :name=>"fk_4ebe41f502", :on_delete=>:cascade})
+   -> 0.0024s
+-- add_foreign_key("ci_build_trace_sections", "projects", {:on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("ci_builds", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_a2141b1522", :on_delete=>:nullify})
+   -> 0.0023s
+-- add_foreign_key("ci_builds", "ci_stages", {:column=>"stage_id", :name=>"fk_3a9eaa254d", :on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("ci_builds", "projects", {:name=>"fk_befce0568a", :on_delete=>:cascade})
+   -> 0.0024s
+-- add_foreign_key("ci_group_variables", "namespaces", {:column=>"group_id", :name=>"fk_33ae4d58d8", :on_delete=>:cascade})
+   -> 0.0024s
+-- add_foreign_key("ci_job_artifacts", "ci_builds", {:column=>"job_id", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("ci_job_artifacts", "projects", {:on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("ci_pipeline_schedule_variables", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_41c35fda51", :on_delete=>:cascade})
+   -> 0.0027s
+-- add_foreign_key("ci_pipeline_schedules", "projects", {:name=>"fk_8ead60fcc4", :on_delete=>:cascade})
+   -> 0.0022s
+-- add_foreign_key("ci_pipeline_schedules", "users", {:column=>"owner_id", :name=>"fk_9ea99f58d2", :on_delete=>:nullify})
+   -> 0.0025s
+-- add_foreign_key("ci_pipeline_variables", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_f29c5f4380", :on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("ci_pipelines", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_3d34ab2e06", :on_delete=>:nullify})
+   -> 0.0019s
+-- add_foreign_key("ci_pipelines", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_262d4c2d19", :on_delete=>:nullify})
+   -> 0.0029s
+-- add_foreign_key("ci_pipelines", "projects", {:name=>"fk_86635dbd80", :on_delete=>:cascade})
+   -> 0.0023s
+-- add_foreign_key("ci_runner_projects", "projects", {:name=>"fk_4478a6f1e4", :on_delete=>:cascade})
+   -> 0.0036s
+-- add_foreign_key("ci_stages", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_fb57e6cc56", :on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("ci_stages", "projects", {:name=>"fk_2360681d1d", :on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("ci_trigger_requests", "ci_triggers", {:column=>"trigger_id", :name=>"fk_b8ec8b7245", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("ci_triggers", "projects", {:name=>"fk_e3e63f966e", :on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("ci_triggers", "users", {:column=>"owner_id", :name=>"fk_e8e10d1964", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("ci_variables", "projects", {:name=>"fk_ada5eb64b3", :on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("cluster_platforms_kubernetes", "clusters", {:on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("cluster_projects", "clusters", {:on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("cluster_projects", "projects", {:on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("cluster_providers_gcp", "clusters", {:on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("clusters", "users", {:on_delete=>:nullify})
+   -> 0.0018s
+-- add_foreign_key("clusters_applications_helm", "clusters", {:on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("container_repositories", "projects")
+   -> 0.0020s
+-- add_foreign_key("deploy_keys_projects", "projects", {:name=>"fk_58a901ca7e", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("deployments", "projects", {:name=>"fk_b9a3851b82", :on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("environments", "projects", {:name=>"fk_d1c8c1da6a", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("events", "projects", {:on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("events", "users", {:column=>"author_id", :name=>"fk_edfd187b6f", :on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("fork_network_members", "fork_networks", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("fork_network_members", "projects", {:column=>"forked_from_project_id", :name=>"fk_b01280dae4", :on_delete=>:nullify})
+   -> 0.0019s
+-- add_foreign_key("fork_network_members", "projects", {:on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("fork_networks", "projects", {:column=>"root_project_id", :name=>"fk_e7b436b2b5", :on_delete=>:nullify})
+   -> 0.0018s
+-- add_foreign_key("forked_project_links", "projects", {:column=>"forked_to_project_id", :name=>"fk_434510edb0", :on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("gcp_clusters", "projects", {:on_delete=>:cascade})
+   -> 0.0029s
+-- add_foreign_key("gcp_clusters", "services", {:on_delete=>:nullify})
+   -> 0.0022s
+-- add_foreign_key("gcp_clusters", "users", {:on_delete=>:nullify})
+   -> 0.0019s
+-- add_foreign_key("gpg_key_subkeys", "gpg_keys", {:on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("gpg_keys", "users", {:on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("gpg_signatures", "gpg_key_subkeys", {:on_delete=>:nullify})
+   -> 0.0016s
+-- add_foreign_key("gpg_signatures", "gpg_keys", {:on_delete=>:nullify})
+   -> 0.0016s
+-- add_foreign_key("gpg_signatures", "projects", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("group_custom_attributes", "namespaces", {:column=>"group_id", :on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("issue_assignees", "issues", {:name=>"fk_b7d881734a", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("issue_assignees", "users", {:name=>"fk_5e0c8d9154", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("issue_metrics", "issues", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("issues", "issues", {:column=>"moved_to_id", :name=>"fk_a194299be1", :on_delete=>:nullify})
+   -> 0.0014s
+-- add_foreign_key("issues", "milestones", {:name=>"fk_96b1dd429c", :on_delete=>:nullify})
+   -> 0.0016s
+-- add_foreign_key("issues", "projects", {:name=>"fk_899c8f3231", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("issues", "users", {:column=>"author_id", :name=>"fk_05f1e72feb", :on_delete=>:nullify})
+   -> 0.0015s
+-- add_foreign_key("issues", "users", {:column=>"updated_by_id", :name=>"fk_ffed080f01", :on_delete=>:nullify})
+   -> 0.0017s
+-- add_foreign_key("label_priorities", "labels", {:on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("label_priorities", "projects", {:on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("labels", "namespaces", {:column=>"group_id", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("labels", "projects", {:name=>"fk_7de4989a69", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("lists", "boards", {:name=>"fk_0d3f677137", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("lists", "labels", {:name=>"fk_7a5553d60f", :on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("members", "users", {:name=>"fk_2e88fb7ce9", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("merge_request_diff_commits", "merge_request_diffs", {:on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("merge_request_diff_files", "merge_request_diffs", {:on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("merge_request_diffs", "merge_requests", {:name=>"fk_8483f3258f", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("merge_request_metrics", "ci_pipelines", {:column=>"pipeline_id", :on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("merge_request_metrics", "merge_requests", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("merge_request_metrics", "users", {:column=>"latest_closed_by_id", :name=>"fk_ae440388cc", :on_delete=>:nullify})
+   -> 0.0015s
+-- add_foreign_key("merge_request_metrics", "users", {:column=>"merged_by_id", :name=>"fk_7f28d925f3", :on_delete=>:nullify})
+   -> 0.0015s
+-- add_foreign_key("merge_requests", "ci_pipelines", {:column=>"head_pipeline_id", :name=>"fk_fd82eae0b9", :on_delete=>:nullify})
+   -> 0.0014s
+-- add_foreign_key("merge_requests", "merge_request_diffs", {:column=>"latest_merge_request_diff_id", :name=>"fk_06067f5644", :on_delete=>:nullify})
+   -> 0.0014s
+-- add_foreign_key("merge_requests", "milestones", {:name=>"fk_6a5165a692", :on_delete=>:nullify})
+   -> 0.0015s
+-- add_foreign_key("merge_requests", "projects", {:column=>"source_project_id", :name=>"fk_3308fe130c", :on_delete=>:nullify})
+   -> 0.0017s
+-- add_foreign_key("merge_requests", "projects", {:column=>"target_project_id", :name=>"fk_a6963e8447", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("merge_requests", "users", {:column=>"assignee_id", :name=>"fk_6149611a04", :on_delete=>:nullify})
+   -> 0.0016s
+-- add_foreign_key("merge_requests", "users", {:column=>"author_id", :name=>"fk_e719a85f8a", :on_delete=>:nullify})
+   -> 0.0017s
+-- add_foreign_key("merge_requests", "users", {:column=>"merge_user_id", :name=>"fk_ad525e1f87", :on_delete=>:nullify})
+   -> 0.0018s
+-- add_foreign_key("merge_requests", "users", {:column=>"updated_by_id", :name=>"fk_641731faff", :on_delete=>:nullify})
+   -> 0.0017s
+-- add_foreign_key("merge_requests_closing_issues", "issues", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("merge_requests_closing_issues", "merge_requests", {:on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("milestones", "namespaces", {:column=>"group_id", :name=>"fk_95650a40d4", :on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("milestones", "projects", {:name=>"fk_9bd0a0c791", :on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("notes", "projects", {:name=>"fk_99e097b079", :on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("oauth_openid_requests", "oauth_access_grants", {:column=>"access_grant_id", :name=>"fk_oauth_openid_requests_oauth_access_grants_access_grant_id"})
+   -> 0.0014s
+-- add_foreign_key("pages_domains", "projects", {:name=>"fk_ea2f6dfc6f", :on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("personal_access_tokens", "users")
+   -> 0.0016s
+-- add_foreign_key("project_authorizations", "projects", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("project_authorizations", "users", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("project_auto_devops", "projects", {:on_delete=>:cascade})
+   -> 0.0026s
+-- add_foreign_key("project_custom_attributes", "projects", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("project_features", "projects", {:name=>"fk_18513d9b92", :on_delete=>:cascade})
+   -> 0.0020s
+-- add_foreign_key("project_group_links", "projects", {:name=>"fk_daa8cee94c", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("project_import_data", "projects", {:name=>"fk_ffb9ee3a10", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("project_statistics", "projects", {:on_delete=>:cascade})
+   -> 0.0021s
+-- add_foreign_key("protected_branch_merge_access_levels", "protected_branches", {:name=>"fk_8a3072ccb3", :on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("protected_branch_push_access_levels", "protected_branches", {:name=>"fk_9ffc86a3d9", :on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("protected_branches", "projects", {:name=>"fk_7a9c6d93e7", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("protected_tag_create_access_levels", "namespaces", {:column=>"group_id"})
+   -> 0.0016s
+-- add_foreign_key("protected_tag_create_access_levels", "protected_tags", {:name=>"fk_f7dfda8c51", :on_delete=>:cascade})
+   -> 0.0013s
+-- add_foreign_key("protected_tag_create_access_levels", "users")
+   -> 0.0018s
+-- add_foreign_key("protected_tags", "projects", {:name=>"fk_8e4af87648", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("push_event_payloads", "events", {:name=>"fk_36c74129da", :on_delete=>:cascade})
+   -> 0.0013s
+-- add_foreign_key("releases", "projects", {:name=>"fk_47fe2a0596", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("services", "projects", {:name=>"fk_71cce407f9", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("snippets", "projects", {:name=>"fk_be41fd4bb7", :on_delete=>:cascade})
+   -> 0.0017s
+-- add_foreign_key("subscriptions", "projects", {:on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("system_note_metadata", "notes", {:name=>"fk_d83a918cb1", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("timelogs", "issues", {:name=>"fk_timelogs_issues_issue_id", :on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("timelogs", "merge_requests", {:name=>"fk_timelogs_merge_requests_merge_request_id", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("todos", "projects", {:name=>"fk_45054f9c45", :on_delete=>:cascade})
+   -> 0.0018s
+-- add_foreign_key("trending_projects", "projects", {:on_delete=>:cascade})
+   -> 0.0015s
+-- add_foreign_key("u2f_registrations", "users")
+   -> 0.0017s
+-- add_foreign_key("user_custom_attributes", "users", {:on_delete=>:cascade})
+   -> 0.0019s
+-- add_foreign_key("user_synced_attributes_metadata", "users", {:on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("users_star_projects", "projects", {:name=>"fk_22cd27ddfc", :on_delete=>:cascade})
+   -> 0.0016s
+-- add_foreign_key("web_hook_logs", "web_hooks", {:on_delete=>:cascade})
+   -> 0.0014s
+-- add_foreign_key("web_hooks", "projects", {:name=>"fk_0c8ca6d9d1", :on_delete=>:cascade})
+   -> 0.0017s
+-- initialize_schema_migrations_table()
+   -> 0.0112s
+$ JOB_NAME=( $CI_JOB_NAME )
+$ export CI_NODE_INDEX=${JOB_NAME[-2]}
+$ export CI_NODE_TOTAL=${JOB_NAME[-1]}
+$ export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+$ export KNAPSACK_GENERATE_REPORT=true
+$ export CACHE_CLASSES=true
+$ cp ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}
+$ scripts/gitaly-test-spawn
+Gem.path: ["/root/.gem/ruby/2.3.0", "/usr/local/lib/ruby/gems/2.3.0", "/usr/local/bundle"]
+ENV['BUNDLE_GEMFILE']: nil
+ENV['RUBYOPT']: nil
+bundle config in /builds/gitlab-org/gitlab-ce
+scripts/gitaly-test-spawn:10:in `<main>': undefined local variable or method `gitaly_dir' for main:Object (NameError)
+Did you mean?  gitaly_dir
+Settings are listed in order of priority. The top value will be used.
+retry
+Set for your local app (/usr/local/bundle/config): 3
+
+path
+Set for your local app (/usr/local/bundle/config): "vendor"
+Set via BUNDLE_PATH: "/usr/local/bundle"
+
+jobs
+Set for your local app (/usr/local/bundle/config): "2"
+
+clean
+Set for your local app (/usr/local/bundle/config): "true"
+
+without
+Set for your local app (/usr/local/bundle/config): [:production]
+
+silence_root_warning
+Set via BUNDLE_SILENCE_ROOT_WARNING: true
+
+app_config
+Set via BUNDLE_APP_CONFIG: "/usr/local/bundle"
+
+install_flags
+Set via BUNDLE_INSTALL_FLAGS: "--without=production --jobs=2 --path=vendor --retry=3 --quiet"
+
+bin
+Set via BUNDLE_BIN: "/usr/local/bundle/bin"
+
+gemfile
+Set via BUNDLE_GEMFILE: "/builds/gitlab-org/gitlab-ce/Gemfile"
+
+section_end:1517486961:build_script
+section_start:1517486961:after_script
+section_end:1517486962:after_script
+section_start:1517486962:upload_artifacts
+Uploading artifacts...
+WARNING: coverage/: no matching files              
+knapsack/: found 5 matching files                  
+WARNING: tmp/capybara/: no matching files          
+Uploading artifacts to coordinator... ok            id=50551722 responseStatus=201 Created token=XkN753rp
+section_end:1517486963:upload_artifacts
+ERROR: Job failed: exit code 1
+
\ No newline at end of file
diff --git a/spec/javascripts/fixtures/jobs.rb b/spec/javascripts/fixtures/jobs.rb
index 87d131dfe28b..6d5c6d5334f2 100644
--- a/spec/javascripts/fixtures/jobs.rb
+++ b/spec/javascripts/fixtures/jobs.rb
@@ -7,7 +7,7 @@
   let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
   let(:project) { create(:project_empty_repo, namespace: namespace, path: 'builds-project') }
   let(:pipeline) { create(:ci_empty_pipeline, project: project) }
-  let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) }
+  let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace_artifact, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) }
   let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline, stage: 'build') }
   let!(:pending_build) { create(:ci_build, :pending, pipeline: pipeline, stage: 'deploy') }
 
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 3546532b9b46..91c9625ba066 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -238,11 +238,98 @@
     end
   end
 
+  describe '#read' do
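+    # A trace can come from several sources: a trace artifact, the job's
+    # current path, the deprecated (project_ci_id) path, or the database
+    # column. Each context below exercises one source.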
+    shared_examples 'read successfully with IO' do
+      it 'yields with source' do
+        trace.read do |stream|
+          expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
+          expect(stream.stream).to be_a(IO)
+        end
+      end
+    end
+
+    shared_examples 'read successfully with StringIO' do
+      it 'yields with source' do
+        trace.read do |stream|
+          expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
+          expect(stream.stream).to be_a(StringIO)
+        end
+      end
+    end
+
+    shared_examples 'failed to read' do
+      it 'yields without source' do
+        trace.read do |stream|
+          expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
+          expect(stream.stream).to be_nil
+        end
+      end
+    end
+
+    context 'when trace artifact exists' do
+      before do
+        create(:ci_job_artifact, :trace, job: build)
+      end
+
+      it_behaves_like 'read successfully with IO'
+    end
+
+    context 'when current_path (with project_id) exists' do
+      before do
+        expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
+      end
+
+      it_behaves_like 'read successfully with IO'
+    end
+
+    context 'when current_path (with project_ci_id) exists' do
+      before do
+        expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
+      end
+
+      it_behaves_like 'read successfully with IO'
+    end
+
+    context 'when db trace exists' do
+      before do
+        build.send(:write_attribute, :trace, "data")
+      end
+
+      it_behaves_like 'read successfully with StringIO'
+    end
+
+    context 'when no sources exist' do
+      it_behaves_like 'failed to read'
+    end
+  end
+
   describe 'trace handling' do
+    subject { trace.exist? }
+
     context 'trace does not exist' do
       it { expect(trace.exist?).to be(false) }
     end
 
+    context 'when trace artifact exists' do
+      before do
+        create(:ci_job_artifact, :trace, job: build)
+      end
+
+      it { is_expected.to be_truthy }
+
+      context 'when the trace artifact has been erased' do
+        before do
+          trace.erase!
+        end
+
+        it { is_expected.to be_falsy }
+
+        it 'removes associations' do
+          expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
+        end
+      end
+    end
+
     context 'new trace path is used' do
       before do
         trace.send(:ensure_directory)
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 3af275049677..b39105abb410 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -726,7 +726,7 @@
 
     context 'build is erasable' do
       context 'new artifacts' do
-        let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+        let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
 
         describe '#erase' do
           before do
@@ -760,7 +760,7 @@
         end
 
         describe '#erased?' do
-          let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+          let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
           subject { build.erased? }
 
           context 'job has not been erased' do
@@ -795,7 +795,7 @@
     context 'old artifacts' do
       context 'build is erasable' do
         context 'new artifacts' do
-          let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
+          let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
 
           describe '#erase' do
             before do
@@ -829,7 +829,7 @@
           end
 
           describe '#erased?' do
-            let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
+            let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
             subject { build.erased? }
 
             context 'job has not been erased' do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index a10afb98d2b2..1e8860cf2954 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -12,6 +12,9 @@
   it { is_expected.to respond_to(:created_at) }
   it { is_expected.to respond_to(:updated_at) }
 
+  it { is_expected.to delegate_method(:open).to(:file) }
+  it { is_expected.to delegate_method(:exists?).to(:file) }
+
   describe 'callbacks' do
     subject { create(:ci_job_artifact, :archive) }
 
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 79130f2d2a81..366bfeb4d72d 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,6 +1,8 @@
 require 'spec_helper'
 
 describe API::Jobs do
+  include HttpIOHelpers
+
   set(:project) do
     create(:project, :repository, public_builds: false)
   end
@@ -480,16 +482,43 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
   end
 
   describe 'GET /projects/:id/jobs/:job_id/trace' do
-    let(:job) { create(:ci_build, :trace, pipeline: pipeline) }
-
     before do
       get api("/projects/#{project.id}/jobs/#{job.id}/trace", api_user)
     end
 
     context 'authorized user' do
-      it 'returns specific job trace' do
-        expect(response).to have_gitlab_http_status(200)
-        expect(response.body).to eq(job.trace.raw)
+      context 'when trace is in ObjectStorage' do
+        let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+        before do
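+          # Simulate a remote (object storage) trace: the uploader reports
+          # non-file storage and serves the fixture trace over HTTP with
+          # 206 Partial Content responses.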
+          stub_remote_trace_206
+          allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+          allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+          allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+        end
+
+        it 'returns specific job trace' do
+          expect(response).to have_gitlab_http_status(200)
+          expect(response.body).to eq(job.trace.raw)
+        end
+      end
+
+      context 'when trace is an artifact' do
+        let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+        it 'returns specific job trace' do
+          expect(response).to have_gitlab_http_status(200)
+          expect(response.body).to eq(job.trace.raw)
+        end
+      end
+
+      context 'when trace is a file' do
+        let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
+
+        it 'returns specific job trace' do
+          expect(response).to have_gitlab_http_status(200)
+          expect(response.body).to eq(job.trace.raw)
+        end
       end
     end
 
@@ -577,11 +606,11 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
     end
 
     context 'job is erasable' do
-      let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) }
 
       it 'erases job content' do
         expect(response).to have_gitlab_http_status(201)
-        expect(job).not_to have_trace
+        expect(job.trace.exist?).to be_falsy
         expect(job.artifacts_file.exists?).to be_falsy
         expect(job.artifacts_metadata.exists?).to be_falsy
       end
@@ -595,7 +624,7 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
     end
 
     context 'job is not erasable' do
-      let(:job) { create(:ci_build, :trace, project: project, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
 
       it 'responds with forbidden' do
         expect(response).to have_gitlab_http_status(403)
@@ -604,7 +633,7 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
 
     context 'when a developer erases a build' do
       let(:role) { :developer }
-      let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
+      let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
 
       context 'when the build was created by the developer' do
         let(:owner) { user }
@@ -627,7 +656,7 @@ def get_for_ref(ref = pipeline.ref, job_name = job.name)
 
     context 'artifacts did not expire' do
       let(:job) do
-        create(:ci_build, :trace, :artifacts, :success,
+        create(:ci_build, :trace_artifact, :artifacts, :success,
                project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
       end
 
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index c6366ffec62e..d68bba0082c6 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -639,7 +639,7 @@ def request_job(token = runner.token, **params)
     end
 
     describe 'PUT /api/v4/jobs/:id' do
-      let(:job) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) }
+      let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) }
 
       before do
         job.run!
@@ -681,11 +681,17 @@ def request_job(token = runner.token, **params)
       end
 
+        context 'when trace is given' do
-        it 'updates a running build' do
-          update_job(trace: 'BUILD TRACE UPDATED')
+        it 'creates a trace artifact' do
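+          # Run the finished-build hook inline so the live trace is converted
+          # into a trace artifact before the assertions below.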
+          allow_any_instance_of(BuildFinishedWorker).to receive(:perform).with(job.id) do
+            CreateTraceArtifactWorker.new.perform(job.id)
+          end
+
+          update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
 
+          job.reload
           expect(response).to have_gitlab_http_status(200)
-          expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED'
+          expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
+          expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
         end
       end
 
@@ -714,7 +720,7 @@ def update_job(token = job.token, **params)
     end
 
     describe 'PATCH /api/v4/jobs/:id/trace' do
-      let(:job) { create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) }
+      let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) }
       let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
       let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
       let(:update_interval) { 10.seconds.to_i }
@@ -775,7 +781,7 @@ def update_job(token = job.token, **params)
 
         context 'when project for the build has been deleted' do
           let(:job) do
-            create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) do |job|
+            create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
               job.project.update(pending_delete: true)
             end
           end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 862bf7e540d0..c6094c5ca5c9 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -356,7 +356,7 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
   end
 
   describe 'GET /projects/:id/builds/:build_id/trace' do
-    let(:build) { create(:ci_build, :trace, pipeline: pipeline) }
+    let(:build) { create(:ci_build, :trace_live, pipeline: pipeline) }
 
     before do
       get v3_api("/projects/#{project.id}/builds/#{build.id}/trace", api_user)
@@ -451,7 +451,7 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
     end
 
     context 'job is erasable' do
-      let(:build) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline) }
+      let(:build) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline) }
 
       it 'erases job content' do
         expect(response.status).to eq 201
@@ -467,7 +467,7 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
     end
 
     context 'job is not erasable' do
-      let(:build) { create(:ci_build, :trace, project: project, pipeline: pipeline) }
+      let(:build) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
 
       it 'responds with forbidden' do
         expect(response.status).to eq 403
@@ -482,7 +482,7 @@ def path_for_ref(ref = pipeline.ref, job = build.name)
 
     context 'artifacts did not expire' do
       let(:build) do
-        create(:ci_build, :trace, :artifacts, :success,
+        create(:ci_build, :trace_artifact, :artifacts, :success,
                project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
       end
 
diff --git a/spec/services/ci/create_trace_artifact_service_spec.rb b/spec/services/ci/create_trace_artifact_service_spec.rb
new file mode 100644
index 000000000000..847a88920fee
--- /dev/null
+++ b/spec/services/ci/create_trace_artifact_service_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Ci::CreateTraceArtifactService do
+  describe '#execute' do
+    subject { described_class.new(nil, nil).execute(job) }
+
+    let(:job) { create(:ci_build) }
+
+    context 'when the job does not have a trace artifact' do
+      context 'when the job has a trace file' do
+        before do
+          allow_any_instance_of(Gitlab::Ci::Trace)
+            .to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
+
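+          # With move_to_cache/move_to_store stubbed to false, CarrierWave
+          # copies the trace file instead of moving it, leaving the shared
+          # fixture intact for other examples.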
+          allow_any_instance_of(JobArtifactUploader).to receive(:move_to_cache) { false }
+          allow_any_instance_of(JobArtifactUploader).to receive(:move_to_store) { false }
+        end
+
+        it 'creates trace artifact' do
+          expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+          expect(job.job_artifacts_trace.read_attribute(:file)).to eq('sample_trace')
+        end
+
+        context 'when the job already has a trace artifact' do
+          before do
+            create(:ci_job_artifact, :trace, job: job)
+          end
+
+          it 'does not create trace artifact' do
+            expect { subject }.not_to change { Ci::JobArtifact.count }
+          end
+        end
+      end
+
+      context 'when the job does not have a trace file' do
+        it 'does not create trace artifact' do
+          expect { subject }.not_to change { Ci::JobArtifact.count }
+        end
+      end
+    end
+  end
+end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index ea5d1f67b0c8..4750091a7a41 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -17,7 +17,8 @@
     %i[id status user token coverage trace runner artifacts_expire_at
        artifacts_file artifacts_metadata artifacts_size created_at
        updated_at started_at finished_at queued_at erased_by
-       erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
+       erased_at auto_canceled_by job_artifacts job_artifacts_archive
+       job_artifacts_metadata job_artifacts_trace].freeze
 
   IGNORE_ACCESSORS =
     %i[type lock_version target_url base_tags trace_sections
@@ -37,7 +38,7 @@
     let(:build) do
       create(:ci_build, :failed, :artifacts, :expired, :erased,
              :queued, :coverage, :tags, :allowed_to_fail, :on_tag,
-             :triggered, :trace, :teardown_environment,
+             :triggered, :trace_artifact, :teardown_environment,
              description: 'my-job', stage: 'test',  pipeline: pipeline,
              auto_canceled_by: create(:ci_empty_pipeline, project: project)) do |build|
                ##
diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb
new file mode 100644
index 000000000000..31e07e720cdf
--- /dev/null
+++ b/spec/support/http_io/http_io_helpers.rb
@@ -0,0 +1,64 @@
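+# Helpers for stubbing HTTP Range requests against a remote trace file.
+# These back the specs that read object-stored traces over HTTP.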
+module HttpIOHelpers
+  def stub_remote_trace_206
+    WebMock.stub_request(:get, remote_trace_url)
+      .to_return { |request| remote_trace_response(request, 206) }
+  end
+
+  def stub_remote_trace_200
+    WebMock.stub_request(:get, remote_trace_url)
+      .to_return { |request| remote_trace_response(request, 200) }
+  end
+
+  def stub_remote_trace_500
+    WebMock.stub_request(:get, remote_trace_url)
+      .to_return(status: [500, "Internal Server Error"])
+  end
+
+  def remote_trace_url
+    "http://trace.com/trace"
+  end
+
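+  # Builds the stubbed response for the byte range the client requested,
+  # e.g. "Range: bytes=0-127" yields that slice of the fixture body.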
+  def remote_trace_response(request, response_status)
+    range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
+
+    {
+      status: response_status,
+      headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
+      body: range_trace_body(range[1].to_i, range[2].to_i)
+    }
+  end
+
+  def remote_trace_response_headers(response_status, from, to)
+    headers = { 'Content-Type' => 'text/plain' }
+
+    if response_status == 206
+      headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
+    end
+
+    headers
+  end
+
+  def range_trace_body(from, to)
+    remote_trace_body[from..to]
+  end
+
+  def remote_trace_body
+    @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
+  end
+
+  def remote_trace_size
+    remote_trace_body.length
+  end
+
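+  # Stub HttpIO's buffer size to roughly half (or double) the given file
+  # size, rounded to a 128-byte block, to force chunked (or single-request)
+  # reads.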
+  def set_smaller_buffer_size_than(file_size)
+    blocks = (file_size / 128)
+    new_size = (blocks / 2) * 128
+    stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+  end
+
+  def set_larger_buffer_size_than(file_size)
+    blocks = (file_size / 128)
+    new_size = (blocks * 2) * 128
+    stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+  end
+end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index fda70a8441bc..f4c4b69b7736 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -23,6 +23,33 @@
                     store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
   end
 
+  describe '#open' do
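+    # #open should hand back a readable IO for locally stored files, and nil
+    # when nothing has been stored yet.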
+    subject { uploader.open }
+
+    context 'when trace is stored in File storage' do
+      context 'when file exists' do
+        let(:file) do
+          fixture_file_upload(
+            Rails.root.join('spec/fixtures/trace/sample_trace'), 'text/plain')
+        end
+
+        before do
+          uploader.store!(file)
+        end
+
+        it 'returns io stream' do
+          is_expected.to be_a(IO)
+        end
+      end
+
+      context 'when file does not exist' do
+        it 'returns nil' do
+          is_expected.to be_nil
+        end
+      end
+    end
+  end
+
   context 'file is stored in valid local_path' do
     let(:file) do
       fixture_file_upload(
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 1a7ffd5cdbff..c7ff8cf3b92c 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -6,17 +6,15 @@
       let!(:build) { create(:ci_build) }
 
       it 'calculates coverage and calls hooks' do
-        expect(BuildCoverageWorker)
+        expect(BuildTraceSectionsWorker)
           .to receive(:new).ordered.and_call_original
-        expect(BuildHooksWorker)
+        expect(BuildCoverageWorker)
           .to receive(:new).ordered.and_call_original
 
-        expect(BuildTraceSectionsWorker)
-          .to receive(:perform_async)
-        expect_any_instance_of(BuildCoverageWorker)
-          .to receive(:perform)
-        expect_any_instance_of(BuildHooksWorker)
-          .to receive(:perform)
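+        # Sections and coverage run inline; hooks and trace-artifact creation
+        # are enqueued asynchronously.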
+        expect_any_instance_of(BuildTraceSectionsWorker).to receive(:perform)
+        expect_any_instance_of(BuildCoverageWorker).to receive(:perform)
+        expect(BuildHooksWorker).to receive(:perform_async)
+        expect(CreateTraceArtifactWorker).to receive(:perform_async)
 
         described_class.new.perform(build.id)
       end
diff --git a/spec/workers/create_trace_artifact_worker_spec.rb b/spec/workers/create_trace_artifact_worker_spec.rb
new file mode 100644
index 000000000000..854abd9cca77
--- /dev/null
+++ b/spec/workers/create_trace_artifact_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe CreateTraceArtifactWorker do
+  describe '#perform' do
+    subject { described_class.new.perform(job&.id) }
+
+    context 'when job is found' do
+      let(:job) { create(:ci_build) }
+
+      it 'executes service' do
+        expect_any_instance_of(Ci::CreateTraceArtifactService)
+          .to receive(:execute).with(job)
+
+        subject
+      end
+    end
+
+    context 'when job is not found' do
+      let(:job) { nil }
+
+      it 'does not execute service' do
+        expect_any_instance_of(Ci::CreateTraceArtifactService)
+          .not_to receive(:execute)
+
+        subject
+      end
+    end
+  end
+end
-- 
GitLab


From 8af23def1d6450420d06b8de54d23311a978de20 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Wed, 28 Feb 2018 21:09:34 +0100
Subject: [PATCH 11/14] Revert "Merge branch '3867-port-to-ce' into 'master'"

This reverts commit 54a575f1bbba44573ab92dc58a4242f1ee734c5d, reversing
changes made to c63af942e5baf7849a94fa99da8494bcba28e3f8.
---
 app/controllers/concerns/uploads_actions.rb   |  61 ++-------
 app/controllers/groups/uploads_controller.rb  |  30 +++--
 .../projects/lfs_storage_controller.rb        |   2 +-
 .../projects/uploads_controller.rb            |  21 +--
 app/controllers/uploads_controller.rb         |  75 +++++++----
 app/models/appearance.rb                      |   1 -
 app/models/concerns/avatarable.rb             |  24 ----
 app/models/group.rb                           |  14 +-
 app/models/note.rb                            |   1 -
 app/models/project.rb                         |  15 ++-
 app/models/upload.rb                          |  49 +++----
 app/models/user.rb                            |  16 ++-
 .../migrate_attachments_service.rb            |   4 +-
 app/uploaders/attachment_uploader.rb          |   8 +-
 app/uploaders/avatar_uploader.rb              |  19 +--
 app/uploaders/file_mover.rb                   |   3 +-
 app/uploaders/file_uploader.rb                | 118 +++++------------
 app/uploaders/gitlab_uploader.rb              |  77 ++++++-----
 app/uploaders/job_artifact_uploader.rb        |  26 +++-
 app/uploaders/legacy_artifact_uploader.rb     |  26 +++-
 app/uploaders/lfs_object_uploader.rb          |  21 ++-
 app/uploaders/namespace_file_uploader.rb      |  18 +--
 app/uploaders/personal_file_uploader.rb       |  30 ++---
 app/uploaders/records_uploads.rb              |  80 ++++--------
 app/uploaders/uploader_helper.rb              |   9 +-
 app/uploaders/workhorse.rb                    |   7 -
 app/workers/upload_checksum_worker.rb         |   2 +-
 config/gitlab.yml.example                     |   8 --
 config/initializers/1_settings.rb             |  13 +-
 ...119135717_add_uploader_index_to_uploads.rb |  20 ---
 db/schema.rb                                  |   2 +-
 doc/development/file_storage.md               | 104 +--------------
 lib/api/runner.rb                             |   6 +-
 lib/backup/artifacts.rb                       |   2 +-
 .../populate_untracked_uploads.rb             |   2 +-
 .../prepare_untracked_uploads.rb              |   9 +-
 lib/gitlab/gfm/uploads_rewriter.rb            |   2 +-
 lib/gitlab/import_export/uploads_saver.rb     |   8 +-
 lib/gitlab/uploads_transfer.rb                |   2 +-
 lib/gitlab/workhorse.rb                       |   9 +-
 .../groups/uploads_controller_spec.rb         |   4 +-
 .../projects/artifacts_controller_spec.rb     |   3 +-
 .../projects/raw_controller_spec.rb           |   2 +-
 spec/controllers/uploads_controller_spec.rb   |  13 --
 spec/factories/groups.rb                      |   2 +-
 spec/factories/notes.rb                       |   4 +-
 spec/factories/projects.rb                    |   2 +-
 spec/factories/uploads.rb                     |  26 +---
 spec/factories/users.rb                       |   2 +-
 .../prepare_untracked_uploads_spec.rb         |  57 +++++----
 spec/lib/gitlab/gfm/uploads_rewriter_spec.rb  |   2 +-
 .../import_export/uploads_restorer_spec.rb    |   9 +-
 .../import_export/uploads_saver_spec.rb       |   4 +-
 spec/models/namespace_spec.rb                 |   2 +-
 spec/models/upload_spec.rb                    |  73 +++++++++--
 spec/requests/api/runner_spec.rb              |   4 +-
 spec/requests/lfs_http_spec.rb                |   4 +-
 spec/services/issues/move_service_spec.rb     |   4 +-
 .../migrate_attachments_service_spec.rb       |   4 +-
 .../uploads_actions_shared_examples.rb        |  62 ++++-----
 .../gitlab_uploader_shared_examples.rb        |  48 -------
 spec/support/test_env.rb                      |   2 +-
 .../track_untracked_uploads_helpers.rb        |   2 +-
 spec/uploaders/attachment_uploader_spec.rb    |  30 +++--
 spec/uploaders/avatar_uploader_spec.rb        |  18 +--
 spec/uploaders/file_mover_spec.rb             |  18 +--
 spec/uploaders/file_uploader_spec.rb          | 121 +++++++++++++-----
 spec/uploaders/job_artifact_uploader_spec.rb  |  32 ++++-
 .../legacy_artifact_uploader_spec.rb          |  49 +++++--
 spec/uploaders/lfs_object_uploader_spec.rb    |  38 +++++-
 .../uploaders/namespace_file_uploader_spec.rb |  21 +--
 spec/uploaders/personal_file_uploader_spec.rb |  28 ++--
 spec/uploaders/records_uploads_spec.rb        |  73 ++++++-----
 spec/workers/upload_checksum_worker_spec.rb   |  29 ++---
 74 files changed, 822 insertions(+), 914 deletions(-)
 delete mode 100644 app/uploaders/workhorse.rb
 delete mode 100644 db/migrate/20180119135717_add_uploader_index_to_uploads.rb
 delete mode 100644 spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb

diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index 61554029d095..a6fb1f400011 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -1,8 +1,6 @@
 module UploadsActions
   include Gitlab::Utils::StrongMemoize
 
-  UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
-
   def create
     link_to_file = UploadService.new(model, params[:file], uploader_class).execute
 
@@ -19,71 +17,34 @@ def create
     end
   end
 
-  # This should either
-  #   - send the file directly
-  #   - or redirect to its URL
-  #
   def show
     return render_404 unless uploader.exists?
 
-    if uploader.file_storage?
-      disposition = uploader.image_or_video? ? 'inline' : 'attachment'
-      expires_in 0.seconds, must_revalidate: true, private: true
-
-      send_file uploader.file.path, disposition: disposition
-    else
-      redirect_to uploader.url
-    end
-  end
-
-  private
+    disposition = uploader.image_or_video? ? 'inline' : 'attachment'
 
-  def uploader_class
-    raise NotImplementedError
-  end
+    expires_in 0.seconds, must_revalidate: true, private: true
 
-  def upload_mount
-    mounted_as = params[:mounted_as]
-    mounted_as if UPLOAD_MOUNTS.include?(mounted_as)
+    send_file uploader.file.path, disposition: disposition
   end
 
-  def uploader_mounted?
-    upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil?
-  end
+  private
 
   def uploader
     strong_memoize(:uploader) do
-      if uploader_mounted?
-        model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
-      else
-        build_uploader_from_upload || build_uploader_from_params
-      end
-    end
-  end
-
-  def build_uploader_from_upload
-    return nil unless params[:secret] && params[:filename]
+      return if show_model.nil?
 
-    upload_path = uploader_class.upload_path(params[:secret], params[:filename])
-    upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
-    upload&.build_uploader
-  end
+      file_uploader = FileUploader.new(show_model, params[:secret])
+      file_uploader.retrieve_from_store!(params[:filename])
 
-  def build_uploader_from_params
-    uploader = uploader_class.new(model, params[:secret])
-    uploader.retrieve_from_store!(params[:filename])
-    uploader
+      file_uploader
+    end
   end
 
   def image_or_video?
     uploader && uploader.exists? && uploader.image_or_video?
   end
 
-  def find_model
-    nil
-  end
-
-  def model
-    strong_memoize(:model) { find_model }
+  def uploader_class
+    FileUploader
   end
 end
diff --git a/app/controllers/groups/uploads_controller.rb b/app/controllers/groups/uploads_controller.rb
index f1578f75e88b..e6bd9806401d 100644
--- a/app/controllers/groups/uploads_controller.rb
+++ b/app/controllers/groups/uploads_controller.rb
@@ -7,23 +7,29 @@ class Groups::UploadsController < Groups::ApplicationController
 
   private
 
-  def upload_model_class
-    Group
-  end
+  def show_model
+    strong_memoize(:show_model) do
+      group_id = params[:group_id]
 
-  def uploader_class
-    NamespaceFileUploader
+      Group.find_by_full_path(group_id)
+    end
   end
 
-  def find_model
-    return @group if @group
-
-    group_id = params[:group_id]
+  def authorize_upload_file!
+    render_404 unless can?(current_user, :upload_file, group)
+  end
 
-    Group.find_by_full_path(group_id)
+  def uploader
+    strong_memoize(:uploader) do
+      file_uploader = uploader_class.new(show_model, params[:secret])
+      file_uploader.retrieve_from_store!(params[:filename])
+      file_uploader
+    end
   end
 
-  def authorize_upload_file!
-    render_404 unless can?(current_user, :upload_file, group)
+  def uploader_class
+    NamespaceFileUploader
   end
+
+  alias_method :model, :group
 end
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 941638db4276..293869345bd4 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -60,7 +60,7 @@ def tmp_filename
 
   def store_file(oid, size, tmp_file)
     # Define tmp_file_path early because we use it in "ensure"
-    tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
+    tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file)
 
     object = LfsObject.find_or_create_by(oid: oid, size: size)
     file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
diff --git a/app/controllers/projects/uploads_controller.rb b/app/controllers/projects/uploads_controller.rb
index f5cf089ad98d..4685bbe80b4b 100644
--- a/app/controllers/projects/uploads_controller.rb
+++ b/app/controllers/projects/uploads_controller.rb
@@ -1,7 +1,6 @@
 class Projects::UploadsController < Projects::ApplicationController
   include UploadsActions
 
-  # These will kick you out if you don't have access.
   skip_before_action :project, :repository,
     if: -> { action_name == 'show' && image_or_video? }
 
@@ -9,20 +8,14 @@ class Projects::UploadsController < Projects::ApplicationController
 
   private
 
-  def upload_model_class
-    Project
-  end
+  def show_model
+    strong_memoize(:show_model) do
+      namespace = params[:namespace_id]
+      id = params[:project_id]
 
-  def uploader_class
-    FileUploader
+      Project.find_by_full_path("#{namespace}/#{id}")
+    end
   end
 
-  def find_model
-    return @project if @project
-
-    namespace = params[:namespace_id]
-    id = params[:project_id]
-
-    Project.find_by_full_path("#{namespace}/#{id}")
-  end
+  alias_method :model, :project
 end
diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb
index 3d227b0a9551..16a74f82d3f6 100644
--- a/app/controllers/uploads_controller.rb
+++ b/app/controllers/uploads_controller.rb
@@ -1,34 +1,19 @@
 class UploadsController < ApplicationController
   include UploadsActions
 
-  UnknownUploadModelError = Class.new(StandardError)
-
-  MODEL_CLASSES = {
-    "user"             => User,
-    "project"          => Project,
-    "note"             => Note,
-    "group"            => Group,
-    "appearance"       => Appearance,
-    "personal_snippet" => PersonalSnippet,
-    nil                => PersonalSnippet
-  }.freeze
-
-  rescue_from UnknownUploadModelError, with: :render_404
-
   skip_before_action :authenticate_user!
-  before_action :upload_mount_satisfied?
   before_action :find_model
   before_action :authorize_access!, only: [:show]
   before_action :authorize_create_access!, only: [:create]
 
-  def uploader_class
-    PersonalFileUploader
-  end
+  private
 
   def find_model
     return nil unless params[:id]
 
-    upload_model_class.find(params[:id])
+    return render_404 unless upload_model && upload_mount
+
+    @model = upload_model.find(params[:id])
   end
 
   def authorize_access!
@@ -68,17 +53,55 @@ def render_unauthorized
     end
   end
 
-  def upload_model_class
-    MODEL_CLASSES[params[:model]] || raise(UnknownUploadModelError)
+  def upload_model
+    upload_models = {
+      "user"    => User,
+      "project" => Project,
+      "note"    => Note,
+      "group"   => Group,
+      "appearance" => Appearance,
+      "personal_snippet" => PersonalSnippet
+    }
+
+    upload_models[params[:model]]
+  end
+
+  def upload_mount
+    return true unless params[:mounted_as]
+
+    upload_mounts = %w(avatar attachment file logo header_logo)
+
+    if upload_mounts.include?(params[:mounted_as])
+      params[:mounted_as]
+    end
   end
 
-  def upload_model_class_has_mounts?
-    upload_model_class < CarrierWave::Mount::Extension
+  def uploader
+    return @uploader if defined?(@uploader)
+
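+    # nil => a standalone personal snippet upload; PersonalSnippet => a
+    # model-scoped personal upload; anything else => a CarrierWave mount on
+    # the model, redirecting when the file lives on object storage.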
+    case model
+    when nil
+      @uploader = PersonalFileUploader.new(nil, params[:secret])
+
+      @uploader.retrieve_from_store!(params[:filename])
+    when PersonalSnippet
+      @uploader = PersonalFileUploader.new(model, params[:secret])
+
+      @uploader.retrieve_from_store!(params[:filename])
+    else
+      @uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
+
+      redirect_to @uploader.url unless @uploader.file_storage?
+    end
+
+    @uploader
   end
 
-  def upload_mount_satisfied?
-    return true unless upload_model_class_has_mounts?
+  def uploader_class
+    PersonalFileUploader
+  end
 
-    upload_model_class.uploader_options.has_key?(upload_mount)
+  def model
+    @model ||= find_model
   end
 end
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index dcd14c08f3c9..76cfe28742aa 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -11,7 +11,6 @@ class Appearance < ActiveRecord::Base
 
   mount_uploader :logo,         AttachmentUploader
   mount_uploader :header_logo,  AttachmentUploader
-
   has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   CACHE_KEY = 'current_appearance'.freeze
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index d35e37935fb0..10659030910d 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -1,30 +1,6 @@
 module Avatarable
   extend ActiveSupport::Concern
 
-  included do
-    prepend ShadowMethods
-
-    validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
-    validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
-
-    mount_uploader :avatar, AvatarUploader
-  end
-
-  module ShadowMethods
-    def avatar_url(**args)
-      # We use avatar_path instead of overriding avatar_url because of carrierwave.
-      # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
-
-      avatar_path(only_path: args.fetch(:only_path, true)) || super
-    end
-  end
-
-  def avatar_type
-    unless self.avatar.image?
-      self.errors.add :avatar, "only images allowed"
-    end
-  end
-
   def avatar_path(only_path: true)
     return unless self[:avatar].present?
 
diff --git a/app/models/group.rb b/app/models/group.rb
index 62b1322ebe6f..fddace033873 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -29,14 +29,18 @@ class Group < Namespace
   has_many :variables, class_name: 'Ci::GroupVariable'
   has_many :custom_attributes, class_name: 'GroupCustomAttribute'
 
-  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
+  validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
   validate :visibility_level_allowed_by_projects
   validate :visibility_level_allowed_by_sub_groups
   validate :visibility_level_allowed_by_parent
 
+  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
+
   validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
 
+  mount_uploader :avatar, AvatarUploader
+  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
   after_create :post_create_hook
   after_destroy :post_destroy_hook
   after_save :update_two_factor_requirement
@@ -112,6 +116,12 @@ def visibility_level_allowed?(level = self.visibility_level)
       visibility_level_allowed_by_sub_groups?(level)
   end
 
+  def avatar_url(**args)
+    # We use avatar_path instead of overriding avatar_url because of carrierwave.
+    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
+    avatar_path(args)
+  end
+
   def lfs_enabled?
     return false unless Gitlab.config.lfs.enabled
     return Gitlab.config.lfs.enabled if self[:lfs_enabled].nil?
diff --git a/app/models/note.rb b/app/models/note.rb
index a84db8982e59..184fbd5f5ae9 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -88,7 +88,6 @@ def values
     end
   end
 
-  # @deprecated attachments are handler by the MarkdownUploader
   mount_uploader :attachment, AttachmentUploader
 
   # Scopes
diff --git a/app/models/project.rb b/app/models/project.rb
index 90f5df6265dc..4def590a7a96 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -256,6 +256,9 @@ class Project < ActiveRecord::Base
   validates :star_count, numericality: { greater_than_or_equal_to: 0 }
   validate :check_limit, on: :create
   validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? }
+  validate :avatar_type,
+    if: ->(project) { project.avatar.present? && project.avatar_changed? }
+  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
   validate :visibility_level_allowed_by_group
   validate :visibility_level_allowed_as_fork
   validate :check_wiki_path_conflict
@@ -263,6 +266,7 @@ class Project < ActiveRecord::Base
     presence: true,
     inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
 
+  mount_uploader :avatar, AvatarUploader
   has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   # Scopes
@@ -285,6 +289,7 @@ class Project < ActiveRecord::Base
   scope :non_archived, -> { where(archived: false) }
   scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct }
   scope :with_push, -> { joins(:events).where('events.action = ?', Event::PUSHED) }
+
   scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') }
   scope :with_statistics, -> { includes(:statistics) }
   scope :with_shared_runners, -> { where(shared_runners_enabled: true) }
@@ -918,12 +923,20 @@ def jira_tracker?
     issues_tracker.to_param == 'jira'
   end
 
+  def avatar_type
+    unless self.avatar.image?
+      self.errors.add :avatar, 'only images allowed'
+    end
+  end
+
   def avatar_in_git
     repository.avatar
   end
 
   def avatar_url(**args)
-    Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git
+    # We use avatar_path instead of overriding avatar_url because of carrierwave.
+    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
+    avatar_path(args) || (Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git)
   end
 
   # For compatibility with old code
diff --git a/app/models/upload.rb b/app/models/upload.rb
index fb55fd8007b5..f194d7bdb808 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,11 +9,22 @@ class Upload < ActiveRecord::Base
   validates :model, presence: true
   validates :uploader, presence: true
 
-  before_save  :calculate_checksum!, if: :foreground_checksummable?
-  after_commit :schedule_checksum,   if: :checksummable?
+  before_save  :calculate_checksum, if:     :foreground_checksum?
+  after_commit :schedule_checksum,  unless: :foreground_checksum?
 
-  def self.hexdigest(path)
-    Digest::SHA256.file(path).hexdigest
+  def self.remove_path(path)
+    where(path: path).destroy_all
+  end
+
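+  # Drop any stale Upload row for this path before recording the new one.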
+  def self.record(uploader)
+    remove_path(uploader.relative_path)
+
+    create(
+      size: uploader.file.size,
+      path: uploader.relative_path,
+      model: uploader.model,
+      uploader: uploader.class.to_s
+    )
   end
 
   def absolute_path
@@ -22,18 +33,10 @@ def absolute_path
     uploader_class.absolute_path(self)
   end
 
-  def calculate_checksum!
-    self.checksum = nil
-    return unless checksummable?
+  def calculate_checksum
+    return unless exist?
 
-    self.checksum = self.class.hexdigest(absolute_path)
-  end
-
-  def build_uploader
-    uploader_class.new(model).tap do |uploader|
-      uploader.upload = self
-      uploader.retrieve_from_store!(identifier)
-    end
+    self.checksum = Digest::SHA256.file(absolute_path).hexdigest
   end
 
   def exist?
@@ -42,16 +45,8 @@ def exist?
 
   private
 
-  def checksummable?
-    checksum.nil? && local? && exist?
-  end
-
-  def local?
-    true
-  end
-
-  def foreground_checksummable?
-    checksummable? && size <= CHECKSUM_THRESHOLD
+  def foreground_checksum?
+    size <= CHECKSUM_THRESHOLD
   end
 
   def schedule_checksum
@@ -62,10 +57,6 @@ def relative_path?
     !path.start_with?('/')
   end
 
-  def identifier
-    File.basename(path)
-  end
-
   def uploader_class
     Object.const_get(uploader)
   end
diff --git a/app/models/user.rb b/app/models/user.rb
index 89e787c32741..fb5d56a68b0e 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -137,7 +137,6 @@ def update_tracked_fields!(request)
   has_many :assigned_merge_requests,  dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent
 
   has_many :custom_attributes, class_name: 'UserCustomAttribute'
-  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
 
   #
   # Validations
@@ -160,10 +159,12 @@ def update_tracked_fields!(request)
   validate :namespace_uniq, if: :username_changed?
   validate :namespace_move_dir_allowed, if: :username_changed?
 
+  validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
   validate :unique_email, if: :email_changed?
   validate :owns_notification_email, if: :notification_email_changed?
   validate :owns_public_email, if: :public_email_changed?
   validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
+  validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
 
   before_validation :sanitize_attrs
   before_validation :set_notification_email, if: :email_changed?
@@ -224,6 +225,9 @@ def inactive_message
     end
   end
 
+  mount_uploader :avatar, AvatarUploader
+  has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
   # Scopes
   scope :admins, -> { where(admin: true) }
   scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
@@ -523,6 +527,12 @@ def namespace_move_dir_allowed
     end
   end
 
+  def avatar_type
+    unless avatar.image?
+      errors.add :avatar, "only images allowed"
+    end
+  end
+
   def unique_email
     if !emails.exists?(email: email) && Email.exists?(email: email)
       errors.add(:email, 'has already been taken')
@@ -850,7 +860,9 @@ def temp_oauth_email?
   end
 
   def avatar_url(size: nil, scale: 2, **args)
-    GravatarService.new.execute(email, size, scale, username: username)
+    # We use avatar_path instead of overriding avatar_url because of carrierwave.
+    # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
+    avatar_path(args) || GravatarService.new.execute(email, size, scale, username: username)
   end
 
   def primary_email_verified?
diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb
index bc897d891d5a..f8aaec8a9c06 100644
--- a/app/services/projects/hashed_storage/migrate_attachments_service.rb
+++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb
@@ -14,9 +14,9 @@ def execute
         @old_path = project.full_path
         @new_path = project.disk_path
 
-        origin = FileUploader.absolute_base_dir(project)
+        origin = FileUploader.dynamic_path_segment(project)
         project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
-        target = FileUploader.absolute_base_dir(project)
+        target = FileUploader.dynamic_path_segment(project)
 
         result = move_folder!(origin, target)
         project.save!
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index 4930fb2fca72..109eb2fea0b5 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -1,12 +1,10 @@
 class AttachmentUploader < GitlabUploader
+  include RecordsUploads
   include UploaderHelper
-  include RecordsUploads::Concern
 
   storage :file
 
-  private
-
-  def dynamic_segment
-    File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
+  def store_dir
+    "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
   end
 end
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index 5c8e1cea62e0..cbb79376d5f8 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -1,24 +1,25 @@
 class AvatarUploader < GitlabUploader
+  include RecordsUploads
   include UploaderHelper
-  include RecordsUploads::Concern
 
   storage :file
 
-  def exists?
-    model.avatar.file && model.avatar.file.present?
+  def store_dir
+    "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
   end
 
-  def move_to_cache
-    false
+  def exists?
+    model.avatar.file && model.avatar.file.present?
   end
 
+  # We set move_to_store and move_to_cache to 'false' to prevent stealing
+  # the avatar file from a project when forking it.
+  # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
   def move_to_store
     false
   end
 
-  private
-
-  def dynamic_segment
-    File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
+  def move_to_cache
+    false
   end
 end
diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb
index e7af1483d233..00c2888d2241 100644
--- a/app/uploaders/file_mover.rb
+++ b/app/uploaders/file_mover.rb
@@ -21,8 +21,7 @@ def move
   end
 
   def update_markdown
-    updated_text = model.read_attribute(update_field)
-                        .gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
+    updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown)
     model.update_attribute(update_field, updated_text)
 
     true
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index 85ae9863b130..0b591e3bbbbe 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -1,38 +1,23 @@
-# This class breaks the actual CarrierWave concept.
-# Every uploader should use a base_dir that is model agnostic so we can build
-# back URLs from base_dir-relative paths saved in the `Upload` model.
-#
-# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path)
-# there is no way to build back the correct file path without the model, which defies
-# CarrierWave way of storing files.
-#
 class FileUploader < GitlabUploader
+  include RecordsUploads
   include UploaderHelper
-  include RecordsUploads::Concern
 
   MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
-  DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
 
   storage :file
 
-  def self.root
-    File.join(options.storage_path, 'uploads')
-  end
-
-  def self.absolute_path(upload)
+  def self.absolute_path(upload_record)
     File.join(
-      absolute_base_dir(upload.model),
-      upload.path # already contain the dynamic_segment, see #upload_path
+      self.dynamic_path_segment(upload_record.model),
+      upload_record.path
     )
   end
 
-  def self.base_dir(model)
-    model_path_segment(model)
-  end
-
-  # used in migrations and import/exports
-  def self.absolute_base_dir(model)
-    File.join(root, base_dir(model))
+  # Not using `GitlabUploader.base_dir` because all project namespaces are in
+  # the `public/uploads` dir.
+  #
+  def self.base_dir
+    root_dir
   end
 
   # Returns the part of `store_dir` that can change based on the model's current
@@ -44,96 +29,63 @@ def self.absolute_base_dir(model)
   # model - Object that responds to `full_path` and `disk_path`
   #
   # Returns a String without a trailing slash
-  def self.model_path_segment(model)
+  def self.dynamic_path_segment(model)
     if model.hashed_storage?(:attachments)
-      model.disk_path
+      dynamic_path_builder(model.disk_path)
     else
-      model.full_path
+      dynamic_path_builder(model.full_path)
     end
   end
 
-  def self.upload_path(secret, identifier)
-    File.join(secret, identifier)
-  end
-
-  def self.generate_secret
-    SecureRandom.hex
+  # Auxiliary method to build dynamic path segment when not using a project model
+  #
+  # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic
+  def self.dynamic_path_builder(path)
+    File.join(CarrierWave.root, base_dir, path)
   end
 
   attr_accessor :model
+  attr_reader :secret
 
   def initialize(model, secret = nil)
     @model = model
-    @secret = secret
-  end
-
-  def base_dir
-    self.class.base_dir(@model)
+    @secret = secret || generate_secret
   end
 
-  # we don't need to know the actual path, an uploader instance should be
-  # able to yield the file content on demand, so we should build the digest
-  def absolute_path
-    self.class.absolute_path(@upload)
+  def store_dir
+    File.join(dynamic_path_segment, @secret)
   end
 
-  def upload_path
-    self.class.upload_path(dynamic_segment, identifier)
+  def relative_path
+    self.file.path.sub("#{dynamic_path_segment}/", '')
   end
 
-  def model_path_segment
-    self.class.model_path_segment(@model)
+  def to_markdown
+    to_h[:markdown]
   end
 
-  def store_dir
-    File.join(base_dir, dynamic_segment)
-  end
+  def to_h
+    filename = image_or_video? ? self.file.basename : self.file.filename
+    escaped_filename = filename.gsub("]", "\\]")
 
-  def markdown_link
-    markdown = "[#{markdown_name}](#{secure_url})"
+    markdown = "[#{escaped_filename}](#{secure_url})"
     markdown.prepend("!") if image_or_video? || dangerous?
-    markdown
-  end
 
-  def to_h
     {
-      alt:      markdown_name,
+      alt:      filename,
       url:      secure_url,
-      markdown: markdown_link
+      markdown: markdown
     }
   end
 
-  def filename
-    self.file.filename
-  end
-
-  # the upload does not hold the secret, but holds the path
-  # which contains the secret: extract it
-  def upload=(value)
-    if matches = DYNAMIC_PATH_PATTERN.match(value.path)
-      @secret = matches[:secret]
-      @identifier = matches[:identifier]
-    end
-
-    super
-  end
-
-  def secret
-    @secret ||= self.class.generate_secret
-  end
-
   private
 
-  def markdown_name
-    (image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
+  def dynamic_path_segment
+    self.class.dynamic_path_segment(model)
   end
 
-  def identifier
-    @identifier ||= filename
-  end
-
-  def dynamic_segment
-    secret
+  def generate_secret
+    SecureRandom.hex
   end
 
   def secure_url
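
With this revert, `FileUploader#store_dir` is the model-derived dynamic path segment joined with a per-upload secret, and `relative_path` strips that segment back off. A minimal sketch of the round trip, with a stand-in dynamic segment and filename (the real values come from the project model and CarrierWave):

require 'securerandom'

# Stand-ins for the model-derived segment and the stored filename.
dynamic_path_segment = '/home/git/gitlab/public/uploads/group/project'
secret = SecureRandom.hex # 32 hex chars, like FileUploader's secret

store_dir = File.join(dynamic_path_segment, secret)
file_path = File.join(store_dir, 'screenshot.png')

# relative_path drops the dynamic segment, leaving "<secret>/<filename>",
# which is the portion persisted in the uploads table.
relative_path = file_path.sub("#{dynamic_path_segment}/", '')

puts relative_path # => "<secret>/screenshot.png"
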
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index b12829efe734..7f72b3ce4711 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -1,31 +1,27 @@
 class GitlabUploader < CarrierWave::Uploader::Base
-  class_attribute :options
-
-  class << self
-    # DSL setter
-    def storage_options(options)
-      self.options = options
-    end
-
-    def root
-      options.storage_path
-    end
+  def self.absolute_path(upload_record)
+    File.join(CarrierWave.root, upload_record.path)
+  end
 
-    # represent the directory namespacing at the class level
-    def base_dir
-      options.fetch('base_dir', '')
-    end
+  def self.root_dir
+    'uploads'
+  end
 
-    def file_storage?
-      storage == CarrierWave::Storage::File
-    end
+  # When object storage is used, keep the `root_dir` as `base_dir`.
+  # The files aren't really in folders there; they just have a name.
+  # Files that contain user input in their name also contain a hash, so
+  # the names are still unique.
+  #
+  # This method is overridden in the `FileUploader`
+  def self.base_dir
+    return root_dir unless file_storage?
 
-    def absolute_path(upload_record)
-      File.join(root, upload_record.path)
-    end
+    File.join(root_dir, '-', 'system')
   end
 
-  storage_options Gitlab.config.uploads
+  def self.file_storage?
+    self.storage == CarrierWave::Storage::File
+  end
 
   delegate :base_dir, :file_storage?, to: :class
 
@@ -35,28 +31,34 @@ def file_cache_storage?
 
   # Reduce disk IO
   def move_to_cache
-    file_storage?
+    true
   end
 
   # Reduce disk IO
   def move_to_store
-    file_storage?
+    true
   end
 
-  def exists?
-    file.present?
-  end
-
-  def store_dir
-    File.join(base_dir, dynamic_segment)
+  # Designed to be overridden by child uploaders that have a dynamic path
+  # segment -- that is, a path that changes based on mutable attributes of its
+  # associated model
+  #
+  # For example, `FileUploader` builds the storage path based on the associated
+  # project model's `path_with_namespace` value, which can change when the
+  # project or its containing namespace is moved or renamed.
+  def relative_path
+    self.file.path.sub("#{root}/", '')
   end
 
-  def cache_dir
-    File.join(root, base_dir, 'tmp/cache')
+  def exists?
+    file.present?
   end
 
+  # Override this if you don't want files saved to the Rails.root directory by default
   def work_dir
-    File.join(root, base_dir, 'tmp/work')
+    # Default path set by CarrierWave:
+    # https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L182
+    CarrierWave.tmp_path
   end
 
   def filename
@@ -65,13 +67,6 @@ def filename
 
   private
 
-  # Designed to be overridden by child uploaders that have a dynamic path
-  # segment -- that is, a path that changes based on mutable attributes of its
-  # associated model
-  def dynamic_segment
-    raise(NotImplementedError)
-  end
-
   # To prevent files from moving across filesystems, override the default
   # implementation:
   # http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183
@@ -79,6 +74,6 @@ def workfile_path(for_file = original_filename)
     # To be safe, keep this directory outside of the cache directory
     # because calling CarrierWave.clean_cache_files! will remove any files in
     # the cache directory.
-    File.join(work_dir, cache_id, version_name.to_s, for_file)
+    File.join(work_dir, @cache_id, version_name.to_s, for_file)
   end
 end
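
The restored `.base_dir` keeps remote object storage flat under `uploads` while local file storage nests under `uploads/-/system`. A small standalone sketch of that branch, with `file_storage?` reduced to a plain argument:

ROOT_DIR = 'uploads'.freeze

# Mirrors the restored GitlabUploader.base_dir: object stores have no real
# folders, so only the root prefix is kept; local storage adds "-/system".
def base_dir(file_storage)
  return ROOT_DIR unless file_storage

  File.join(ROOT_DIR, '-', 'system')
end

puts base_dir(false) # => "uploads"
puts base_dir(true)  # => "uploads/-/system"
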
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index 0abb462ab7d7..15dfb5a5763a 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,7 +1,13 @@
 class JobArtifactUploader < GitlabUploader
-  extend Workhorse::UploadPath
+  storage :file
 
-  storage_options Gitlab.config.artifacts
+  def self.local_store_path
+    Gitlab.config.artifacts.path
+  end
+
+  def self.artifacts_upload_path
+    File.join(self.local_store_path, 'tmp/uploads/')
+  end
 
   def size
     return super if model.size.nil?
@@ -10,12 +16,24 @@ def size
   end
 
   def store_dir
-    dynamic_segment
+    default_local_path
+  end
+
+  def cache_dir
+    File.join(self.class.local_store_path, 'tmp/cache')
+  end
+
+  def work_dir
+    File.join(self.class.local_store_path, 'tmp/work')
   end
 
   private
 
-  def dynamic_segment
+  def default_local_path
+    File.join(self.class.local_store_path, default_path)
+  end
+
+  def default_path
     creation_date = model.created_at.utc.strftime('%Y_%m_%d')
 
     File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
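
`default_path` shards artifacts by a hash and then by creation date. The `disk_hash` helper is cut off by the hunk boundary above; assuming it is a SHA256 hex digest of the project ID (an assumption, marked below), the layout comes out as:

require 'digest'
require 'time'

project_id = 42
created_at = Time.parse('2018-01-22 10:00:00 UTC')

# Assumption: disk_hash is SHA256 of the project id; its definition is not
# visible in this hunk.
disk_hash = Digest::SHA256.hexdigest(project_id.to_s)
creation_date = created_at.utc.strftime('%Y_%m_%d')

puts File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, creation_date)
# => "<2 hex>/<2 hex>/<64-hex digest>/2018_01_22"
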
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 28c458d3ff17..4f7f8a63108b 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,15 +1,33 @@
 class LegacyArtifactUploader < GitlabUploader
-  extend Workhorse::UploadPath
+  storage :file
 
-  storage_options Gitlab.config.artifacts
+  def self.local_store_path
+    Gitlab.config.artifacts.path
+  end
+
+  def self.artifacts_upload_path
+    File.join(self.local_store_path, 'tmp/uploads/')
+  end
 
   def store_dir
-    dynamic_segment
+    default_local_path
+  end
+
+  def cache_dir
+    File.join(self.class.local_store_path, 'tmp/cache')
+  end
+
+  def work_dir
+    File.join(self.class.local_store_path, 'tmp/work')
   end
 
   private
 
-  def dynamic_segment
+  def default_local_path
+    File.join(self.class.local_store_path, default_path)
+  end
+
+  def default_path
     File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
 end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index e04c97ce1791..d11ebf0f9ca4 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,24 +1,19 @@
 class LfsObjectUploader < GitlabUploader
-  extend Workhorse::UploadPath
+  storage :file
 
-  # LfsObject are in `tmp/upload` instead of `tmp/uploads`
-  def self.workhorse_upload_path
-    File.join(root, 'tmp/upload')
+  def store_dir
+    "#{Gitlab.config.lfs.storage_path}/#{model.oid[0, 2]}/#{model.oid[2, 2]}"
   end
 
-  storage_options Gitlab.config.lfs
+  def cache_dir
+    "#{Gitlab.config.lfs.storage_path}/tmp/cache"
+  end
 
   def filename
     model.oid[4..-1]
   end
 
-  def store_dir
-    dynamic_segment
-  end
-
-  private
-
-  def dynamic_segment
-    File.join(model.oid[0, 2], model.oid[2, 2])
+  def work_dir
+    File.join(Gitlab.config.lfs.storage_path, 'tmp', 'work')
   end
 end
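
LFS objects are content-addressed: the first four hex characters of the OID become two directory levels and the rest becomes the filename. A sketch using the sample OID from the raw controller spec further down:

storage_path = '/home/git/gitlab/shared/lfs-objects' # example path
oid = '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897'

store_dir = "#{storage_path}/#{oid[0, 2]}/#{oid[2, 2]}"
filename  = oid[4..-1]

puts File.join(store_dir, filename)
# => ".../lfs-objects/91/ef/f75a49...c897"
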
diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb
index 993e85fbc132..672126e9ec23 100644
--- a/app/uploaders/namespace_file_uploader.rb
+++ b/app/uploaders/namespace_file_uploader.rb
@@ -1,19 +1,15 @@
 class NamespaceFileUploader < FileUploader
-  # Re-Override
-  def self.root
-    options.storage_path
+  def self.base_dir
+    File.join(root_dir, '-', 'system', 'namespace')
   end
 
-  def self.base_dir(model)
-    File.join(options.base_dir, 'namespace', model_path_segment(model))
+  def self.dynamic_path_segment(model)
+    dynamic_path_builder(model.id.to_s)
   end
 
-  def self.model_path_segment(model)
-    File.join(model.id.to_s)
-  end
+  private
 
-  # Re-Override
-  def store_dir
-    File.join(base_dir, dynamic_segment)
+  def secure_url
+    File.join('/uploads', @secret, file.filename)
   end
 end
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index e7d9ecd3222b..3298ad104ec9 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -1,27 +1,23 @@
 class PersonalFileUploader < FileUploader
-  # Re-Override
-  def self.root
-    options.storage_path
+  def self.dynamic_path_segment(model)
+    File.join(CarrierWave.root, model_path(model))
   end
 
-  def self.base_dir(model)
-    File.join(options.base_dir, model_path_segment(model))
-  end
-
-  def self.model_path_segment(model)
-    return 'temp/' unless model
-
-    File.join(model.class.to_s.underscore, model.id.to_s)
-  end
-
-  # Revert-Override
-  def store_dir
-    File.join(base_dir, dynamic_segment)
+  def self.base_dir
+    File.join(root_dir, '-', 'system')
   end
 
   private
 
   def secure_url
-    File.join('/', base_dir, secret, file.filename)
+    File.join(self.class.model_path(model), secret, file.filename)
+  end
+
+  def self.model_path(model)
+    if model
+      File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s)
+    else
+      File.join("/#{base_dir}", 'temp')
+    end
   end
 end
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index dfb8dccec57d..feb4f04d7b75 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -1,61 +1,35 @@
 module RecordsUploads
-  module Concern
-    extend ActiveSupport::Concern
+  extend ActiveSupport::Concern
 
-    attr_accessor :upload
-
-    included do
-      after  :store,  :record_upload
-      before :remove, :destroy_upload
-    end
-
-    # After storing an attachment, create a corresponding Upload record
-    #
-    # NOTE: We're ignoring the argument passed to this callback because we want
-    # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
-    # `Tempfile` object the callback gets.
-    #
-    # Called `after :store`
-    def record_upload(_tempfile = nil)
-      return unless model
-      return unless file && file.exists?
-
-      Upload.transaction do
-        uploads.where(path: upload_path).delete_all
-        upload.destroy! if upload
-
-        self.upload = build_upload_from_uploader(self)
-        upload.save!
-      end
-    end
-
-    def upload_path
-      File.join(store_dir, filename.to_s)
-    end
-
-    private
+  included do
+    after :store,   :record_upload
+    before :remove, :destroy_upload
+  end
 
-    def uploads
-      Upload.order(id: :desc).where(uploader: self.class.to_s)
-    end
+  # After storing an attachment, create a corresponding Upload record
+  #
+  # NOTE: We're ignoring the argument passed to this callback because we want
+  # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
+  # `Tempfile` object the callback gets.
+  #
+  # Called `after :store`
+  def record_upload(_tempfile = nil)
+    return unless model
+    return unless file_storage?
+    return unless file.exists?
+
+    Upload.record(self)
+  end
 
-    def build_upload_from_uploader(uploader)
-      Upload.new(
-        size: uploader.file.size,
-        path: uploader.upload_path,
-        model: uploader.model,
-        uploader: uploader.class.to_s
-      )
-    end
+  private
 
-    # Before removing an attachment, destroy any Upload records at the same path
-    #
-    # Called `before :remove`
-    def destroy_upload(*args)
-      return unless file && file.exists?
+  # Before removing an attachment, destroy any Upload records at the same path
+  #
+  # Called `before :remove`
+  def destroy_upload(*args)
+    return unless file_storage?
+    return unless file
 
-      self.upload = nil
-      uploads.where(path: upload_path).delete_all
-    end
+    Upload.remove_path(relative_path)
   end
 end
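
The restored concern delegates bookkeeping to `Upload.record` and `Upload.remove_path`. Judging by the model specs later in this patch, `Upload.record` first clears any row at the uploader's `relative_path`, then creates a fresh one. A runnable in-memory sketch of that contract (not the real ActiveRecord model):

require 'ostruct'

# In-memory stand-in for the Upload model: drop any record at the same
# path, then store the uploader's metadata.
class FakeUpload
  RECORDS = []

  def self.remove_path(path)
    RECORDS.reject! { |r| r[:path] == path }
  end

  def self.record(uploader)
    remove_path(uploader.relative_path)
    RECORDS << { size: uploader.file.size, path: uploader.relative_path }
  end
end

uploader = OpenStruct.new(file: OpenStruct.new(size: 12_345),
                          relative_path: 'secret/file.jpg')
FakeUpload.record(uploader)
FakeUpload.record(uploader) # re-recording replaces rather than duplicates
puts FakeUpload::RECORDS.size # => 1
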
diff --git a/app/uploaders/uploader_helper.rb b/app/uploaders/uploader_helper.rb
index fd446d310922..7635c20ab3a5 100644
--- a/app/uploaders/uploader_helper.rb
+++ b/app/uploaders/uploader_helper.rb
@@ -32,7 +32,14 @@ def dangerous?
   def extension_match?(extensions)
     return false unless file
 
-    extension = file.try(:extension) || File.extname(file.path).delete('.')
+    extension =
+      if file.respond_to?(:extension)
+        file.extension
+      else
+        # Not all CarrierWave storages respond to :extension
+        File.extname(file.path).delete('.')
+      end
+
     extensions.include?(extension.downcase)
   end
 end
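
The rewritten `extension_match?` prefers the storage object's own `#extension` and only falls back to parsing the path, since not every CarrierWave storage responds to `:extension`. The fallback branch in isolation:

# Fallback for storages without #extension: derive it from the path.
def extension_from_path(path)
  File.extname(path).delete('.')
end

puts extension_from_path('uploads/secret/movie.MP4').downcase # => "mp4"
puts extension_from_path('uploads/secret/README')             # => ""
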
diff --git a/app/uploaders/workhorse.rb b/app/uploaders/workhorse.rb
deleted file mode 100644
index 782032cf5163..000000000000
--- a/app/uploaders/workhorse.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-module Workhorse
-  module UploadPath
-    def workhorse_upload_path
-      File.join(root, base_dir, 'tmp/uploads')
-    end
-  end
-end
diff --git a/app/workers/upload_checksum_worker.rb b/app/workers/upload_checksum_worker.rb
index 65d40336f18b..9222760c0316 100644
--- a/app/workers/upload_checksum_worker.rb
+++ b/app/workers/upload_checksum_worker.rb
@@ -3,7 +3,7 @@ class UploadChecksumWorker
 
   def perform(upload_id)
     upload = Upload.find(upload_id)
-    upload.calculate_checksum!
+    upload.calculate_checksum
     upload.save!
   rescue ActiveRecord::RecordNotFound
     Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping")
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 33230b9355dc..25f4085deb29 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -152,12 +152,6 @@ production: &base
     # The location where LFS objects are stored (default: shared/lfs-objects).
     # storage_path: shared/lfs-objects
 
-  ## Uploads (attachments, avatars, etc...)
-  uploads:
-    # The location where uploads objects are stored (default: public/).
-    # storage_path: public/
-    # base_dir: uploads/-/system
-
   ## GitLab Pages
   pages:
     enabled: false
@@ -650,8 +644,6 @@ test:
     enabled: false
   artifacts:
     path: tmp/tests/artifacts
-  uploads:
-    storage_path: tmp/tests/public
   gitlab:
     host: localhost
     port: 80
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 5ad46d47cb63..5b4e6b5db882 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -300,10 +300,8 @@ def cron_for_usage_ping
 #
 Settings['artifacts'] ||= Settingslogic.new({})
 Settings.artifacts['enabled']      = true if Settings.artifacts['enabled'].nil?
-Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
-# Settings.artifact['path'] is deprecated, use `storage_path` instead
-Settings.artifacts['path']         = Settings.artifacts['storage_path']
-Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['path']         = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
+Settings.artifacts['max_size']   ||= 100 # in megabytes
 
 #
 # Registry
@@ -340,13 +338,6 @@ def cron_for_usage_ping
 Settings.lfs['enabled']      = true if Settings.lfs['enabled'].nil?
 Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
 
-#
-# Uploads
-#
-Settings['uploads'] ||= Settingslogic.new({})
-Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
-Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
-
 #
 # Mattermost
 #
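
With the uploads settings block gone, `Settings.artifacts['path']` is again resolved in one step through `Settings.absolute`. A sketch of the fallback chain, with a hypothetical `absolute` helper standing in for the real one (assumed to resolve relative paths against the application root):

require 'pathname'

APP_ROOT = Pathname.new('/home/git/gitlab') # example root

# Hypothetical stand-in for Settings.absolute: keep absolute paths as-is,
# resolve relative ones against the application root.
def absolute(path)
  path.start_with?('/') ? path : APP_ROOT.join(path).to_s
end

shared_path = '/home/git/gitlab/shared'
configured  = nil # artifacts.path not set in gitlab.yml

puts absolute(configured || File.join(shared_path, 'artifacts'))
# => "/home/git/gitlab/shared/artifacts"
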
diff --git a/db/migrate/20180119135717_add_uploader_index_to_uploads.rb b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
deleted file mode 100644
index a678c3d049f6..000000000000
--- a/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class AddUploaderIndexToUploads < ActiveRecord::Migration
-  include Gitlab::Database::MigrationHelpers
-
-  DOWNTIME = false
-
-  disable_ddl_transaction!
-
-  def up
-    remove_concurrent_index :uploads, :path
-    add_concurrent_index    :uploads, [:uploader, :path], using: :btree
-  end
-
-  def down
-    remove_concurrent_index :uploads, [:uploader, :path]
-    add_concurrent_index    :uploads, :path, using: :btree
-  end
-end
diff --git a/db/schema.rb b/db/schema.rb
index 01a2df13dd30..0d97b6f9ddd2 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -1755,7 +1755,7 @@
 
   add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
   add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
-  add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
+  add_index "uploads", ["path"], name: "index_uploads_on_path", using: :btree
 
   create_table "user_agent_details", force: :cascade do |t|
     t.string "user_agent", null: false
diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md
index 76354b928203..cf00e24e11a8 100644
--- a/doc/development/file_storage.md
+++ b/doc/development/file_storage.md
@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts:
   - User snippet attachments
 * Project
   - Project avatars
-  - Issues/MR/Notes Markdown attachments
-  - Issues/MR/Notes Legacy Markdown attachments
+  - Issues/MR Markdown attachments
+  - Issues/MR Legacy Markdown attachments
   - CI Build Artifacts
   - LFS Objects
 
@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts:
 GitLab started saving everything on local disk. While directory location changed from previous versions,
 they are still not 100% standardized. You can see them below:
 
-| Description                           | In DB? | Relative path (from CarrierWave.root)                       | Uploader class         | model_type |
+| Description                           | In DB? | Relative path                                               | Uploader class         | model_type |
 | ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
 | Instance logo                         | yes    | uploads/-/system/appearance/logo/:id/:filename              | `AttachmentUploader`   | Appearance |
 | Header logo                           | yes    | uploads/-/system/appearance/header_logo/:id/:filename       | `AttachmentUploader`   | Appearance |
@@ -33,107 +33,17 @@ they are still not 100% standardized. You can see them below:
 | User avatars                          | yes    | uploads/-/system/user/avatar/:id/:filename                  | `AvatarUploader`       | User       |
 | User snippet attachments              | yes    | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet    |
 | Project avatars                       | yes    | uploads/-/system/project/avatar/:id/:filename               | `AvatarUploader`       | Project    |
-| Issues/MR/Notes Markdown attachments        | yes    | uploads/:project_path_with_namespace/:random_hex/:filename  | `FileUploader`         | Project    |
-| Issues/MR/Notes Legacy Markdown attachments | no     | uploads/-/system/note/attachment/:id/:filename              | `AttachmentUploader`   | Note       |
+| Issues/MR Markdown attachments        | yes    | uploads/:project_path_with_namespace/:random_hex/:filename  | `FileUploader`         | Project    |
+| Issues/MR Legacy Markdown attachments | no     | uploads/-/system/note/attachment/:id/:filename              | `AttachmentUploader`   | Note       |
 | CI Artifacts (CE)                     | yes    | shared/artifacts/:year_:month/:project_id/:id               | `ArtifactUploader`     | Ci::Build  |
 | LFS Objects  (CE)                     | yes    | shared/lfs-objects/:hex/:hex/:object_hash                   | `LfsObjectUploader`    | LfsObject  |
 
 CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
-while in EE they inherit the `ObjectStorage` and store files in and S3 API compatible object store.
+while in EE they inherit the `ObjectStoreUploader` and store files in an S3 API compatible object store.
 
-In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout,
+In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout,
 instead of basing the path on the mutable `:project_path_with_namespace` variable, it's possible to use the
 hash of the project ID instead, if the project migrates to the new approach (introduced in 10.2).
 
-### Path segments
-
-Files are stored at multiple locations and use different path schemes. 
-All the `GitlabUploader` derived classes should comply with this path segment schema:
-
-```
-|   GitlabUploader
-| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
-| `<gitlab_root>/public/` | `uploads/-/system/`       | `user/avatar/:id/`                | `:filename`                      |
-| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
-| `CarrierWave.root`      | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`  | `CarrierWave::Uploader#filename` |
-|                         | `CarrierWave::Uploader#store_dir`                             |                                  | 
-
-|   FileUploader
-| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
-| `<gitlab_root>/shared/` | `artifacts/`              | `:year_:month/:id`                | `:filename`                      |
-| `<gitlab_root>/shared/` | `snippets/`               | `:secret/`                        | `:filename`                      |
-| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
-| `CarrierWave.root`      | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`  | `CarrierWave::Uploader#filename` |
-|                         | `CarrierWave::Uploader#store_dir`                             |                                  | 
-|                         |                           | `FileUploader#upload_path                                            |
-
-|   ObjectStore::Concern (store = remote)
-| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
-| `<bucket_name>`         | <ignored>                 | `user/avatar/:id/`                  | `:filename`                      |
-| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
-| `#fog_dir`              | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment`    | `CarrierWave::Uploader#filename` |
-|                         |                           | `ObjectStorage::Concern#store_dir`  |                                  | 
-|                         |                           | `ObjectStorage::Concern#upload_path                                    |
-```
-
-The `RecordsUploads::Concern` concern will create an `Upload` entry for every file stored by a `GitlabUploader` persisting the dynamic parts of the path using
-`GitlabUploader#dynamic_path`. You may then use the `Upload#build_uploader` method to manipulate the file.
-
-## Object Storage
-
-By including the `ObjectStorage::Concern` in the `GitlabUploader` derived class, you may enable the object storage for this uploader. To enable the object storage
-in your uploader, you need to either 1) include `RecordsUpload::Concern` and prepend `ObjectStorage::Extension::RecordsUploads` or 2) mount the uploader and create a new field named `<mount>_store`.
-
-The `CarrierWave::Uploader#store_dir` is overriden to
-
- - `GitlabUploader.base_dir` + `GitlabUploader.dynamic_segment` when the store is LOCAL
- - `GitlabUploader.dynamic_segment` when the store is REMOTE (the bucket name is used to namespace)
-
-### Using `ObjectStorage::Extension::RecordsUploads`
-
-> Note: this concern will automatically include `RecordsUploads::Concern` if not already included.
-
-The `ObjectStorage::Concern` uploader will search for the matching `Upload` to select the correct object store. The `Upload` is mapped using `#store_dirs + identifier` for each store (LOCAL/REMOTE).
-
-```ruby
-class SongUploader < GitlabUploader
-  include RecordsUploads::Concern
-  include ObjectStorage::Concern
-  prepend ObjectStorage::Extension::RecordsUploads
-
-  ...
-end
-
-class Thing < ActiveRecord::Base
-  mount :theme, SongUploader # we have a great theme song!
-
-  ...
-end
-```
-
-### Using a mounted uploader
-
-The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store.
-This column must be present in the model schema.
-
-```ruby
-class SongUploader < GitlabUploader
-  include ObjectStorage::Concern
-
-  ...
-end
-
-class Thing < ActiveRecord::Base
-  attr_reader :theme_store # this is an ActiveRecord attribute
-  mount :theme, SongUploader # we have a great theme song!
-
-  def theme_store
-    super || ObjectStorage::Store::LOCAL
-  end
-
-  ...
-end
-```
-
 [CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
 [Hashed Storage]: ../administration/repository_storage_types.md
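
The retained Hashed Storage paragraph deserves a concrete illustration: the attachment path is derived from a digest of the immutable project ID rather than the rename-prone `:project_path_with_namespace`. A sketch assuming the SHA256-based `@hashed` layout GitLab uses:

require 'digest'

project_id = 93
hash = Digest::SHA256.hexdigest(project_id.to_s)

# Stable even if the project is renamed or moved between namespaces.
puts File.join('@hashed', hash[0..1], hash[2..3], hash)
# => "@hashed/<2 hex>/<2 hex>/<64-hex digest>"
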
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 1f80646a2ea5..80feb629d54c 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -215,9 +215,9 @@ class Runner < Grape::API
         job = authenticate_job!
         forbidden!('Job is not running!') unless job.running?
 
-        workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
-        artifacts = uploaded_file(:file, workhorse_upload_path)
-        metadata = uploaded_file(:metadata, workhorse_upload_path)
+        artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
+        artifacts = uploaded_file(:file, artifacts_upload_path)
+        metadata = uploaded_file(:metadata, artifacts_upload_path)
 
         bad_request!('Missing artifacts file!') unless artifacts
         file_to_large! unless artifacts.size < max_artifacts_size
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 4383124d150b..7a582a200561 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@
 module Backup
   class Artifacts < Files
     def initialize
-      super('artifacts', JobArtifactUploader.root)
+      super('artifacts', LegacyArtifactUploader.local_store_path)
     end
 
     def create_files_dir
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb
index 8a8e770940e9..d60e41d9f9de 100644
--- a/lib/gitlab/background_migration/populate_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb
@@ -143,7 +143,7 @@ def path_relative_to_upload_dir
         end
 
         def absolute_path
-          File.join(Gitlab.config.uploads.storage_path, path)
+          File.join(CarrierWave.root, path)
         end
       end
 
diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
index a7a1bbe17525..4e0121ca34d5 100644
--- a/lib/gitlab/background_migration/prepare_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
@@ -11,12 +11,9 @@ class PrepareUntrackedUploads # rubocop:disable Metrics/ClassLength
 
       FIND_BATCH_SIZE = 500
       RELATIVE_UPLOAD_DIR = "uploads".freeze
-      ABSOLUTE_UPLOAD_DIR = File.join(
-        Gitlab.config.uploads.storage_path,
-        RELATIVE_UPLOAD_DIR
-      )
+      ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
       FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
-      START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
+      START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
       EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
       EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
 
@@ -84,7 +81,7 @@ def yield_paths_in_batches(stdout, batch_size, &block)
         paths = []
 
         stdout.each_line("\0") do |line|
-          paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
+          paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
 
           if paths.size >= batch_size
             yield(paths)
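
The migration consumes `find ... -print0` output, so entries are NUL-separated and accumulated into fixed-size batches before each yield. A self-contained sketch of the parsing half of that loop over a canned string:

require 'stringio'

carrierwave_root = '/home/git/gitlab/public' # example root
root_regex = %r{\A#{Regexp.escape(carrierwave_root)}/}

# Simulated `find -print0` output: NUL-terminated absolute paths.
stdout = StringIO.new(
  "#{carrierwave_root}/uploads/a.jpg\0#{carrierwave_root}/uploads/b.jpg\0"
)

paths = []
stdout.each_line("\0") do |line|
  paths << line.chomp("\0").sub(root_regex, '')
end

p paths # => ["uploads/a.jpg", "uploads/b.jpg"]
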
diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb
index 3fdc3c27f739..8fab54896160 100644
--- a/lib/gitlab/gfm/uploads_rewriter.rb
+++ b/lib/gitlab/gfm/uploads_rewriter.rb
@@ -27,7 +27,7 @@ def rewrite(target_project)
           with_link_in_tmp_dir(file.file) do |open_tmp_file|
             new_uploader.store!(open_tmp_file)
           end
-          new_uploader.markdown_link
+          new_uploader.to_markdown
         end
       end
 
diff --git a/lib/gitlab/import_export/uploads_saver.rb b/lib/gitlab/import_export/uploads_saver.rb
index 2f08dda55fdc..627a487d5779 100644
--- a/lib/gitlab/import_export/uploads_saver.rb
+++ b/lib/gitlab/import_export/uploads_saver.rb
@@ -17,13 +17,15 @@ def save
         false
       end
 
-      def uploads_path
-        FileUploader.absolute_base_dir(@project)
-      end
+      private
 
       def uploads_export_path
         File.join(@shared.export_path, 'uploads')
       end
+
+      def uploads_path
+        FileUploader.dynamic_path_segment(@project)
+      end
     end
   end
 end
diff --git a/lib/gitlab/uploads_transfer.rb b/lib/gitlab/uploads_transfer.rb
index 7d7400bdabf8..b5f41240529c 100644
--- a/lib/gitlab/uploads_transfer.rb
+++ b/lib/gitlab/uploads_transfer.rb
@@ -1,7 +1,7 @@
 module Gitlab
   class UploadsTransfer < ProjectTransfer
     def root_dir
-      FileUploader.root
+      File.join(CarrierWave.root, FileUploader.base_dir)
     end
   end
 end
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index b3f8b0d174dc..633da44b22d2 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -55,14 +55,14 @@ def git_http_ok(repository, is_wiki, user, action, show_all_refs: false)
 
       def lfs_upload_ok(oid, size)
         {
-          StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
+          StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload",
           LfsOid: oid,
           LfsSize: size
         }
       end
 
       def artifact_upload_ok
-        { TempPath: JobArtifactUploader.workhorse_upload_path }
+        { TempPath: JobArtifactUploader.artifacts_upload_path }
       end
 
       def send_git_blob(repository, blob)
@@ -147,11 +147,8 @@ def send_git_patch(repository, diff_refs)
       end
 
       def send_artifacts_entry(build, entry)
-        file = build.artifacts_file
-        archive = file.file_storage? ? file.path : file.url
-
         params = {
-          'Archive' => archive,
+          'Archive' => build.artifacts_file.path,
           'Entry' => Base64.encode64(entry.to_s)
         }
 
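
Workhorse is driven by plain parameter hashes: the archive location plus a Base64-encoded entry name. A sketch of the payload the restored `send_artifacts_entry` builds, with example values:

require 'base64'

archive_path = '/home/git/gitlab/shared/artifacts/2018_01/42/7/artifacts.zip'
entry        = 'coverage/index.html'

params = {
  'Archive' => archive_path,
  'Entry'   => Base64.encode64(entry)
}

# The hash is serialized into response headers for gitlab-workhorse,
# which streams the requested entry out of the archive.
puts params.inspect
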
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 6a1869d1a48b..67a11e56e947 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -6,7 +6,5 @@
     { group_id: model }
   end
 
-  it_behaves_like 'handle uploads' do
-    let(:uploader_class) { NamespaceFileUploader }
-  end
+  it_behaves_like 'handle uploads'
 end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 25a2e13fe1a6..12cb7b2647ff 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,7 +145,8 @@ def params
       context 'when using local file storage' do
         it_behaves_like 'a valid file' do
           let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
-          let(:archive_path) { JobArtifactUploader.root }
+          let(:store) { ObjectStoreUploader::LOCAL_STORE }
+          let(:archive_path) { JobArtifactUploader.local_store_path }
         end
       end
     end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b7df42168e00..3a0c3faa7b4d 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -53,7 +53,7 @@
           end
 
           it 'serves the file' do
-            expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
+            expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
             get(:show,
                 namespace_id: public_project.namespace.to_param,
                 project_id: public_project,
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 376b229ffc98..b1f601a19e59 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -180,7 +180,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
-
               response
             end
           end
@@ -197,7 +196,6 @@
         it_behaves_like 'content not cached without revalidation' do
           subject do
             get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
-
             response
           end
         end
@@ -222,7 +220,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
-
               response
             end
           end
@@ -242,7 +239,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
-
               response
             end
           end
@@ -295,7 +291,6 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
-
                   response
                 end
               end
@@ -327,7 +322,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
-
               response
             end
           end
@@ -347,7 +341,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
-
               response
             end
           end
@@ -391,7 +384,6 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
-
                   response
                 end
               end
@@ -428,7 +420,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
-
               response
             end
           end
@@ -448,7 +439,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
-
               response
             end
           end
@@ -501,7 +491,6 @@
               it_behaves_like 'content not cached without revalidation' do
                 subject do
                   get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
-
                   response
                 end
               end
@@ -533,7 +522,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
-
               response
             end
           end
@@ -553,7 +541,6 @@
           it_behaves_like 'content not cached without revalidation' do
             subject do
               get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
-
               response
             end
           end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 8c531cf59096..1512f5a0e58e 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,7 +18,7 @@
     end
 
     trait :with_avatar do
-      avatar { fixture_file_upload('spec/fixtures/dk.png') }
+      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
     end
 
     trait :access_requestable do
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 3f4e408b3a63..2defb4935ad0 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -122,11 +122,11 @@
     end
 
     trait :with_attachment do
-      attachment { fixture_file_upload(Rails.root.join( "spec/fixtures/dk.png"), "image/png") }
+      attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
     end
 
     trait :with_svg_attachment do
-      attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
+      attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
     end
 
     transient do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 16d328a5bc27..d0f3911f7301 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -90,7 +90,7 @@
     end
 
     trait :with_avatar do
-      avatar { fixture_file_upload('spec/fixtures/dk.png') }
+      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
     end
 
     trait :broken_storage do
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index c8cfe251d270..c39500faea1a 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -1,42 +1,24 @@
 FactoryBot.define do
   factory :upload do
     model { build(:project) }
+    path { "uploads/-/system/project/avatar/avatar.jpg" }
     size 100.kilobytes
     uploader "AvatarUploader"
 
-    # we should build a mount agnostic upload by default
-    transient do
-      mounted_as :avatar
-      secret SecureRandom.hex
-    end
-
-    # this needs to comply with RecordsUpload::Concern#upload_path
-    path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
-
-    trait :personal_snippet_upload do
+    trait :personal_snippet do
       model { build(:personal_snippet) }
-      path { File.join(secret, 'myfile.jpg') }
       uploader "PersonalFileUploader"
     end
 
     trait :issuable_upload do
-      path { File.join(secret, 'myfile.jpg') }
+      path { "#{SecureRandom.hex}/myfile.jpg" }
       uploader "FileUploader"
     end
 
     trait :namespace_upload do
+      path { "#{SecureRandom.hex}/myfile.jpg" }
       model { build(:group) }
-      path { File.join(secret, 'myfile.jpg') }
       uploader "NamespaceFileUploader"
     end
-
-    trait :attachment_upload do
-      transient do
-        mounted_as :attachment
-      end
-
-      model { build(:note) }
-      uploader "AttachmentUploader"
-    end
   end
 end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 769fd656e7a4..e62e0b263ca0 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -38,7 +38,7 @@
     end
 
     trait :with_avatar do
-      avatar { fixture_file_upload('spec/fixtures/dk.png') }
+      avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
     end
 
     trait :two_factor_via_otp do
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 370c2490b97a..8bb9ebe0419b 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -23,27 +23,6 @@
     end
   end
 
-  # E.g. The installation is in use at the time of migration, and someone has
-  # just uploaded a file
-  shared_examples 'does not add files in /uploads/tmp' do
-    let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
-    before do
-      FileUtils.mkdir(File.dirname(tmp_file))
-      FileUtils.touch(tmp_file)
-    end
-
-    after do
-      FileUtils.rm(tmp_file)
-    end
-
-    it 'does not add files from /uploads/tmp' do
-      described_class.new.perform
-
-      expect(untracked_files_for_uploads.count).to eq(5)
-    end
-  end
-
   it 'ensures the untracked_files_for_uploads table exists' do
     expect do
       described_class.new.perform
@@ -130,8 +109,24 @@
         end
       end
 
+      # E.g. The installation is in use at the time of migration, and someone has
+      # just uploaded a file
       context 'when there are files in /uploads/tmp' do
-        it_behaves_like 'does not add files in /uploads/tmp'
+        let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+        before do
+          FileUtils.touch(tmp_file)
+        end
+
+        after do
+          FileUtils.rm(tmp_file)
+        end
+
+        it 'does not add files from /uploads/tmp' do
+          described_class.new.perform
+
+          expect(untracked_files_for_uploads.count).to eq(5)
+        end
       end
     end
   end
@@ -202,8 +197,24 @@
         end
       end
 
+      # E.g. The installation is in use at the time of migration, and someone has
+      # just uploaded a file
       context 'when there are files in /uploads/tmp' do
-        it_behaves_like 'does not add files in /uploads/tmp'
+        let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+        before do
+          FileUtils.touch(tmp_file)
+        end
+
+        after do
+          FileUtils.rm(tmp_file)
+        end
+
+        it 'does not add files from /uploads/tmp' do
+          described_class.new.perform
+
+          expect(untracked_files_for_uploads.count).to eq(5)
+        end
       end
     end
   end
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 326ed2f2ecfa..39e3b875c49f 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -17,7 +17,7 @@
     end
 
     let(:text) do
-      "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
+      "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}"
     end
 
     describe '#rewrite' do
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
index a685521cbf09..63992ea8ab8f 100644
--- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -4,6 +4,7 @@
   describe 'bundle a project Git repo' do
     let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
     let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+    let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
 
     before do
       allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
@@ -25,9 +26,9 @@
       end
 
       it 'copies the uploads to the project path' do
-        subject.restore
+        restorer.restore
 
-        uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('dummy.txt')
       end
@@ -43,9 +44,9 @@
       end
 
       it 'copies the uploads to the project path' do
-        subject.restore
+        restorer.restore
 
-        uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('dummy.txt')
       end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index 959779523f49..e8948de1f3af 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -30,7 +30,7 @@
       it 'copies the uploads to the export path' do
         saver.save
 
-        uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('banana_sample.gif')
       end
@@ -52,7 +52,7 @@
       it 'copies the uploads to the export path' do
         saver.save
 
-        uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
+        uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
 
         expect(uploads).to include('banana_sample.gif')
       end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 6b7dbad128ce..c3673a0e2a36 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -204,7 +204,7 @@
       let(:parent) { create(:group, name: 'parent', path: 'parent') }
       let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
       let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
-      let(:uploads_dir) { FileUploader.root }
+      let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
       let(:pages_dir) { File.join(TestEnv.pages_path) }
 
       before do
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 42f3d6097702..345382ea8c7d 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -45,6 +45,51 @@
     end
   end
 
+  describe '.remove_path' do
+    it 'removes all records at the given path' do
+      described_class.create!(
+        size: File.size(__FILE__),
+        path: __FILE__,
+        model: build_stubbed(:user),
+        uploader: 'AvatarUploader'
+      )
+
+      expect { described_class.remove_path(__FILE__) }
+        .to change { described_class.count }.from(1).to(0)
+    end
+  end
+
+  describe '.record' do
+    let(:fake_uploader) do
+      double(
+        file: double(size: 12_345),
+        relative_path: 'foo/bar.jpg',
+        model: build_stubbed(:user),
+        class: 'AvatarUploader'
+      )
+    end
+
+    it 'removes existing paths before creation' do
+      expect(described_class).to receive(:remove_path)
+        .with(fake_uploader.relative_path)
+
+      described_class.record(fake_uploader)
+    end
+
+    it 'creates a new record and assigns size, path, model, and uploader' do
+      upload = described_class.record(fake_uploader)
+
+      aggregate_failures do
+        expect(upload).to be_persisted
+        expect(upload.size).to eq fake_uploader.file.size
+        expect(upload.path).to eq fake_uploader.relative_path
+        expect(upload.model_id).to eq fake_uploader.model.id
+        expect(upload.model_type).to eq fake_uploader.model.class.to_s
+        expect(upload.uploader).to eq fake_uploader.class
+      end
+    end
+  end
+
   describe '#absolute_path' do
     it 'returns the path directly when already absolute' do
       path = '/path/to/namespace/project/secret/file.jpg'
@@ -66,27 +111,27 @@
     end
   end
 
-  describe '#calculate_checksum!' do
-    let(:upload) do
-      described_class.new(path: __FILE__,
-                          size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
-    end
-
-    it 'sets `checksum` to SHA256 sum of the file' do
+  describe '#calculate_checksum' do
+    it 'calculates the SHA256 sum' do
+      upload = described_class.new(
+        path: __FILE__,
+        size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
+      )
       expected = Digest::SHA256.file(__FILE__).hexdigest
 
-      expect { upload.calculate_checksum! }
+      expect { upload.calculate_checksum }
         .to change { upload.checksum }.from(nil).to(expected)
     end
 
-    it 'sets `checksum` to nil for a non-existant file' do
-      expect(upload).to receive(:exist?).and_return(false)
+    it 'returns nil for a non-existent file' do
+      upload = described_class.new(
+        path: __FILE__,
+        size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
+      )
 
-      checksum = Digest::SHA256.file(__FILE__).hexdigest
-      upload.checksum = checksum
+      expect(upload).to receive(:exist?).and_return(false)
 
-      expect { upload.calculate_checksum! }
-        .to change { upload.checksum }.from(checksum).to(nil)
+      expect(upload.calculate_checksum).to be_nil
     end
   end
 
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index c5c0b0c28675..cb66d23b77cf 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -945,7 +945,7 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
         context 'when artifacts are being stored inside of tmp path' do
           before do
             # by configuring this path we allow passing a temp file from any path
-            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
+            allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
           end
 
           context 'when job has been erased' do
@@ -1122,7 +1122,7 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
             # by configuring this path we allow passing a file from @tmpdir only,
             # but all temporary files are stored in the system tmp directory
             @tmpdir = Dir.mktmpdir
-            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
+            allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
           end
 
           after do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 930ef49b7f32..bee918a20aa6 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -958,7 +958,7 @@
             end
 
             it 'responds with status 200, location of lfs store and object details' do
-              expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
+              expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
               expect(json_response['LfsOid']).to eq(sample_oid)
               expect(json_response['LfsSize']).to eq(sample_size)
             end
@@ -1075,7 +1075,7 @@
             end
 
             it 'with location of lfs store and object details' do
-              expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
+              expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
               expect(json_response['LfsOid']).to eq(sample_oid)
               expect(json_response['LfsSize']).to eq(sample_size)
             end
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 322c91065e71..388c9d63c7b9 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -6,7 +6,7 @@
   let(:title) { 'Some issue' }
   let(:description) { 'Some issue description' }
   let(:old_project) { create(:project) }
-  let(:new_project) { create(:project, group: create(:group)) }
+  let(:new_project) { create(:project) }
   let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') }
 
   let(:old_issue) do
@@ -250,7 +250,7 @@
 
         context 'issue description with uploads' do
           let(:uploader) { build(:file_uploader, project: old_project) }
-          let(:description) { "Text and #{uploader.markdown_link}" }
+          let(:description) { "Text and #{uploader.to_markdown}" }
 
           include_context 'issue move executed'
 
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 15699574b3a9..50e59954f73b 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -6,7 +6,7 @@
   let(:legacy_storage) { Storage::LegacyProject.new(project) }
   let(:hashed_storage) { Storage::HashedProject.new(project) }
 
-  let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+  let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
   let(:file_uploader) { build(:file_uploader, project: project) }
   let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
   let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
@@ -58,6 +58,6 @@
   end
 
   def base_path(storage)
-    File.join(FileUploader.root, storage.disk_path)
+    FileUploader.dynamic_path_builder(storage.disk_path)
   end
 end
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 7ce80c82439c..935c08221e0c 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -2,8 +2,6 @@
   let(:user)  { create(:user) }
   let(:jpg)   { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
   let(:txt)   { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
-  let(:secret) { FileUploader.generate_secret }
-  let(:uploader_class) { FileUploader }
 
   describe "POST #create" do
     context 'when a user is not authorized to upload a file' do
@@ -67,12 +65,7 @@
 
   describe "GET #show" do
     let(:show_upload) do
-      get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
-    end
-
-    before do
-      expect(FileUploader).to receive(:generate_secret).and_return(secret)
-      UploadService.new(model, jpg, uploader_class).execute
+      get :show, params.merge(secret: "123456", filename: "image.jpg")
     end
 
     context "when the model is public" do
@@ -82,6 +75,11 @@
 
       context "when not signed in" do
         context "when the file exists" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+            allow(jpg).to receive(:exists?).and_return(true)
+          end
+
           it "responds with status 200" do
             show_upload
 
@@ -90,10 +88,6 @@
         end
 
         context "when the file doesn't exist" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
-          end
-
           it "responds with status 404" do
             show_upload
 
@@ -108,6 +102,11 @@
         end
 
         context "when the file exists" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+            allow(jpg).to receive(:exists?).and_return(true)
+          end
+
           it "responds with status 200" do
             show_upload
 
@@ -116,10 +115,6 @@
         end
 
         context "when the file doesn't exist" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
-          end
-
           it "responds with status 404" do
             show_upload
 
@@ -136,6 +131,11 @@
 
       context "when not signed in" do
         context "when the file exists" do
+          before do
+            allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+            allow(jpg).to receive(:exists?).and_return(true)
+          end
+
           context "when the file is an image" do
             before do
               allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -149,10 +149,6 @@
           end
 
           context "when the file is not an image" do
-            before do
-              allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
-            end
-
             it "redirects to the sign in page" do
               show_upload
 
@@ -162,10 +158,6 @@
         end
 
         context "when the file doesn't exist" do
-          before do
-            allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
-          end
-
           it "redirects to the sign in page" do
             show_upload
 
@@ -185,6 +177,11 @@
           end
 
           context "when the file exists" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+              allow(jpg).to receive(:exists?).and_return(true)
+            end
+
             it "responds with status 200" do
               show_upload
 
@@ -193,10 +190,6 @@
           end
 
           context "when the file doesn't exist" do
-            before do
-              allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
-            end
-
             it "responds with status 404" do
               show_upload
 
@@ -207,6 +200,11 @@
 
         context "when the user doesn't have access to the model" do
           context "when the file exists" do
+            before do
+              allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+              allow(jpg).to receive(:exists?).and_return(true)
+            end
+
             context "when the file is an image" do
               before do
                 allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -220,10 +218,6 @@
             end
 
             context "when the file is not an image" do
-              before do
-                allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
-              end
-
               it "responds with status 404" do
                 show_upload
 
@@ -233,10 +227,6 @@
           end
 
           context "when the file doesn't exist" do
-            before do
-              allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
-            end
-
             it "responds with status 404" do
               show_upload
 
diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
deleted file mode 100644
index 934d53e7bbac..000000000000
--- a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-shared_examples "matches the method pattern" do |method|
-  let(:target) { subject }
-  let(:args) { nil }
-  let(:pattern) { patterns[method] }
-
-  it do
-    return skip "No pattern provided, skipping." unless pattern
-
-    expect(target.method(method).call(*args)).to match(pattern)
-  end
-end
-
-shared_examples "builds correct paths" do |**patterns|
-  let(:patterns) { patterns }
-
-  before do
-    allow(subject).to receive(:filename).and_return('<filename>')
-  end
-
-  describe "#store_dir" do
-    it_behaves_like "matches the method pattern", :store_dir
-  end
-
-  describe "#cache_dir" do
-    it_behaves_like "matches the method pattern", :cache_dir
-  end
-
-  describe "#work_dir" do
-    it_behaves_like "matches the method pattern", :work_dir
-  end
-
-  describe "#upload_path" do
-    it_behaves_like "matches the method pattern", :upload_path
-  end
-
-  describe ".absolute_path" do
-    it_behaves_like "matches the method pattern", :absolute_path do
-      let(:target) { subject.class }
-      let(:args) { [upload] }
-    end
-  end
-
-  describe ".base_dir" do
-    it_behaves_like "matches the method pattern", :base_dir do
-      let(:target) { subject.class }
-    end
-  end
-end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index c275522159c7..9e5f08fbc516 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -237,7 +237,7 @@ def pages_path
   end
 
   def artifacts_path
-    Gitlab.config.artifacts.storage_path
+    Gitlab.config.artifacts.path
   end
 
   # When no cached assets exist, manually hit the root path to create them
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index 5752078d2a0a..d05eda082016 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -1,6 +1,6 @@
 module TrackUntrackedUploadsHelpers
   def uploaded_file
-    fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
+    fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
     fixture_file_upload(fixture_path)
   end
 
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 091ba824fc6b..04ee6e9bfadb 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,14 +1,28 @@
 require 'spec_helper'
 
 describe AttachmentUploader do
-  let(:note) { create(:note, :with_attachment) }
-  let(:uploader) { note.attachment }
-  let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
+  let(:uploader) { described_class.new(build_stubbed(:user)) }
 
-  subject { uploader }
+  describe "#store_dir" do
+    it "stores in the system dir" do
+      expect(uploader.store_dir).to start_with("uploads/-/system/user")
+    end
 
-  it_behaves_like 'builds correct paths',
-                  store_dir: %r[uploads/-/system/note/attachment/],
-                  upload_path: %r[uploads/-/system/note/attachment/],
-                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
+    it "uses the old path when using object storage" do
+      expect(described_class).to receive(:file_storage?).and_return(false)
+      expect(uploader.store_dir).to start_with("uploads/user")
+    end
+  end
+
+  describe '#move_to_cache' do
+    it 'is true' do
+      expect(uploader.move_to_cache).to eq(true)
+    end
+  end
+
+  describe '#move_to_store' do
+    it 'is true' do
+      expect(uploader.move_to_store).to eq(true)
+    end
+  end
 end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index bf9028c92606..1dc574699d8d 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,16 +1,18 @@
 require 'spec_helper'
 
 describe AvatarUploader do
-  let(:model) { create(:user, :with_avatar) }
-  let(:uploader) { described_class.new(model, :avatar) }
-  let(:upload) { create(:upload, model: model) }
+  let(:uploader) { described_class.new(build_stubbed(:user)) }
 
-  subject { uploader }
+  describe "#store_dir" do
+    it "stores in the system dir" do
+      expect(uploader.store_dir).to start_with("uploads/-/system/user")
+    end
 
-  it_behaves_like 'builds correct paths',
-                  store_dir: %r[uploads/-/system/user/avatar/],
-                  upload_path: %r[uploads/-/system/user/avatar/],
-                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
+    it "uses the old path when using object storage" do
+      expect(described_class).to receive(:file_storage?).and_return(false)
+      expect(uploader.store_dir).to start_with("uploads/user")
+    end
+  end
 
   describe '#move_to_cache' do
     it 'is false' do
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index bc024cd307c7..0cf462e95538 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -3,13 +3,13 @@
 describe FileMover do
   let(:filename) { 'banana_sample.gif' }
   let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
-  let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
-
   let(:temp_description) do
-    "test ![banana_sample](/#{temp_file_path}) "\
-    "same ![banana_sample](/#{temp_file_path}) "
+    'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
+    '(/uploads/-/system/temp/secret55/banana_sample.gif)'
   end
-  let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
+  let(:temp_file_path) { File.join('secret55', filename).to_s }
+  let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
+
   let(:snippet) { create(:personal_snippet, description: temp_description) }
 
   subject { described_class.new(file_path, snippet).execute }
@@ -28,8 +28,8 @@
 
         expect(snippet.reload.description)
           .to eq(
-            "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
-            "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
+            "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
+            " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
           )
       end
 
@@ -50,8 +50,8 @@
 
         expect(snippet.reload.description)
           .to eq(
-            "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
-            "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
+            "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
+            " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
           )
       end
 
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index a72f853df752..845516e50040 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,57 +1,118 @@
 require 'spec_helper'
 
 describe FileUploader do
-  let(:group) { create(:group, name: 'awesome') }
-  let(:project) { create(:project, namespace: group, name: 'project') }
-  let(:uploader) { described_class.new(project) }
-  let(:upload)  { double(model: project, path: 'secret/foo.jpg') }
+  let(:uploader) { described_class.new(build_stubbed(:project)) }
 
-  subject { uploader }
+  context 'legacy storage' do
+    let(:project) { build_stubbed(:project) }
+
+    describe '.absolute_path' do
+      it 'returns the correct absolute path by building it dynamically' do
+        upload = double(model: project, path: 'secret/foo.jpg')
+
+        dynamic_segment = project.full_path
 
-  shared_examples 'builds correct legacy storage paths' do
-    include_examples 'builds correct paths',
-                     store_dir: %r{awesome/project/\h+},
-                     absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
+        expect(described_class.absolute_path(upload))
+          .to end_with("#{dynamic_segment}/secret/foo.jpg")
+      end
+    end
+
+    describe "#store_dir" do
+      it "stores in the namespace path" do
+        uploader = described_class.new(project)
+
+        expect(uploader.store_dir).to include(project.full_path)
+        expect(uploader.store_dir).not_to include("system")
+      end
+    end
   end
 
-  shared_examples 'uses hashed storage' do
+  context 'hashed storage' do
     context 'when rolled out attachments' do
-      before do
-        allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
+      let(:project) { build_stubbed(:project, :hashed) }
+
+      describe '.absolute_path' do
+        it 'returns the correct absolute path by building it dynamically' do
+          upload = double(model: project, path: 'secret/foo.jpg')
+
+          dynamic_segment = project.disk_path
+
+          expect(described_class.absolute_path(upload))
+            .to end_with("#{dynamic_segment}/secret/foo.jpg")
+        end
       end
 
-      let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
+      describe "#store_dir" do
+        it "stores in the namespace path" do
+          uploader = described_class.new(project)
 
-      it_behaves_like 'builds correct paths',
-                      store_dir: %r{ca/fe/fe/ed/\h+},
-                      absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
+          expect(uploader.store_dir).to include(project.disk_path)
+          expect(uploader.store_dir).not_to include("system")
+        end
+      end
     end
 
     context 'when only repositories are rolled out' do
-      let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+      let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
 
-      it_behaves_like 'builds correct legacy storage paths'
-    end
-  end
+      describe '.absolute_path' do
+        it 'returns the correct absolute path by building it dynamically' do
+          upload = double(model: project, path: 'secret/foo.jpg')
 
-  context 'legacy storage' do
-    it_behaves_like 'builds correct legacy storage paths'
-    include_examples 'uses hashed storage'
+          dynamic_segment = project.full_path
+
+          expect(described_class.absolute_path(upload))
+            .to end_with("#{dynamic_segment}/secret/foo.jpg")
+        end
+      end
+
+      describe "#store_dir" do
+        it "stores in the namespace path" do
+          uploader = described_class.new(project)
+
+          expect(uploader.store_dir).to include(project.full_path)
+          expect(uploader.store_dir).not_to include("system")
+        end
+      end
+    end
   end
 
   describe 'initialize' do
-    let(:uploader) { described_class.new(double, 'secret') }
+    it 'generates a secret if none is provided' do
+      expect(SecureRandom).to receive(:hex).and_return('secret')
+
+      uploader = described_class.new(double)
+
+      expect(uploader.secret).to eq 'secret'
+    end
 
     it 'accepts a secret parameter' do
-      expect(described_class).not_to receive(:generate_secret)
-      expect(uploader.secret).to eq('secret')
+      expect(SecureRandom).not_to receive(:hex)
+
+      uploader = described_class.new(double, 'secret')
+
+      expect(uploader.secret).to eq 'secret'
     end
   end
 
-  describe '#secret' do
-    it 'generates a secret if none is provided' do
-      expect(described_class).to receive(:generate_secret).and_return('secret')
-      expect(uploader.secret).to eq('secret')
+  describe '#move_to_cache' do
+    it 'is true' do
+      expect(uploader.move_to_cache).to eq(true)
+    end
+  end
+
+  describe '#move_to_store' do
+    it 'is true' do
+      expect(uploader.move_to_store).to eq(true)
+    end
+  end
+
+  describe '#relative_path' do
+    it 'removes the leading dynamic path segment' do
+      fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+      uploader.store!(fixture_file_upload(fixture))
+
+      expect(uploader.relative_path).to match(%r{\A\h{32}/rails_sample.jpg\z})
     end
   end
 end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index d606404e95df..a067c3e75f49 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -3,13 +3,33 @@
 describe JobArtifactUploader do
   let(:job_artifact) { create(:ci_job_artifact) }
   let(:uploader) { described_class.new(job_artifact, :file) }
+  let(:local_path) { Gitlab.config.artifacts.path }
 
-  subject { uploader }
+  describe '#store_dir' do
+    subject { uploader.store_dir }
 
-  it_behaves_like "builds correct paths",
-                  store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
-                  cache_dir: %r[artifacts/tmp/cache],
-                  work_dir: %r[artifacts/tmp/work]
+    let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.job_id}/#{job_artifact.id}" }
+
+    context 'when using local storage' do
+      it { is_expected.to start_with(local_path) }
+      it { is_expected.to match(%r{\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z}) }
+      it { is_expected.to end_with(path) }
+    end
+  end
+
+  describe '#cache_dir' do
+    subject { uploader.cache_dir }
+
+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to end_with('/tmp/cache') }
+  end
+
+  describe '#work_dir' do
+    subject { uploader.work_dir }
+
+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to end_with('/tmp/work') }
+  end
 
   context 'file is stored in valid local_path' do
     let(:file) do
@@ -23,7 +43,7 @@
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
+    it { is_expected.to start_with(local_path) }
     it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
     it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 54c6a8b869b3..efeffb78772d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -3,22 +3,49 @@
 describe LegacyArtifactUploader do
   let(:job) { create(:ci_build) }
   let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
-  let(:local_path) { described_class.root }
+  let(:local_path) { Gitlab.config.artifacts.path }
 
-  subject { uploader }
+  describe '.local_store_path' do
+    subject { described_class.local_store_path }
 
-  # TODO: move to Workhorse::UploadPath
-  describe '.workhorse_upload_path' do
-    subject { described_class.workhorse_upload_path }
+    it "delegate to artifacts path" do
+      expect(Gitlab.config.artifacts).to receive(:path)
+
+      subject
+    end
+  end
+
+  describe '.artifacts_upload_path' do
+    subject { described_class.artifacts_upload_path }
 
     it { is_expected.to start_with(local_path) }
-    it { is_expected.to end_with('tmp/uploads') }
+    it { is_expected.to end_with('tmp/uploads/') }
+  end
+
+  describe '#store_dir' do
+    subject { uploader.store_dir }
+
+    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
+
+    context 'when using local storage' do
+      it { is_expected.to start_with(local_path) }
+      it { is_expected.to end_with(path) }
+    end
   end
 
-  it_behaves_like "builds correct paths",
-                  store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
-                  cache_dir: %r[artifacts/tmp/cache],
-                  work_dir: %r[artifacts/tmp/work]
+  describe '#cache_dir' do
+    subject { uploader.cache_dir }
+
+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to end_with('/tmp/cache') }
+  end
+
+  describe '#work_dir' do
+    subject { uploader.work_dir }
+
+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to end_with('/tmp/work') }
+  end
 
   describe '#filename' do
     # we need to use uploader, as this makes to use mounter
@@ -42,7 +69,7 @@
 
     subject { uploader.file.path }
 
-    it { is_expected.to start_with("#{uploader.root}") }
+    it { is_expected.to start_with(local_path) }
     it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
     it { is_expected.to include("/#{job.project_id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 6ebc885daa8f..7088bc23334c 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -2,13 +2,39 @@
 
 describe LfsObjectUploader do
   let(:lfs_object) { create(:lfs_object, :with_file) }
-  let(:uploader) { described_class.new(lfs_object, :file) }
+  let(:uploader) { described_class.new(lfs_object) }
   let(:path) { Gitlab.config.lfs.storage_path }
 
-  subject { uploader }
+  describe '#move_to_cache' do
+    it 'is true' do
+      expect(uploader.move_to_cache).to eq(true)
+    end
+  end
 
-  it_behaves_like "builds correct paths",
-                  store_dir: %r[\h{2}/\h{2}],
-                  cache_dir: %r[/lfs-objects/tmp/cache],
-                  work_dir: %r[/lfs-objects/tmp/work]
+  describe '#move_to_store' do
+    it 'is true' do
+      expect(uploader.move_to_store).to eq(true)
+    end
+  end
+
+  describe '#store_dir' do
+    subject { uploader.store_dir }
+
+    it { is_expected.to start_with(path) }
+    it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
+  end
+
+  describe '#cache_dir' do
+    subject { uploader.cache_dir }
+
+    it { is_expected.to start_with(path) }
+    it { is_expected.to end_with('/tmp/cache') }
+  end
+
+  describe '#work_dir' do
+    subject { uploader.work_dir }
+
+    it { is_expected.to start_with(path) }
+    it { is_expected.to end_with('/tmp/work') }
+  end
 end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index 24a2fc0f72e5..c6c4500c179d 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,16 +1,21 @@
 require 'spec_helper'
 
-IDENTIFIER = %r{\h+/\S+}
-
 describe NamespaceFileUploader do
   let(:group) { build_stubbed(:group) }
   let(:uploader) { described_class.new(group) }
-  let(:upload) { create(:upload, :namespace_upload, model: group) }
 
-  subject { uploader }
+  describe "#store_dir" do
+    it "stores in the namespace id directory" do
+      expect(uploader.store_dir).to include(group.id.to_s)
+    end
+  end
+
+  describe ".absolute_path" do
+    it "stores in thecorrect directory" do
+      upload_record = create(:upload, :namespace_upload, model: group)
 
-  it_behaves_like 'builds correct paths',
-                  store_dir: %r[uploads/-/system/namespace/\d+],
-                  upload_path: IDENTIFIER,
-                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+      expect(described_class.absolute_path(upload_record))
+        .to include("-/system/namespace/#{group.id}")
+    end
+  end
 end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index ed1fba6eddab..cbafa9f478dc 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,27 +1,25 @@
 require 'spec_helper'
 
-IDENTIFIER = %r{\h+/\S+}
-
 describe PersonalFileUploader do
-  let(:model) { create(:personal_snippet) }
-  let(:uploader) { described_class.new(model) }
-  let(:upload) { create(:upload, :personal_snippet_upload) }
+  let(:uploader) { described_class.new(build_stubbed(:project)) }
+  let(:snippet) { create(:personal_snippet) }
 
-  subject { uploader }
+  describe '.absolute_path' do
+    it 'returns the correct absolute path by building it dynamically' do
+      upload = double(model: snippet, path: 'secret/foo.jpg')
 
-  it_behaves_like 'builds correct paths',
-                  store_dir: %r[uploads/-/system/personal_snippet/\d+],
-                  upload_path: IDENTIFIER,
-                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
+      dynamic_segment = "personal_snippet/#{snippet.id}"
 
-  describe '#to_h' do
-    before do
-      subject.instance_variable_set(:@secret, 'secret')
+      expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
     end
+  end
+
+  describe '#to_h' do
+  it 'returns the hash' do
+      uploader = described_class.new(snippet, 'secret')
 
-    it 'is correct' do
       allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
-      expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
+      expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
 
       expect(uploader.to_h).to eq(
         alt: 'file_name',
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 9a3e5d83e01f..7ef7fb7d758b 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@
 describe RecordsUploads do
   let!(:uploader) do
     class RecordsUploadsExampleUploader < GitlabUploader
-      include RecordsUploads::Concern
+      include RecordsUploads
 
       storage :file
 
-      def dynamic_segment
-        'co/fe/ee'
+      def model
+        FactoryBot.build_stubbed(:user)
       end
     end
 
-    RecordsUploadsExampleUploader.new(build_stubbed(:user))
+    RecordsUploadsExampleUploader.new
   end
 
   def upload_fixture(filename)
@@ -20,55 +20,48 @@ def upload_fixture(filename)
   end
 
   describe 'callbacks' do
-    let(:upload) { create(:upload) }
-
-    before do
-      uploader.upload = upload
-    end
-
-    it '#record_upload after `store`' do
+    it 'calls `record_upload` after `store`' do
       expect(uploader).to receive(:record_upload).once
 
       uploader.store!(upload_fixture('doc_sample.txt'))
     end
 
-    it '#destroy_upload after `remove`' do
+    it 'calls `destroy_upload` after `remove`' do
+      expect(uploader).to receive(:destroy_upload).once
+
       uploader.store!(upload_fixture('doc_sample.txt'))
 
-      expect(uploader).to receive(:destroy_upload).once
       uploader.remove!
     end
   end
 
   describe '#record_upload callback' do
-    it 'creates an Upload record after store' do
-      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
-    end
+    it 'returns early when not using file storage' do
+      allow(uploader).to receive(:file_storage?).and_return(false)
+      expect(Upload).not_to receive(:record)
 
-    it 'creates a new record and assigns size, path, model, and uploader' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
-
-      upload = uploader.upload
-      aggregate_failures do
-        expect(upload).to be_persisted
-        expect(upload.size).to eq uploader.file.size
-        expect(upload.path).to eq uploader.upload_path
-        expect(upload.model_id).to eq uploader.model.id
-        expect(upload.model_type).to eq uploader.model.class.to_s
-        expect(upload.uploader).to eq uploader.class.to_s
-      end
     end
 
-    it "does not create an Upload record when the file doesn't exist" do
+    it "returns early when the file doesn't exist" do
       allow(uploader).to receive(:file).and_return(double(exists?: false))
+      expect(Upload).not_to receive(:record)
 
-      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
+      uploader.store!(upload_fixture('rails_sample.jpg'))
+    end
+
+    it 'creates an Upload record after store' do
+      expect(Upload).to receive(:record)
+        .with(uploader)
+
+      uploader.store!(upload_fixture('rails_sample.jpg'))
     end
 
     it 'does not create an Upload record if model is missing' do
-      allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
+      expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
+      expect(Upload).not_to receive(:record).with(uploader)
 
-      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
+      uploader.store!(upload_fixture('rails_sample.jpg'))
     end
 
      it 'destroys Upload records at the same path before recording' do
@@ -79,15 +72,29 @@ def upload_fixture(filename)
         uploader: uploader.class.to_s
       )
 
-      uploader.upload = existing
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
       expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
-      expect(Upload.count).to eq(1)
+      expect(Upload.count).to eq 1
     end
   end
 
   describe '#destroy_upload callback' do
+    it 'returns early when not using file storage' do
+      uploader.store!(upload_fixture('rails_sample.jpg'))
+
+      allow(uploader).to receive(:file_storage?).and_return(false)
+      expect(Upload).not_to receive(:remove_path)
+
+      uploader.remove!
+    end
+
+    it 'returns early when file is nil' do
+      expect(Upload).not_to receive(:remove_path)
+
+      uploader.remove!
+    end
+
      it 'destroys Upload records at the same path after removal' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 9e50ce15871a..911360da66ca 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,31 +2,18 @@
 
 describe UploadChecksumWorker do
   describe '#perform' do
-    subject { described_class.new }
-
-    context 'without a valid record' do
-      it 'rescues ActiveRecord::RecordNotFound' do
-        expect { subject.perform(999_999) }.not_to raise_error
-      end
+    it 'rescues ActiveRecord::RecordNotFound' do
+      expect { described_class.new.perform(999_999) }.not_to raise_error
     end
 
-    context 'with a valid record' do
-      let(:upload) { create(:user, :with_avatar).avatar.upload }
-
-      before do
-        expect(Upload).to receive(:find).and_return(upload)
-        allow(upload).to receive(:foreground_checksumable?).and_return(false)
-      end
+    it 'calls calculate_checksum and save!' do
+      upload = spy
+      expect(Upload).to receive(:find).with(999_999).and_return(upload)
 
-      it 'calls calculate_checksum!' do
-        expect(upload).to receive(:calculate_checksum!)
-        subject.perform(upload.id)
-      end
+      described_class.new.perform(999_999)
 
-      it 'calls save!' do
-        expect(upload).to receive(:save!)
-        subject.perform(upload.id)
-      end
+      expect(upload).to have_received(:calculate_checksum)
+      expect(upload).to have_received(:save!)
     end
   end
 end
-- 
GitLab


From d4a282751d6161c34403a5b34e569ebddbf5a0ca Mon Sep 17 00:00:00 2001
From: Sean McGivern <sean@mcgivern.me.uk>
Date: Wed, 14 Feb 2018 09:53:37 +0000
Subject: [PATCH 12/14] Merge branch
 '4879-support-private-https-urls-for-object-storage' into 'master'

Resolve ""Support private HTTPS urls for object storage""

Closes #4879

See merge request gitlab-org/gitlab-ee!4475
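
The initializer added below overrides Fog's URL generation with
Module#prepend, so signed URLs for private objects come back as
https://. A minimal sketch of the pattern (the real patch prepends
inside Fog::Storage::GoogleXML::File, as in the diff below):

    # A prepended module sits before the class in the ancestor chain,
    # so this #url shadows the one Fog defines directly on the class,
    # while `super` would still reach the original.
    module MonkeyPatch
      def url(expires)
        requires :key                           # Fog::Model attribute guard
        collection.get_https_url(key, expires)  # signed https:// URL
      end
    end

    Fog::Storage::GoogleXML::File.prepend(MonkeyPatch)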
---
 .../fog_google_https_private_urls.rb          | 20 ++++++++++++++++
 .../fog_google_https_private_urls_spec.rb     | 24 +++++++++++++++++++
 2 files changed, 44 insertions(+)
 create mode 100644 config/initializers/fog_google_https_private_urls.rb
 create mode 100644 spec/initializers/fog_google_https_private_urls_spec.rb

diff --git a/config/initializers/fog_google_https_private_urls.rb b/config/initializers/fog_google_https_private_urls.rb
new file mode 100644
index 000000000000..f92e623a5d26
--- /dev/null
+++ b/config/initializers/fog_google_https_private_urls.rb
@@ -0,0 +1,20 @@
+#
+# Monkey patch to add HTTPS support for private URLs
+# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879
+#
+module Fog
+  module Storage
+    class GoogleXML
+      class File < Fog::Model
+        module MonkeyPatch
+          def url(expires)
+            requires :key
+            collection.get_https_url(key, expires)
+          end
+        end
+
+        prepend MonkeyPatch
+      end
+    end
+  end
+end
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
new file mode 100644
index 000000000000..de3c157ab7bd
--- /dev/null
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'Fog::Storage::GoogleXML::File' do
+  let(:storage) do
+    Fog.mock!
+    Fog::Storage.new({
+                       google_storage_access_key_id: "asdf",
+                       google_storage_secret_access_key: "asdf",
+                       provider: "Google"
+                     })
+  end
+
+  let(:file) do
+    directory = storage.directories.create(key: 'data')
+    directory.files.create(
+      body: 'Hello World!',
+      key: 'hello_world.txt'
+    )
+  end
+
+  it 'delegates to #get_https_url' do
+    expect(file.url(Time.now)).to start_with("https://")
+  end
+end
-- 
GitLab


From b4dc556c2f40f2e8e4d71c5dd8d1747974f8147f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Tue, 13 Feb 2018 21:07:18 +0000
Subject: [PATCH 13/14] Merge branch
 '4915-background-upload-option-is-not-effective' into 'master'

Resolve "Background upload option is not effective"

Closes #4915

See merge request gitlab-org/gitlab-ee!4507
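
The previous defaults used `||=`, which cannot express "default to
true": an explicitly configured `false` is falsy, so it was silently
clobbered back to `true`. Assigning only when the value is nil keeps
the operator's choice. A minimal demonstration of the difference:

    settings = { 'background_upload' => false }

    settings['background_upload'] ||= true
    settings['background_upload']   # => true, the explicit false is lost

    settings = { 'background_upload' => false }
    settings['background_upload'] = true if settings['background_upload'].nil?
    settings['background_upload']   # => false, the explicit false survives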
---
 ...ckground-upload-option-is-not-effective.yml |  5 +++++
 config/initializers/1_settings.rb              | 18 +++++++++---------
 2 files changed, 14 insertions(+), 9 deletions(-)
 create mode 100644 changelogs/unreleased-ee/4915-background-upload-option-is-not-effective.yml

diff --git a/changelogs/unreleased-ee/4915-background-upload-option-is-not-effective.yml b/changelogs/unreleased-ee/4915-background-upload-option-is-not-effective.yml
new file mode 100644
index 000000000000..54ae83f8d76e
--- /dev/null
+++ b/changelogs/unreleased-ee/4915-background-upload-option-is-not-effective.yml
@@ -0,0 +1,5 @@
+---
+title: Fix the background_upload configuration being ignored.
+merge_request: 4507
+author:
+type: fixed
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index acfa300882c6..3c116501e4c5 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -307,9 +307,9 @@ def cron_for_usage_ping
 Settings.artifacts['max_size'] ||= 100 # in megabytes
 
 Settings.artifacts['object_store'] ||= Settingslogic.new({})
-Settings.artifacts['object_store']['enabled']           ||= false
-Settings.artifacts['object_store']['remote_directory']  ||= nil
-Settings.artifacts['object_store']['background_upload'] ||= true
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
 
@@ -348,9 +348,9 @@ def cron_for_usage_ping
 Settings.lfs['enabled']      = true if Settings.lfs['enabled'].nil?
 Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
 Settings.lfs['object_store'] ||= Settingslogic.new({})
-Settings.lfs['object_store']['enabled']           ||= false
-Settings.lfs['object_store']['remote_directory']  ||= nil
-Settings.lfs['object_store']['background_upload'] ||= true
+Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.lfs['object_store']['connection']&.deep_stringify_keys!
 
@@ -361,9 +361,9 @@ def cron_for_usage_ping
 Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
 Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
 Settings.uploads['object_store'] ||= Settingslogic.new({})
-Settings.uploads['object_store']['enabled']           ||= false
-Settings.uploads['object_store']['remote_directory']  ||= 'uploads'
-Settings.uploads['object_store']['background_upload'] ||= true
+Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
+Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
+Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.uploads['object_store']['connection']&.deep_stringify_keys!
 
-- 
GitLab


From a22f6fa6e50bb31921415b01fd345d6802581390 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= <ayufan@ayufan.eu>
Date: Tue, 27 Feb 2018 13:09:33 +0000
Subject: [PATCH 14/14] Merge branch 'fix/sm/atomic-migration' into 'master'

Fix migrate! method (Minimal fix with ExclusiveLock to prevent race conditions)

Closes #4928 and #4980

See merge request gitlab-org/gitlab-ee!4624
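
The fix moves the old migration body into unsafe_migrate! and guards
it with a Gitlab::ExclusiveLease keyed per model record, so a second
process fails fast instead of racing the first. Condensed from the
diff below:

    def migrate!(new_store)
      # Only one holder per "object_storage_migrate:<class>:<id>" key;
      # a concurrent caller gets nil back from try_obtain.
      uuid = Gitlab::ExclusiveLease
        .new(exclusive_lease_key, timeout: 1.hour.to_i)
        .try_obtain
      raise 'Already running' unless uuid

      unsafe_migrate!(new_store)  # the previous migrate! body, unchanged
    ensure
      Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
    end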
---
 app/uploaders/records_uploads.rb              |   3 +-
 ee/app/uploaders/object_storage.rb            | 103 +++++++++++++-----
 spec/ee/spec/models/ee/lfs_object_spec.rb     |  16 ++-
 spec/requests/lfs_http_spec.rb                |   2 +-
 .../object_storage_shared_examples.rb         |  49 +++++++++
 spec/uploaders/job_artifact_uploader_spec.rb  |  10 ++
 spec/uploaders/object_storage_spec.rb         |  27 +++++
 7 files changed, 180 insertions(+), 30 deletions(-)

diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index 458928bc0672..89c74a788356 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -24,8 +24,7 @@ def record_upload(_tempfile = nil)
         uploads.where(path: upload_path).delete_all
         upload.destroy! if upload
 
-        self.upload = build_upload
-        upload.save!
+        self.upload = build_upload.tap(&:save!)
       end
     end
 
diff --git a/ee/app/uploaders/object_storage.rb b/ee/app/uploaders/object_storage.rb
index e5b087524f5f..23013f99d324 100644
--- a/ee/app/uploaders/object_storage.rb
+++ b/ee/app/uploaders/object_storage.rb
@@ -61,6 +61,39 @@ def current_upload_satisfies?(paths, model)
     end
   end
 
+  # Add support for automatic background uploading after the file is stored.
+  #
+  module BackgroundMove
+    extend ActiveSupport::Concern
+
+    def background_upload(mount_points = [])
+      return unless mount_points.any?
+
+      run_after_commit do
+        mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
+      end
+    end
+
+    def changed_mounts
+      self.class.uploaders.select do |mount, uploader_class|
+        mounted_as = uploader_class.serialization_column(self.class, mount)
+        uploader = send(:"#{mounted_as}") # rubocop:disable GitlabSecurity/PublicSend
+
+        next unless uploader
+        next unless uploader.exists?
+        next unless send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
+
+        mount
+      end.keys
+    end
+
+    included do
+      after_save on: [:create, :update] do
+        background_upload(changed_mounts)
+      end
+    end
+  end
+
   module Concern
     extend ActiveSupport::Concern
 
@@ -127,7 +160,7 @@ def persist_object_store!
       return unless persist_object_store?
 
       updated = model.update_column(store_serialization_column, object_store)
-      raise ActiveRecordError unless updated
+      raise 'Failed to update object store' unless updated
     end
 
     def use_file
@@ -153,32 +186,12 @@ def filename
     #   new_store: Enum (Store::LOCAL, Store::REMOTE)
     #
     def migrate!(new_store)
-      return unless object_store != new_store
-      return unless file
+      uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+      raise 'Already running' unless uuid
 
-      new_file = nil
-      file_to_delete = file
-      from_object_store = object_store
-      self.object_store = new_store # changes the storage and file
-
-      cache_stored_file! if file_storage?
-
-      with_callbacks(:migrate, file_to_delete) do
-        with_callbacks(:store, file_to_delete) do # for #store_versions!
-          new_file = storage.store!(file)
-          persist_object_store!
-          self.file = new_file
-        end
-      end
-
-      file
-    rescue => e
-      # in case of failure delete new file
-      new_file.delete unless new_file.nil?
-      # revert back to the old file
-      self.object_store = from_object_store
-      self.file = file_to_delete
-      raise e
+      unsafe_migrate!(new_store)
+    ensure
+      Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
     end
 
     def schedule_migration_to_object_storage(*args)
@@ -261,5 +274,43 @@ def storage_for(store)
         raise UnknownStoreError
       end
     end
+
+    def exclusive_lease_key
+      "object_storage_migrate:#{model.class}:#{model.id}"
+    end
+
+    #
+    # Move the file to another store
+    #
+    #   new_store: Enum (Store::LOCAL, Store::REMOTE)
+    #
+    def unsafe_migrate!(new_store)
+      return unless object_store != new_store
+      return unless file
+
+      new_file = nil
+      file_to_delete = file
+      from_object_store = object_store
+      self.object_store = new_store # changes the storage and file
+
+      cache_stored_file! if file_storage?
+
+      with_callbacks(:migrate, file_to_delete) do
+        with_callbacks(:store, file_to_delete) do # for #store_versions!
+          new_file = storage.store!(file)
+          persist_object_store!
+          self.file = new_file
+        end
+      end
+
+      file
+    rescue => e
+      # in case of failure delete new file
+      new_file.delete unless new_file.nil?
+      # revert back to the old file
+      self.object_store = from_object_store
+      self.file = file_to_delete
+      raise e
+    end
   end
 end
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
index e425f5bc112b..28dbcbc4189d 100644
--- a/spec/ee/spec/models/ee/lfs_object_spec.rb
+++ b/spec/ee/spec/models/ee/lfs_object_spec.rb
@@ -61,10 +61,24 @@
           end
 
           it 'schedules the model for migration' do
-            expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+            expect(ObjectStorage::BackgroundMoveWorker)
+              .to receive(:perform_async)
+              .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+              .once
 
             subject
           end
+
+          it 'schedules the model for migration once' do
+            expect(ObjectStorage::BackgroundMoveWorker)
+              .to receive(:perform_async)
+              .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+              .once
+
+            lfs_object = create(:lfs_object)
+            lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+            lfs_object.save!
+          end
         end
 
         context 'when is unlicensed' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index b244d29a3058..04c0114b5d65 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -997,7 +997,7 @@
 
           context 'and workhorse requests upload finalize for a new lfs object' do
             before do
-              allow_any_instance_of(LfsObjectUploader).to receive(:exists?) { false }
+              lfs_object.destroy
             end
 
             context 'with object storage disabled' do
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
index 0022b2f803fb..6fceb5d18afc 100644
--- a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -20,6 +20,19 @@ def checksum
     migrate(from)
   end
 
+  it 'returns the corresponding file type' do
+    expect(subject).to be_an(CarrierWave::Uploader::Base)
+    expect(subject).to be_a(ObjectStorage::Concern)
+
+    if from == described_class::Store::REMOTE
+      expect(subject.file).to be_a(CarrierWave::Storage::Fog::File)
+    elsif from == described_class::Store::LOCAL
+      expect(subject.file).to be_a(CarrierWave::SanitizedFile)
+    else
+      raise 'Unexpected file type'
+    end
+  end
+
   it 'does nothing when migrating to the current store' do
     expect { migrate(from) }.not_to change { subject.object_store }.from(from)
   end
@@ -38,6 +51,42 @@ def checksum
     expect(File.exist?(original_file)).to be_falsey
   end
 
+  it 'can access the original file during migration' do
+    file = subject.file
+
+    allow(subject).to receive(:delete_migrated_file) { } # stub out the :migrate callback
+    allow(subject).to receive(:record_upload) { } # stub out the :store callback (:record_upload)
+
+    expect(file.exists?).to be_truthy
+    expect { migrate(to) }.not_to change { file.exists? }
+  end
+
+  context 'when migrate! is not occupied by another process' do
+    it 'executes migrate!' do
+      expect(subject).to receive(:object_store=).at_least(1)
+
+      migrate(to)
+    end
+  end
+
+  context 'when migrate! is occupied by another process' do
+    let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" }
+
+    before do
+      @uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+    end
+
+    it 'does not execute migrate!' do
+      expect(subject).not_to receive(:unsafe_migrate!)
+
+      expect { migrate(to) }.to raise_error('Already running')
+    end
+
+    after do
+      Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid)
+    end
+  end
+
   context 'migration is unsuccessful' do
     shared_examples "handles gracefully" do |error:|
       it 'does not update the object_store' do
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 0bcf28f2c1c8..714b24985382 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -67,4 +67,14 @@
     it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
   end
+
+  describe "#migrate!" do
+    before do
+      uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace')))
+      stub_artifacts_object_storage
+    end
+
+    it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+    it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+  end
 end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index e01ad9af1dcd..64b59acb2869 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -128,6 +128,33 @@ def dynamic_segment
         expect(uploader.object_store).to eq(uploader.upload.store)
       end
     end
+
+    describe '#migrate!' do
+      let(:new_store) { ObjectStorage::Store::REMOTE }
+
+      before do
+        stub_uploads_object_storage(uploader: AvatarUploader)
+      end
+
+      subject { uploader.migrate!(new_store) }
+
+      it 'persists @object_store to the recorded upload' do
+        subject
+
+        expect(uploader.upload.store).to eq(new_store)
+      end
+
+      describe 'fails' do
+        it 'is handled gracefully' do
+          store = uploader.object_store
+          expect_any_instance_of(Upload).to receive(:save!).and_raise("An error")
+
+          expect { subject }.to raise_error("An error")
+          expect(uploader.exists?).to be_truthy
+          expect(uploader.upload.store).to eq(store)
+        end
+      end
+    end
   end
 
   # this means the model holds an <mounted_as>_store attribute directly
-- 
GitLab