Commit 51a85839 authored by Grant Young

Switch to dynamic compare pipeline

Uses the new dynamic child pipeline feature to generate the version comparison jobs.
parent 61d58be8
@@ -11,3 +11,4 @@ tmp/
.idea
package-lock.json
.DS_Store
.gitlab/ci/gpt-k6-compare-jobs.yml
stages:
- check
- build
- test
- report
- stop
include:
- local: '.gitlab/ci/gpt-lint.yml'
- local: '.gitlab/ci/.gpt-k6-base.yml'
- local: '.gitlab/ci/gpt-k6.yml'
- local: '.gitlab/ci/gpt-k6-comparisons.yml'
- local: '.gitlab/ci/gpt-k6-compare.yml'
- local: '.gitlab/ci/gpt-docker.yml'
stages:
- check
- build
- test
- report
- stop
.k6-base:
image: ruby:2.6-alpine
before_script:
- apk add --no-cache build-base curl
- gem install bundler && bundle config without dev && bundle install
.k6-compare-base:
extends: .k6-base
services:
- docker:stable-dind
tags:
- performance
variables:
DOCKER_DRIVER: overlay2
DOCKER_HOST: tcp://docker:2375
OPTIONS_FILE: 60s_2rps
GPT_CP_DOCKER_IMAGE: registry.gitlab.com/gitlab-org/quality/performance-images/gitlab-ce-performance
script:
- apk add docker
- echo "$GPT_CP_DOCKER_IMAGE:$MAJOR_VERSION.$MINOR_VERSION.0-ce.0"
- bin/run-gitlab-docker --image $GPT_CP_DOCKER_IMAGE:$MAJOR_VERSION.$MINOR_VERSION.0-ce.0
- sleep $STARTUP_DELAY
- bin/run-k6 -s -e $ENVIRONMENT_NAME.json -o $OPTIONS_FILE.json -x api_v4_projects_repository_commits_sha_signature
artifacts:
when: always
paths:
- k6/results/
expire_in: 14d
retry:
max: 2
when:
- runner_system_failure
- stuck_or_timeout_failure
- unknown_failure
.report-k6-slack-base:
stage: report
extends: .k6-base
only:
refs:
- schedules
script: bin/ci-report-results-slack --channel $CI_SLACK_CHANNEL --test-name $TEST_TYPE --test-result $TEST_RESULT
except:
variables:
- $CI_SLACK_REPORT != "true"
@@ -23,10 +57,8 @@
.report-k6-wiki-base:
stage: report
extends: .k6-base
script: bin/ci-report-results-wiki -p "$WIKI_PAGE"
when: always
only:
refs:
- schedules
except:
variables:
- $CI_WIKI_REPORT != "true"
@@ -10,9 +10,9 @@
tags:
- docker
###########################
## Commit Checks ##
###########################
#####################
## Commit Checks ##
#####################
check:gpt-docker:
stage: check
extends: .gpt-docker-base
@@ -20,12 +20,14 @@ check:gpt-docker:
- apk add -u --no-cache docker
- docker build -t gitlab/gitlab-performance-tool:test .
- docker run --rm -e ACCESS_TOKEN=$GPT_CHECK_STAGING_ACCESS_TOKEN gitlab/gitlab-performance-tool:test -e staging.json -t api_v4_user.js
except:
- schedules
only:
- master
- merge_requests
- tags
########################
## GPT Docker Build ##
########################
##########################
## GPT Docker Build ##
##########################
gpt-build-docker-tag:
stage: build
extends: .gpt-docker-base
# Depends: .gpt-k6-base.yml, .gpt-k6-compare-base.yml
##########################################
## k6 Load Tests - Compare Versions ##
##########################################
k6-compare-config:
stage: build
extends: .k6-base
script:
- bin/ci-generate-compare-config
only:
refs:
- schedules
variables:
- $TEST_TYPE == "k6 comparison"
artifacts:
paths:
- .gitlab/ci/gpt-k6-compare-jobs.yml
k6-compare:
stage: test
trigger:
include:
- local: .gitlab/ci/.gpt-k6-base.yml
- artifact: .gitlab/ci/gpt-k6-compare-jobs.yml
job: k6-compare-config
strategy: depend
variables: # We need to pass variables from Pipeline Schedule downstream
ACCESS_TOKEN: $ACCESS_TOKEN
CI_SLACK_CHANNEL: $CI_SLACK_CHANNEL
ENVIRONMENT_NAME: $ENVIRONMENT_NAME
GPT_IGNORE_RESULT: $GPT_IGNORE_RESULT
GPT_RESULTS_URL: $GPT_RESULTS_URL
TEST_TYPE: $TEST_TYPE
GPT_CP_DOCKER_IMAGE: $GPT_CP_DOCKER_IMAGE
only:
refs:
- schedules
variables:
- $TEST_TYPE == "k6 comparison"
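The artifact referenced above is plain job YAML written by `bin/ci-generate-compare-config` (shown further down). A minimal sketch of that generation step, with three illustrative versions and the variable names consumed by `.k6-compare-base`:

```ruby
require 'yaml'

# Each generated job extends .k6-compare-base and only supplies the version to
# pull plus a staggered STARTUP_DELAY (the real list is derived from the tags API).
versions = [[12, 5], [12, 6], [12, 7]]

jobs = versions.each_with_object({}) do |(major, minor), conf|
  conf["gpt-#{major}-#{minor}-compare"] = {
    'extends'   => '.k6-compare-base',
    'variables' => {
      'MAJOR_VERSION' => major,
      'MINOR_VERSION' => minor,
      'STARTUP_DELAY' => 30 * (minor - 5) # mirrors get_startup_delay with base 12.5.0
    }
  }
end

# k6-compare then runs this file as a child pipeline via trigger:include:artifact.
File.write('.gitlab/ci/gpt-k6-compare-jobs.yml', jobs.to_yaml)
```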
# Depends: .gpt-k6-base.yml
##############################################
## k6 Load Tests - Version Comparisions ##
##############################################
.k6-compare-base:
stage: test
extends: .k6-base
services:
- docker:stable-dind
tags:
- performance
variables:
DOCKER_DRIVER: overlay2
DOCKER_HOST: tcp://docker:2375
OPTIONS_FILE: 45s_2rps
script:
- apk add docker
- export MINOR_VERSION=$((MINOR_VERSION_START + CI_NODE_INDEX - 1))
- echo "registry.gitlab.com/gitlab-org/quality/performance-images/gitlab-ce-performance:$MAJOR_VERSION.$MINOR_VERSION.0-ce.0"
- bin/run-gitlab-docker --image registry.gitlab.com/gitlab-org/quality/performance-images/gitlab-ce-performance:$MAJOR_VERSION.$MINOR_VERSION.0-ce.0
- sleep $(( 30 * CI_NODE_INDEX ))
- bin/run-k6 -s -e $ENVIRONMENT_NAME.json -o $OPTIONS_FILE.json -u $([ -n "$QUARANTINED" ] && echo "-q")
only:
refs:
- schedules
variables:
- $TEST_TYPE == "k6 comparison"
artifacts:
when: always
paths:
- k6/results/
expire_in: 14d
retry:
max: 2
when:
- runner_system_failure
- stuck_or_timeout_failure
- unknown_failure
k6-11-compare:
extends: .k6-compare-base
parallel: 5
variables:
MAJOR_VERSION: 11
MINOR_VERSION_START: 7
k6-12-compare:
extends: .k6-compare-base
# Increase this number every month by the 23rd to cover the new 12.x release
parallel: 10
variables:
MAJOR_VERSION: 12
MINOR_VERSION_START: 0
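For contrast with the dynamic approach, the removed jobs above hard-code the version range: each parallel node derives the minor version it tests from `CI_NODE_INDEX`, which is why the `parallel` count has to be bumped by hand every release. A sketch of that arithmetic for `k6-12-compare` as configured here:

```ruby
# Static scheme being removed: MINOR_VERSION = MINOR_VERSION_START + CI_NODE_INDEX - 1,
# with a 30s * CI_NODE_INDEX sleep to stagger the runs.
minor_version_start = 0   # from the k6-12-compare variables above
parallel            = 10  # needs a manual bump for every new 12.x minor release

(1..parallel).each do |ci_node_index|
  minor_version = minor_version_start + ci_node_index - 1
  puts "node #{ci_node_index}: gitlab-ce-performance:12.#{minor_version}.0-ce.0 after #{30 * ci_node_index}s"
end
```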
###############################
## Report Jobs - Slack ##
###############################
report-k6-compare-success-slack:
extends: .report-k6-slack-base
script:
- SUCCESS_TEST_RUN="true" bin/ci-report-results-slack -c $CI_SLACK_CHANNEL
only:
variables:
- $TEST_TYPE == "k6 comparison"
when: on_success
report-k6-compare-failure-slack:
extends: .report-k6-slack-base
script:
- SUCCESS_TEST_RUN="false" bin/ci-report-results-slack -c $CI_SLACK_CHANNEL
only:
variables:
- $TEST_TYPE == "k6 comparison"
when: on_failure
##############################
## Report Jobs - Wiki ##
##############################
report-k6-compare-results-wiki:
extends: .report-k6-wiki-base
script:
- bin/ci-report-results-wiki -p "Benchmarks/GitLab Versions"
only:
variables:
- $TEST_TYPE == "k6 comparison"
# Depends: .gpt-k6-base.yml
###########################
## Commit Checks ##
###########################
#####################
## Commit Checks ##
#####################
check:k6:
stage: check
extends: .k6-base
script:
- ACCESS_TOKEN=$GPT_CHECK_STAGING_ACCESS_TOKEN bin/run-k6 -e staging.json -t api_v4_user.js
except:
- schedules
- tags
only:
- master
- merge_requests
########################
## Environment Jobs ##
@@ -78,8 +78,8 @@ k6:
###############################
report-k6-success-slack:
extends: .report-k6-slack-base
script:
- SUCCESS_TEST_RUN="true" bin/ci-report-results-slack -c $CI_SLACK_CHANNEL
variables:
TEST_RESULT: "passed"
only:
variables:
- $TEST_TYPE == "k6"
@@ -87,8 +87,8 @@ report-k6-success-slack:
report-k6-failure-slack:
extends: .report-k6-slack-base
script:
- SUCCESS_TEST_RUN="false" bin/ci-report-results-slack -c $CI_SLACK_CHANNEL
variables:
TEST_RESULT: "failed"
only:
variables:
- $TEST_TYPE == "k6"
@@ -99,8 +99,8 @@ report-k6-failure-slack:
##############################
report-k6-results-wiki:
extends: .report-k6-wiki-base
script:
- bin/ci-report-results-wiki -p "Benchmarks/Latest/$ENVIRONMENT_NAME"
variables:
WIKI_PAGE: "Benchmarks/Latest/$ENVIRONMENT_NAME"
only:
variables:
- $TEST_TYPE == "k6"
@@ -9,9 +9,9 @@ check:rubocop:
- gem install bundler && bundle config without run && bundle install
script:
- rubocop --parallel
except:
- schedules
- tags
only:
- master
- merge_requests
check:yamllint:
stage: check
@@ -19,9 +19,9 @@ check:yamllint:
image: pipelinecomponents/yamllint
script:
- yamllint .gitlab-ci.yml .gitlab/ci/
except:
- schedules
- tags
only:
- master
- merge_requests
check:eslint:
stage: check
@@ -29,6 +29,6 @@ check:eslint:
script:
- npm install eslint --global
- eslint './**/*.js'
except:
- schedules
- tags
only:
- master
- merge_requests
# 1st Stage
FROM ruby:2.6-alpine AS build
ARG K6_VERSION="0.26.1"
ARG K6_VERSION="0.26.2"
ENV K6_VERSION="${K6_VERSION}"
ENV GEM_HOME="/usr/local/bundle"
ENV PATH $GEM_HOME/bin:$GEM_HOME/gems/bin:$PATH
#!/usr/bin/env ruby
$LOAD_PATH.unshift File.expand_path('../lib', __dir__)
require 'gpt_common'
require 'http'
require 'json'
require 'optimist'
require 'pathname'
require 'rainbow'
require 'semantic'
require 'yaml'
ci_dir = Pathname.new(File.expand_path('../.gitlab/ci', __dir__)).relative_path_from(Dir.pwd)
@opts = Optimist.options do
banner "Usage: ci-generate-compare-config [options]"
banner "\nGenerates config for GitLab Performance Test Comparision pipeline against custom docker images"
banner "\nOptions:"
opt :help, 'Show help message'
opt :target_docker_image, 'Latest GitLab version to target and test against. Defaults to latest release version', type: :string, default: ENV['GPT_CP_DOCKER_IMAGE'] || 'registry.gitlab.com/gitlab-org/quality/performance-images/gitlab-ce-performance'
opt :target_version, 'Latest GitLab docker version to target and test against. Defaults to latest release version', type: :string
opt :target_number, 'Number of previous GitLab docker versions to test and compare against target', type: :int, default: 5
opt :target_base_version, 'Base version that GitLab docker images were built from. This shouldn\'t be changed unless specifically required.', type: :string, default: ENV['GPT_CP_TARGET_BASE_VERSION'] || '12.5.0'
opt :extends, 'CI config key to extend from in generated jobs', type: :string, default: '.k6-compare-base'
opt :conf_file, 'Path where generated CI config file will be saved', type: :string, default: "#{ci_dir}/gpt-k6-compare-jobs.yml"
banner "\nEnvironment Variable(s):"
banner " CI_SLACK_REPORT Set jobs to report results to Slack (Default: nil)"
banner " CI_WIKI_REPORT Set jobs to report results to Wiki (Default: nil)"
end
def get_latest_gitlab_version(search_str: nil)
url = "https://gitlab.com/api/v4/projects/13083/repository/tags" + (search_str ? "?search=#{search_str}" : '')
res = GPTCommon.make_http_request(method: 'get', url: url)
latest_vers_list = JSON.parse(res.body.to_s).reject { |item| item['name'].match(/pre|rc/) }
raise "No GitLab versions found online" + (" with search string '#{search_str}'" if search_str) + '. ' + 'Exiting...' if latest_vers_list.empty?
latest_major_minor_ver = latest_vers_list.map { |ver| Semantic::Version.new(ver['name'].match(/v(\d+\.\d+\.\d+)/)[1]) }.uniq.max
latest_major_minor_ver.patch = 0
latest_major_minor_ver
end
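The tag lookup above filters out pre-releases and release candidates and keeps only the newest `major.minor` line. A small worked example with made-up tag names (project 13083's tags API is the real source at run time):

```ruby
require 'semantic'

# Illustrative tag payload; the real list comes from the GitLab tags API.
tags = [{ 'name' => 'v12.7.4' }, { 'name' => 'v12.7.0-rc42' },
        { 'name' => 'v12.6.7' }, { 'name' => 'v12.8.0-pre' }]

releases = tags.reject { |item| item['name'].match(/pre|rc/) }
latest   = releases.map { |ver| Semantic::Version.new(ver['name'].match(/v(\d+\.\d+\.\d+)/)[1]) }.uniq.max
latest.patch = 0 # only the major.minor line matters for picking docker images
puts latest      # => 12.7.0
```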
def get_last_versions(target_ver:, target_base_version:, target_num:)
puts "Getting last #{target_num} versions from #{target_ver}...\n\n"
last_versions = []
target_num.times do |prev_ver_count|
ver_to_add = target_ver.dup
if ver_to_add.minor - prev_ver_count >= 0
ver_to_add.minor -= prev_ver_count
raise ArgumentError, Rainbow("Version to test, '#{ver_to_add}', is older than the minimum allowed version of '#{target_base_version}'. All versions to be tested must be higher than '#{target_base_version}'. Exiting...").red if ver_to_add < target_base_version
last_versions.prepend(ver_to_add)
elsif (ver_to_add.major - 1).positive?
prev_ver_to_add = get_latest_gitlab_version(search_str: "v#{ver_to_add.major - 1}")
last_versions += get_last_versions(target_ver: prev_ver_to_add, target_base_version: target_base_version, target_num: target_num - prev_ver_count)
break
end
end
last_versions.sort
end
def get_startup_delay(test_ver:, target_base_version:)
min_minor_ver = test_ver.major == target_base_version.major ? target_base_version.minor : 0
30 * (test_ver.minor - min_minor_ver)
end
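Taken together, the two helpers above produce the version list and the per-job delay. A worked sketch under the default `--target-base-version` of 12.5.0 (the network lookup and the rollover to the previous major are described in comments rather than exercised):

```ruby
# get_last_versions walks minor versions down from the target and returns them sorted.
# With target 12.7.0, base 12.5.0 and target_num 3:
target = { major: 12, minor: 7 }
last_versions = (0...3).map { |i| "#{target[:major]}.#{target[:minor] - i}.0" }.sort
p last_versions # => ["12.5.0", "12.6.0", "12.7.0"]
# Asking for more versions would raise, since 12.4.0 < 12.5.0; and if the minor would
# drop below 0, the method instead looks up the previous major's latest release via
# get_latest_gitlab_version and recurses for the remaining count.

# get_startup_delay then offsets each generated job by 30s per minor above the base:
[5, 6, 7].each { |minor| puts "12.#{minor}.0 -> STARTUP_DELAY #{30 * (minor - 5)}s" }
# => 0s, 30s, 60s; .k6-compare-base sleeps for this long before running k6.
```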
target_base_version = Semantic::Version.new(@opts[:target_base_version])
target_ver = @opts[:target_version] ? Semantic::Version.new(@opts[:target_version]) : get_latest_gitlab_version
last_vers = get_last_versions(target_ver: target_ver, target_base_version: target_base_version, target_num: @opts[:target_number])
gpt_compare_conf = {}
last_vers.each do |ver|
gpt_compare_test_name = "gpt-#{ver.major}-#{ver.minor}-compare"
gpt_compare_conf[gpt_compare_test_name] = {}
gpt_compare_conf[gpt_compare_test_name]['extends'] = @opts[:extends] if @opts[:extends]
gpt_compare_conf[gpt_compare_test_name]['variables'] = {
'MAJOR_VERSION' => ver.major,
'MINOR_VERSION' => ver.minor,
'STARTUP_DELAY' => get_startup_delay(test_ver: ver, target_base_version: target_base_version)
}
end
if ENV['CI_SLACK_REPORT'] == 'true'
gpt_compare_conf['report-gpt-compare-success-slack'] = {
'extends' => '.report-k6-slack-base',
'variables' => {
'TEST_RESULT' => 'passed'
},
'when' => 'on_success'
}
gpt_compare_conf['report-gpt-compare-failure-slack'] = {
'extends' => '.report-k6-slack-base',
'variables' => {
'TEST_RESULT' => 'failed'
},
'when' => 'on_failure'
}
end
if ENV['CI_WIKI_REPORT'] == 'true'
gpt_compare_conf['report-gpt-compare-wiki'] = {
'extends' => '.report-k6-wiki-base',
'variables' => {
'WIKI_PAGE' => 'Benchmarks/GitLab Versions'
}
}
end
puts "Generated config:\n#{gpt_compare_conf.to_yaml}\n"
File.write(@opts[:conf_file], gpt_compare_conf.to_yaml)
puts "Saved GPT Comparsion CI config to #{@opts[:conf_file]}\n"
@@ -7,11 +7,12 @@ require 'gpt_common'
require 'http'
require 'json'
require 'optimist'
require 'pathname'
require 'run_k6'
require 'time'
# Get parent folder(`k6`) path from the current file
k6_dir = File.expand_path('../k6', __dir__)
k6_dir = Pathname.new(File.expand_path('../k6', __dir__)).relative_path_from(Dir.pwd)
@opts = Optimist.options do
banner "Usage: ci-report-results-slack [options]"
@@ -19,13 +20,19 @@ k6_dir = File.expand_path('../k6', __dir__)
banner "\nOptions:"
opt :help, 'Show help message'
opt :results_path, "Path of k6 test results files to report on. Can be a directory that will be searched recursively or a direct filepath.", type: :string, default: "#{k6_dir}/results"
opt :test_name, "Name of test that's being reported on", type: :string, default: ENV['TEST_TYPE']
opt :channel, "Slack channel to post results to", type: :string, default: "qa-performance"
opt :test_name, "Name of test that's being reported on.", type: :string, required: true
opt :test_result, "The result of the test. Must be either 'passed' or 'failed'.", type: :string, required: true
opt :channel, "Slack channel to post results to.", type: :string, default: "qa-performance"
banner "\nEnvironment Variable(s):"
banner " SLACK_BOT_TOKEN A valid Slack Token that belongs to a Bot that has permissions for the intended Slack instance. (Default: nil)"
banner " ENVIRONMENT_NAME Name of environment. (Default: nil)"
banner " ENVIRONMENT_GRAFANA_DASHBOARD_URL URL to environment's Grafana dashboard to show in Slack message. (Default: nil)"
banner " GPT_RESULTS_URL URL of Wiki page that also shows tests results to show in Slack message. (Default: nil)"
banner " GPT_KNOWN_ISSUES_URL URL of known GitLab performance issues list. (Default: nil)"
banner " SLACK_BOT_TOKEN A valid Slack Token that belongs to a Bot that has permissions for the intended Slack instance. (Default: nil)"
end
raise 'Environment Variable SLACK_BOT_TOKEN must be set to proceed. See command help for more info' unless ENV['SLACK_BOT_TOKEN']
raise 'Test Result must be either \'passed\' or \'failed\'. Exiting...' unless @opts[:test_result].match?(/passed|failed/)
def post_slack_message(message)
url = "https://slack.com/api/chat.postMessage"
@@ -67,10 +74,10 @@ results_files = @opts[:results_path].include?(".json") ? Dir.glob(@opts[:results
results_file = results_files.length == 1 ? results_files.first : nil
if results_file && File.extname(results_file) == '.json'
puts "Posting result summary and uploading results to Slack:\n#{results_file}"
puts "Posting #{@opts[:test_result]} test result summary and uploading results to Slack:\n#{results_file}"
results_json = JSON.parse(File.read(results_file))
result = ENV['SUCCESS_TEST_RUN'] == "true" || results_json['overall_result']
result = @opts[:test_result] == "passed" || results_json['overall_result']
message = prepare_message_text(result, results_json)
results_summary = RunK6.generate_results_summary(results_json: results_json)
@@ -85,7 +92,7 @@ if results_file && File.extname(results_file) == '.json'
else
puts "Posting result summary to Slack\n"
result = ENV['SUCCESS_TEST_RUN'] == "true"
result = @opts[:test_result] == "passed"
message = prepare_message_text(result)
post_slack_message(message)
end
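The net effect of this hunk and the CI changes above is that the pass/fail state now travels as an explicit `TEST_RESULT` job variable (passed in as `--test-result`) instead of the ad-hoc `SUCCESS_TEST_RUN` environment variable. A minimal sketch of the new resolution logic, with illustrative values:

```ruby
# TEST_RESULT is set per job ("passed"/"failed") and handed in as --test-result.
test_result  = 'failed'
results_json = { 'overall_result' => false } # parsed k6 results file, when one exists

raise "must be 'passed' or 'failed'" unless test_result.match?(/passed|failed/)

result = test_result == 'passed' || results_json['overall_result']
puts result ? 'format Slack message as a pass' : 'format Slack message as a failure'
```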
@@ -20,19 +20,20 @@ k6_dir = File.expand_path('../k6', __dir__)
banner "\nOptions:"
opt :help, 'Show help message'
opt :results_path, "Path of k6 test results files to report on. Can be a directory that will be searched recursively or a direct filepath.", type: :string, default: "#{k6_dir}/results"
opt :page_title, "Title to use for wiki page", type: :string, required: true
opt :page_title, "Title to use for wiki page", type: :string
opt :api_url, "GitLab wiki API URL", type: :string, default: "https://gitlab.com/api/v4/projects/gitlab-org%2Fquality%2Fperformance/wikis"
opt :dry_run, "Only generate Wiki page locally and don't post for testing purposes.", type: :flag
banner "\nEnvironment Variable(s):"
banner " CI_PROJECT_ACCESS_TOKEN A valid GitLab Personal Access Token that has access to the intended project where the wiki page will be posted. The token should come from a User that has admin access for the project(s) and have API permissions. (Default: nil)"
end
raise 'Environment Variable CI_PROJECT_ACCESS_TOKEN must be set to proceed. See command help for more info' unless ENV['CI_PROJECT_ACCESS_TOKEN']
def generate_comparision_table(results)
return nil if results.length <= 1
raise 'Environment Variable CI_PROJECT_ACCESS_TOKEN must be set to proceed. See command help for more info' unless ENV['CI_PROJECT_ACCESS_TOKEN'] || @opts[:dry_run]
raise 'Page Title must be specified' unless @opts[:page_title] || @opts[:dry_run]
def get_comparision_data(results)
results_comparision_data = {}
results_comparision_versions = []
ttfb_overall_target = ENV['GPT_TARGET_TTFB']&.to_f || 500.0
results.each do |result|
results_comparision_versions |= [result["version"].tr('.', '-')]
@@ -41,23 +42,29 @@ def generate_comparision_table(results)
results.each_with_index do |result, result_num|
result['test_results'].each do |test_result|
results_comparision_data[test_result["name"]] ||= Hash[results_comparision_versions.collect { |version| [version, "-"] }]
results_comparision_data[test_result["name"]]['target'] ||= "#{test_result['ttfb_p90_threshold']}ms"
test_result_score = ((test_result['ttfb_p90'].to_f / ttfb_overall_target) * 100)
last_test_result = results[result_num - 1]['test_results'].find { |res| res['name'] == test_result["name"] } unless result_num.zero?
if last_test_result&.dig('ttfb_p90')
last_test_difference = test_result['ttfb_p90'].to_f - last_test_result['ttfb_p90'].to_f
last_test_diff_perc = (last_test_difference * 100 / last_test_result['ttfb_p90'].to_f).round(2)
last_test_difference_str = last_test_difference.positive? ? "_+#{last_test_diff_perc}%_" : "**#{last_test_diff_perc}%**"
last_test_result_score = ((last_test_result['ttfb_p90'].to_f / ttfb_overall_target) * 100)
last_test_result_score_diff = (test_result_score - last_test_result_score).round(2)
last_test_result_score_diff_str = last_test_result_score_diff.positive? ? "_+#{last_test_result_score_diff}%_" : "**#{last_test_result_score_diff}%**"
results_comparision_data[test_result["name"]][result["version"].tr('.', '-')] = "#{test_result['ttfb_p90']}ms (#{last_test_difference_str})"
results_comparision_data[test_result["name"]][result["version"].tr('.', '-')] = "#{test_result['ttfb_p90']}ms (#{last_test_result_score_diff_str})"
else
results_comparision_data[test_result["name"]][result["version"].tr('.', '-')] = test_result['ttfb_p90'] ? "#{test_result['ttfb_p90']}ms (-)" : '-'
end
end
end
results_comparision_data
end
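The score arithmetic above compares p90 TTFB values as percentage points of the overall target (`GPT_TARGET_TTFB`, 500ms by default), which is what the wiki note further down refers to. A worked example, assuming one endpoint measured in two adjacent versions:

```ruby
# Worked example of the per-version score difference (values illustrative):
ttfb_overall_target = 500.0 # GPT_TARGET_TTFB default
last_ttfb_p90       = 450.0 # previous version, ms
ttfb_p90            = 480.0 # current version, ms

last_score = (last_ttfb_p90 / ttfb_overall_target) * 100 # => 90.0% of target
score      = (ttfb_p90 / ttfb_overall_target) * 100      # => 96.0% of target
diff       = (score - last_score).round(2)               # => 6.0 percentage points

# Rendered in the wiki table as "480.0ms (_+6.0%_)"; regressions (positive diffs)
# are italicised and improvements (negative diffs) are bolded.
puts "#{ttfb_p90}ms (#{diff.positive? ? "_+#{diff}%_" : "**#{diff}%**"})"
```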
def generate_comparision_table(results_comparision_data)
results_comparision_table = []
results_comparision_data.sort.each do |name, results|
results_comparision_table << { 'name' => name }.merge(results)
results_comparision_table << { 'name' => name, 'target' => results['target'] }.merge(results)
end
tp.set(:max_width, 60)
@@ -69,10 +76,11 @@ results_unsorted = results_files.map { |results_file| JSON.parse(File.read(resul
results_list = Naturally.sort_by(results_unsorted) { |results| results['version'] }
raise "\nNo results found in specified path(s):\n#{@opts[:results_path]}\nExiting..." if results_list.empty?
results_comparision_data = generate_comparision_table(results_list)
results_comparision_data = get_comparision_data(results_list)
results_comparision_table = generate_comparision_table(results_comparision_data)
results_comparision_contents = results_list.length > 1 ? "[[_TOC_]]\n" : ""
results_comparision_contents << "## Comparisions\n#{results_comparision_data}\n" if results_comparision_data
results_comparision_contents << "## Comparisions\n#{results_comparision_table}\n\nPercentages shown above are [percentage point changes](https://en.wikipedia.org/wiki/Percentage_point) calculated against the ideal target of 500ms unless specified otherwise.\n" if results_comparision_table
results_list.each do |results|
results_summary = RunK6.generate_results_summary(results_json: results)
@@ -87,10 +95,12 @@ results_list.each do |results|
end
results_comparision_file = File.join(@opts[:results_path], "results_comparision.txt")
puts "\nSaving results to #{results_comparision_file}"
puts "Saving results to #{results_comparision_file}"
File.write(results_comparision_file, results_comparision_contents)
puts "\nPosting results to Wiki page"
exit if @opts[:dry_run]
puts "\nPosting results to Wiki page '#{@opts[:page_title]}'"
headers = {
'Authorization': "Bearer #{ENV['CI_PROJECT_ACCESS_TOKEN']}"
}
#!/usr/bin/env ruby
$LOAD_PATH.unshift File.expand_path('../lib', __dir__)
$stdout.sync = true
require 'gpt_common'
require 'open3'
@@ -15,6 +16,7 @@ require 'tty-spinner'
opt :image, "Image to use for Gitlab Docker Container", short: :none, type: :string, required: true
opt :name, "Name to use for Gitlab Docker Container", short: :none, type: :string, default: "gitlab"
opt :hostname, "Address to use that resolves to Gitlab Docker Container", short: :none, type: :string, default: "docker"
opt :project_id, "ID or URL-encoded path of a project on the GitLab image to perform checks.", short: :none, type: :string, default: "qa-perf-testing%2Fgitlabhq"
opt :help, 'Show help message'
end
@@ -23,7 +25,7 @@ raise "Docker not installed. Exiting..." unless Open3.capture2e("docker version"
start_time = Time.now
spinner = TTY::Spinner.new("[:spinner] Starting GitLab Docker")
spinner.auto_spin
gd_out, gd_status = Open3.capture2e("docker run -d --publish 80:80 --publish 443:443 --publish 22:22 --name #{@opts[:name]} --hostname #{@opts[:hostname]} --env GITLAB_OMNIBUS_CONFIG=\"gitlab_rails['initial_root_password'] = 'password'; gitlab_rails['monitoring_whitelist'] = ['0.0.0.0/0']\" #{@opts[:image]}")
gd_out, gd_status = Open3.capture2e("docker run -d --publish 80:80 --publish 443:443 --publish 22:22 --name #{@opts[:name]} --hostname #{@opts[:hostname]} --env GITLAB_OMNIBUS_CONFIG=\"gitlab_rails['monitoring_whitelist'] = ['0.0.0.0/0']\" #{@opts[:image]}")
raise "docker issue:\n#{gd_out}" unless gd_status.success?
spinner.success
@@ -31,20 +33,25 @@ spinner.success
spinner = TTY::Spinner.new("[:spinner] Waiting for GitLab Container to be ready")
spinner.auto_spin
120.times do
docker_healthcheck = Open3.capture2e("docker inspect -f {{.State.Health.Status}} gitlab")[0].strip == 'healthy'
sleep 5
docker_healthcheck = Open3.capture2e("docker inspect -f {{.State.Health.Status}} #{@opts[:name]}")[0].strip == 'healthy'
next unless docker_healthcheck
gitlab_healthcheck = GPTCommon.make_http_request(method: 'get', url: "http://#{@opts[:hostname]}/-/liveness", fail_on_error: false).status.success?
next unless gitlab_healthcheck
homepage = GPTCommon.make_http_request(method: 'get', url: "http://#{@opts[:hostname]}", fail_on_error: false)
homepage_healthcheck = !homepage.body.to_s.include?("Deploy in progress") && homepage.status.success?
next unless homepage_healthcheck
api_healthcheck = true
100.times do
25.times do
api_healthcheck &&= GPTCommon.make_http_request(method: 'get', url: "http://#{@opts[:hostname]}/api/v4/groups", fail_on_error: false).status.success?
end
next unless api_healthcheck
break if docker_healthcheck && gitlab_healthcheck && homepage_healthcheck && api_healthcheck
sleep 5
break
rescue HTTP::ConnectionError
sleep 5
next
@@ -55,6 +62,14 @@ rescue Interrupt
end
spinner.success