Skip to content
Snippets Groups Projects
Commit eaae864e authored by George Koltsov's avatar George Koltsov :two:
Browse files

Merge branch '409812-anthropic-client' into 'master'

Implement Gitlab::Llm::Anthropic::Client

See merge request !119651



Merged-by: default avatarGeorge Koltsov <gkoltsov@gitlab.com>
Approved-by: Nicolas Dular's avatarNicolas Dular <ndular@gitlab.com>
Approved-by: default avatarGeorge Koltsov <gkoltsov@gitlab.com>
Reviewed-by: Nicolas Dular's avatarNicolas Dular <ndular@gitlab.com>
Reviewed-by: Adam Hegyi's avatarAdam Hegyi <ahegyi@gitlab.com>
Co-authored-by: Patrick Bajao's avatarPatrick Bajao <ebajao@gitlab.com>
parents 98739b88 c28a1f46
No related branches found
No related tags found
2 merge requests!122597doc/gitaly: Remove references to removed metrics,!119651Implement Gitlab::Llm::Anthropic::Client
Pipeline #860379969 canceled
......@@ -715,6 +715,7 @@ def self.kroki_formats_attributes
attr_encrypted :product_analytics_clickhouse_connection_string, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
attr_encrypted :product_analytics_configurator_connection_string, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
attr_encrypted :openai_api_key, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
attr_encrypted :anthropic_api_key, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
# TOFA API integration settings
attr_encrypted :tofa_client_library_args, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
attr_encrypted :tofa_client_library_class, encryption_options_base_32_aes_256_gcm.merge(encode: false, encode_iv: false)
......
# frozen_string_literal: true

# Adds the encrypted storage columns for the Anthropic API key on
# +application_settings+: the ciphertext column and its initialization
# vector, as expected by the `attr_encrypted :anthropic_api_key`
# declaration (aes-256-gcm, no encoding) on ApplicationSetting.
class AddAnthropicApiKeyApplicationSetting < Gitlab::Database::Migration[2.1]
  def change
    # Reversible: `change` lets Rails derive the down migration
    # (remove_column) automatically.
    add_column :application_settings, :encrypted_anthropic_api_key, :binary
    add_column :application_settings, :encrypted_anthropic_api_key_iv, :binary
  end
end
73e02a60ed38365748c54399ccdd7ccfdfaac901682e84c1f52b113b30f682e6
\ No newline at end of file
......@@ -11829,6 +11829,8 @@ CREATE TABLE application_settings (
encrypted_tofa_access_token_expires_in bytea,
encrypted_tofa_access_token_expires_in_iv bytea,
remember_me_enabled boolean DEFAULT true NOT NULL,
encrypted_anthropic_api_key bytea,
encrypted_anthropic_api_key_iv bytea,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
......@@ -76,7 +76,8 @@ def visible_attributes
:telesign_customer_xid,
:telesign_api_key,
:openai_api_key,
:security_policy_global_group_approvers_enabled
:security_policy_global_group_approvers_enabled,
:anthropic_api_key
].tap do |settings|
next unless ::Gitlab.com?
......
......@@ -63,6 +63,7 @@ module SettingsHelpers
optional :git_rate_limit_users_alertlist, type: Array[Integer], desc: 'List of user ids who will be emailed when Git abuse rate limit is exceeded'
optional :auto_ban_user_on_excessive_projects_download, type: Grape::API::Boolean, desc: 'Ban users from the application when they exceed maximum number of unique projects download in the specified time period'
optional :openai_api_key, type: String, desc: "OpenAI API key"
optional :anthropic_api_key, type: String, desc: "Anthropic API key"
end
end
......
# frozen_string_literal: true

module Gitlab
  module Llm
    module Anthropic
      # Thin HTTP client for the Anthropic text-completion API.
      #
      # The API key is read from the application settings
      # (`anthropic_api_key`). When no key is configured, `#complete`
      # short-circuits and returns nil without issuing a request.
      # Requests are retried with exponential backoff via the included
      # concern.
      class Client
        include Gitlab::Llm::Concerns::ExponentialBackoff

        URL = 'https://api.anthropic.com'
        DEFAULT_MODEL = 'claude-v1.3'
        DEFAULT_TEMPERATURE = 0.7
        DEFAULT_MAX_TOKENS = 16

        # @param user [User] the user on whose behalf requests are made
        def initialize(user)
          @user = user
        end

        # Requests a completion for +prompt+ from the `/v1/complete`
        # endpoint.
        #
        # @param prompt [String] the text to complete
        # @param options [Hash] request-body overrides merged over the
        #   defaults (e.g. +temperature:+, +max_tokens_to_sample:+)
        # @return [Object, nil] the HTTP response, or nil when no API
        #   key is configured
        def complete(prompt:, **options)
          return unless enabled?

          endpoint = URI.join(URL, '/v1/complete')
          payload = request_body(prompt: prompt, options: options)

          Gitlab::HTTP.post(endpoint, headers: request_headers, body: payload.to_json)
        end

        private

        attr_reader :user

        retry_methods_with_exponential_backoff :complete

        # Only talk to the API when a key has been configured.
        def enabled?
          api_key.present?
        end

        def api_key
          @api_key ||= ::Gitlab::CurrentSettings.anthropic_api_key
        end

        def request_headers
          {
            'Accept' => 'application/json',
            'Content-Type' => 'application/json',
            'x-api-key' => api_key
          }
        end

        # Builds the default request body and lets caller-supplied
        # +options+ override any of its keys.
        def request_body(prompt:, options: {})
          defaults = {
            prompt: prompt,
            model: DEFAULT_MODEL,
            max_tokens_to_sample: DEFAULT_MAX_TOKENS,
            temperature: DEFAULT_TEMPERATURE
          }

          defaults.merge(options)
        end
      end
    end
  end
end
......@@ -40,6 +40,10 @@
it 'contains openai_api_key value' do
expect(visible_attributes).to include(*%i(openai_api_key))
end
it 'contains anthropic_api_key value' do
expect(visible_attributes).to include(*%i(anthropic_api_key))
end
end
describe '.registration_features_can_be_prompted?' do
......
# frozen_string_literal: true

require 'spec_helper'

# Specs for the Anthropic completion client: verifies request shape
# (headers + JSON body), option overrides, and the nil short-circuit
# when no API key is configured.
RSpec.describe Gitlab::Llm::Anthropic::Client, feature_category: :shared do
  include StubRequests

  let_it_be(:user) { create(:user) }

  let(:api_key) { 'api-key' }
  let(:options) { {} }

  # Headers the client is expected to send on every request.
  let(:expected_request_headers) do
    {
      'Accept' => 'application/json',
      'Content-Type' => 'application/json',
      'x-api-key' => api_key
    }
  end

  # Default request body built from the client's class constants.
  let(:expected_request_body) do
    {
      prompt: 'anything',
      model: described_class::DEFAULT_MODEL,
      max_tokens_to_sample: described_class::DEFAULT_MAX_TOKENS,
      temperature: described_class::DEFAULT_TEMPERATURE
    }
  end

  # Canned response matching the shape of the Anthropic /v1/complete API.
  let(:expected_response) do
    {
      'completion' => 'Response',
      'stop' => nil,
      'stop_reason' => 'max_tokens',
      'truncated' => false,
      'log_id' => 'b454d92a4e108ab78dcccbcc6c83f7ba',
      'model' => 'claude-v1.3',
      'exception' => nil
    }
  end

  before do
    stub_application_setting(anthropic_api_key: api_key)

    # The stub matches on body AND headers, so these examples also
    # assert the exact request the client builds.
    stub_full_request("#{described_class::URL}/v1/complete", method: :post)
      .with(
        body: expected_request_body,
        headers: expected_request_headers
      )
      .to_return(
        status: 200,
        body: expected_response.to_json,
        headers: { 'Content-Type' => 'application/json' }
      )
  end

  describe '#complete' do
    subject(:complete) { described_class.new(user).complete(prompt: 'anything', **options) }

    context 'when feature flag and API key is set' do
      it 'returns response' do
        expect(complete.parsed_response).to eq(expected_response)
      end
    end

    context 'when using options' do
      let(:options) { { temperature: 0.1 } }

      # Caller-supplied options must override the default body values.
      let(:expected_request_body) do
        {
          prompt: 'anything',
          model: described_class::DEFAULT_MODEL,
          max_tokens_to_sample: described_class::DEFAULT_MAX_TOKENS,
          temperature: options[:temperature]
        }
      end

      it 'returns response' do
        expect(complete.parsed_response).to eq(expected_response)
      end
    end

    context 'when the API key is not present' do
      let(:api_key) { nil }

      # Without a key the client short-circuits and makes no request.
      it { is_expected.to be_nil }
    end
  end
end
......@@ -136,6 +136,15 @@
expect(json_response['openai_api_key']).to eq('OPENAI_API_KEY')
end
end
# The key must be writable through the settings API and echoed back
# in the JSON response (round-trips through attr_encrypted storage).
context 'anthropic api key setting' do
  it 'updates anthropic_api_key' do
    put api('/application/settings', admin, admin_mode: true), params: { anthropic_api_key: 'ANTHROPIC_API_KEY' }

    expect(response).to have_gitlab_http_status(:ok)
    expect(json_response['anthropic_api_key']).to eq('ANTHROPIC_API_KEY')
  end
end
end
shared_examples 'settings for licensed features' do
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment