Skip to content
Snippets Groups Projects
Verified Commit d52fd21f authored by Vitali Tatarintev's avatar Vitali Tatarintev Committed by GitLab
Browse files

Store additional context into chat history

parent 91a6debf
No related branches found
No related tags found
1 merge request!164197Store additional context into chat history
Showing
with 294 additions and 57 deletions
......@@ -17328,6 +17328,19 @@ Information about a connected Agent.
| <a id="aggregationstatusestimatednextupdateat"></a>`estimatedNextUpdateAt` | [`Time`](#time) | Estimated time when the next incremental update will happen. |
| <a id="aggregationstatuslastupdateat"></a>`lastUpdateAt` | [`Time`](#time) | Last incremental update time. |
 
### `AiAdditionalContext`
Additional context for AI message.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiadditionalcontextcategory"></a>`category` | [`AiAdditionalContextCategory!`](#aiadditionalcontextcategory) | Category of the additional context. |
| <a id="aiadditionalcontextcontent"></a>`content` | [`String!`](#string) | Content of the additional context. |
| <a id="aiadditionalcontextid"></a>`id` | [`ID!`](#id) | ID of the additional context. |
| <a id="aiadditionalcontextmetadata"></a>`metadata` | [`JSON`](#json) | Metadata of the additional context. |
### `AiAgent`
 
An AI agent.
......@@ -17364,6 +17377,7 @@ AI features communication message.
 
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aimessageadditionalcontext"></a>`additionalContext` | [`[AiAdditionalContext!]`](#aiadditionalcontext) | Additional context for the message. |
| <a id="aimessageagentversionid"></a>`agentVersionId` | [`AiAgentVersionID`](#aiagentversionid) | Global ID of the agent version to answer the message. |
| <a id="aimessagechunkid"></a>`chunkId` | [`Int`](#int) | Incremental ID for a chunk from a streamed message. Null when it is not a streamed message. |
| <a id="aimessagecontent"></a>`content` | [`String`](#string) | Raw response content. |
# frozen_string_literal: true
module Types
module Ai
# rubocop: disable Graphql/AuthorizeTypes -- Same reason as AIMessageExtrasType
class AdditionalContextType < Types::BaseObject
graphql_name 'AiAdditionalContext'
description "Additional context for AI message."
def self.authorization_scopes
[:api, :read_api, :ai_features]
end
field :category, Types::Ai::AdditionalContextCategoryEnum,
null: false,
description: 'Category of the additional context.'
field :id, GraphQL::Types::ID, # rubocop:disable GraphQL/FieldHashKey -- We want to use the `id` field name https://gitlab.com/gitlab-org/gitlab/-/issues/481548
null: false,
description: 'ID of the additional context.'
field :content, GraphQL::Types::String,
null: false,
description: 'Content of the additional context.'
field :metadata, GraphQL::Types::JSON, # rubocop:disable Graphql/JSONType -- As per discussion on https://gitlab.com/gitlab-org/gitlab/-/issues/481548, we want metadata to be a flexible unstructured field
null: true,
description: 'Metadata of the additional context.'
def id
object['id']
end
end
# rubocop: enable Graphql/AuthorizeTypes
end
end
......@@ -73,6 +73,12 @@ def self.authorization_scopes
scopes: [:api, :read_api, :ai_features],
description: 'Global ID of the agent version to answer the message.'
field :additional_context,
[Types::Ai::AdditionalContextType],
null: true,
scopes: [:api, :read_api, :ai_features],
description: 'Additional context for the message.'
def id
object['id']
end
......
......@@ -60,13 +60,15 @@ def ai_action
def build_prompt_message(attributes = options)
action_name = attributes[:ai_action] || ai_action
message_attributes = {
request_id: SecureRandom.uuid,
content: content(action_name),
role: ::Gitlab::Llm::AiMessage::ROLE_USER,
ai_action: action_name,
user: user,
context: ::Gitlab::Llm::AiMessageContext.new(resource: resource, user_agent: attributes[:user_agent])
context: ::Gitlab::Llm::AiMessageContext.new(resource: resource, user_agent: attributes[:user_agent]),
additional_context: ::Gitlab::Llm::AiMessageAdditionalContext.new(attributes[:additional_context])
}.merge(attributes)
::Gitlab::Llm::AiMessage.for(action: action_name).new(message_attributes)
end
......
......@@ -19,7 +19,8 @@ def execute(safe_params: {})
ai_action: action_name,
user: current_user,
context: ::Gitlab::Llm::AiMessageContext.new(resource: resource),
client_subscription_id: safe_params[:client_subscription_id]
client_subscription_id: safe_params[:client_subscription_id],
additional_context: ::Gitlab::Llm::AiMessageAdditionalContext.new(safe_params[:additional_context])
}
reset_chat(action_name, message_attributes) if safe_params[:with_clean_history]
......
......@@ -13,7 +13,7 @@ class AiMessage
ATTRIBUTES_LIST = [
:id, :request_id, :content, :role, :timestamp, :errors, :extras,
:user, :ai_action, :client_subscription_id, :type, :chunk_id, :context,
:agent_version_id, :referer_url, :platform_origin
:agent_version_id, :referer_url, :platform_origin, :additional_context
].freeze
SLASH_COMMAND_TOOLS = [
......@@ -48,6 +48,7 @@ def initialize(attributes = {})
@timestamp ||= Time.current
@errors ||= []
@extras ||= {}
@additional_context ||= []
end
def to_h
......
# frozen_string_literal: true
module Gitlab
module Llm
class AiMessageAdditionalContext
include ActiveModel::AttributeAssignment
def initialize(data = [])
@data = Array.wrap(data).map do |item_attrs|
AiMessageAdditionalContextItem.new(item_attrs)
end
end
def to_a
@data.map(&:to_h)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Llm
class AiMessageAdditionalContextItem
include ActiveModel::AttributeAssignment
ATTRIBUTES_LIST = [
:id,
:category,
:content,
:metadata
].freeze
attr_accessor(*ATTRIBUTES_LIST)
delegate :[], :[]=, to: :attributes
def initialize(attributes = {})
assign_attributes(attributes.with_indifferent_access.slice(*ATTRIBUTES_LIST))
end
def to_h
ATTRIBUTES_LIST.index_with do |attr|
public_send(attr) # rubocop:disable GitlabSecurity/PublicSend -- to avoid duplication with ATTRIBUTES_LIST.
end.compact.with_indifferent_access
end
end
end
end
......@@ -125,6 +125,7 @@ def dump_message(message)
result['extras'] = message.extras&.to_json
result['timestamp'] = message.timestamp&.to_s
result['content'] = result['content'][0, MAX_TEXT_LIMIT] if result['content']
result['additional_context'] = message.additional_context.to_a.to_json if message.additional_context.present?
result.compact
end
......@@ -136,6 +137,7 @@ def load_message(data)
data['ai_action'] = 'chat'
data['user'] = user
data['agent_version_id'] = agent_version_id
data['additional_context'] = ::Gitlab::Json.parse(data['additional_context']) if data['additional_context']
ChatMessage.new(data)
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['AiAdditionalContext'], feature_category: :duo_chat do
describe '.authorization' do
it 'allows ai_features scope token' do
expect(described_class.authorization.permitted_scopes).to include(:ai_features)
end
end
end
......@@ -10,7 +10,9 @@
it { expect(described_class.graphql_name).to eq('AiMessage') }
it 'has the expected fields' do
expected_fields = %w[id request_id content content_html role timestamp errors type chunk_id agent_version_id]
expected_fields = %w[
id request_id content content_html role timestamp errors type chunk_id agent_version_id additional_context
]
expect(described_class).to include_graphql_fields(*expected_fields)
end
......
......@@ -6,8 +6,23 @@
let(:current_user) { create(:user) }
let(:request_id) { 'uuid' }
let(:content) { 'Explain this code' }
let(:options) { {} }
let(:completions_params) { { request_id: request_id, client_subscription_id: nil, content: content }.merge(options) }
let(:options) do
{
additional_context: [
{ category: 'file', id: 'additonial_context.rb', content: 'puts "additional context"' },
{ category: 'snippet', id: 'print_context_method', content: 'def additional_context; puts "context"; end' }
]
}
end
let(:completions_params) do
{
request_id: request_id,
client_subscription_id: nil,
content: content
}.merge(options)
end
let(:referer_url) { 'http://127.0.0.1:3000/gitlab-org/gitlab-shell/-/blob/main/cmd/gitlab-shell/main.go?ref_type=heads' }
let(:chat) { instance_double(Llm::Internal::CompletionService) }
let(:blob) { instance_double(Gitlab::Git::Blob) }
......@@ -21,7 +36,10 @@
ai_action: 'chat',
user: current_user,
context: an_object_having_attributes(resource: resource),
client_subscription_id: nil
client_subscription_id: nil,
additional_context: an_object_having_attributes(
to_a: Array.wrap(completions_params[:additional_context]).map(&:stringify_keys)
)
}
end
......@@ -33,12 +51,14 @@
allow(SecureRandom).to receive(:uuid).and_return('uuid')
end
it 'saves question in the chat storage' do
it 'saves question in the chat storage', :aggregate_failures do
chat_completions
expect(Gitlab::Llm::ChatStorage.new(current_user)
.last_conversation
.reverse.find { |message| message.role == 'user' }.content).to eq(content)
last_user_message = Gitlab::Llm::ChatStorage.new(current_user)
.last_conversation.reverse.find { |message| message.role == 'user' }
expect(last_user_message.content).to eq(content)
expect(last_user_message.additional_context).to eq(completions_params[:additional_context].map(&:stringify_keys))
end
context 'with a referer URL' do
......@@ -55,10 +75,7 @@
end
end
context 'with an issue' do
let_it_be(:issue) { create(:issue) }
let(:resource) { issue }
shared_examples 'sending resource to the chat' do
it 'sends resource to the chat' do
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
......@@ -69,6 +86,13 @@
end
end
context 'with an issue' do
let_it_be(:issue) { create(:issue) }
let(:resource) { issue }
it_behaves_like 'sending resource to the chat'
end
context 'with an epic' do
let(:epic) { create(:epic) }
let(:resource) { epic }
......@@ -77,56 +101,28 @@
stub_licensed_features(epics: true)
end
it 'sends resource to the chat' do
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
expect(chat).to receive(:execute)
chat_completions
end
it_behaves_like 'sending resource to the chat'
end
context 'with project' do
let_it_be(:project) { create(:project) }
let(:resource) { project }
it 'sends resource to the chat' do
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
expect(chat).to receive(:execute)
chat_completions
end
it_behaves_like 'sending resource to the chat'
end
context 'with group' do
let_it_be(:group) { create(:group) }
let(:resource) { group }
it 'sends resource to the chat' do
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
expect(chat).to receive(:execute)
chat_completions
end
it_behaves_like 'sending resource to the chat'
end
context 'without resource' do
let(:params) { { content: content } }
let(:resource) { current_user }
it 'sends resource to the chat' do
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
expect(chat).to receive(:execute)
chat_completions
end
it_behaves_like 'sending resource to the chat'
end
context 'with reset_history' do
......@@ -142,7 +138,7 @@
expect(chat_message).to receive(:save!)
expect(reset_message).to receive(:save!).twice
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, {}).and_return(chat)
expect(chat).to receive(:execute)
chat_completions
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Llm::AiMessageAdditionalContextItem, feature_category: :duo_chat do
subject(:item) { described_class.new(data) }
let(:data) do
{
category: 'file',
id: 'additonial_context.rb',
content: 'puts "additional context"',
metadata: { 'something' => 'something' }.to_json
}
end
describe '#to_h' do
it 'returns hash with all attributes' do
expect(item.to_h).to eq(data.stringify_keys)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Llm::AiMessageAdditionalContext, feature_category: :duo_chat do
subject(:message_additional_context) { described_class.new(data) }
let(:data) do
[
{
category: 'file',
id: 'additonial_context.rb',
content: 'puts "additional context"',
metadata: { 'something' => 'something' }.to_json
},
{
category: 'snippet',
id: 'print_context_method',
content: 'def additional_context; puts "context"; end',
metadata: { 'something' => 'something else' }.to_json
}
]
end
describe '#to_a' do
it 'returns list of hashes with all attributes' do
expect(message_additional_context.to_a).to eq(data.map(&:stringify_keys))
end
end
end
......@@ -23,7 +23,13 @@
type: 'tool',
context: Gitlab::Llm::AiMessageContext.new(resource: user),
agent_version_id: 1,
referer_url: 'http://127.0.0.1:3000'
referer_url: 'http://127.0.0.1:3000',
additional_context: Gitlab::Llm::AiMessageAdditionalContext.new(
[
{ category: 'file', id: 'additonial_context.rb', content: 'puts "additional context"' },
{ category: 'snippet', id: 'print_context_method', content: 'def additional_context; puts "context"; end' }
]
)
}
end
......
......@@ -15,7 +15,13 @@
role: 'user',
content: 'response',
user: user,
referer_url: 'http://127.0.0.1:3000'
referer_url: 'http://127.0.0.1:3000',
additional_context: Gitlab::Llm::AiMessageAdditionalContext.new(
[
{ category: 'file', id: 'additonial_context.rb', content: 'puts "additional context"' },
{ category: 'snippet', id: 'print_context_method', content: 'def additional_context; puts "context"; end' }
]
)
}
end
......@@ -50,6 +56,7 @@
expect(last.ai_action).to eq('chat')
expect(last.timestamp).not_to be_nil
expect(last.referer_url).to eq('http://127.0.0.1:3000')
expect(last.additional_context.to_a).to eq(payload[:additional_context].to_a)
end
context 'with MAX_MESSAGES limit' do
......@@ -90,6 +97,10 @@
it 'returns all records for this user' do
expect(subject.messages.map(&:content)).to eq(%w[msg1 msg2 msg3])
end
it 'a message contains additional context' do
expect(subject.messages.last.additional_context.to_a).to eq(payload[:additional_context].to_a)
end
end
describe '#messages_by' do
......
......@@ -160,7 +160,10 @@
ai_action: 'chat',
user: current_user,
context: an_object_having_attributes(resource: resource),
client_subscription_id: nil
client_subscription_id: nil,
additional_context: an_object_having_attributes(
to_a: Array.wrap(params[:additional_context]).map(&:stringify_keys)
)
}
end
......@@ -311,7 +314,8 @@
let(:additional_context) { [{ type: "file", name: "test.py", content: "print('hello world')" }] }
let(:options) { { additional_context: additional_context } }
it 'sends additional context to the chat' do
xit 'sends additional context to the chat' do
pending 'Fix in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164944/'
expect(chat_message).to receive(:save!)
expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
......
......@@ -22,6 +22,12 @@
role
timestamp
errors
additionalContext {
category
id
content
metadata
}
}
GRAPHQL
end
......@@ -36,7 +42,16 @@
subject { graphql_data.dig('aiMessages', 'nodes') }
before do
create(:ai_chat_message, request_id: 'uuid1', role: 'user', content: 'question 1', user: user)
create(
:ai_chat_message,
request_id: 'uuid1',
role: 'user',
content: 'question 1',
user: user,
additional_context: [
{ category: 'file', id: 'hello.rb', content: 'puts "hello"', metadata: '{"file_name":"hello.rb"}' }
]
)
create(:ai_chat_message, request_id: 'uuid1', role: 'assistant', content: response_content, user: user)
# should not be included in response because it's for other user
create(:ai_chat_message, request_id: 'uuid1', role: 'user', content: 'question 2', user: other_user)
......@@ -59,19 +74,32 @@
post_graphql(query, current_user: current_user)
expect(subject).to eq([
{ 'requestId' => 'uuid1',
{
'requestId' => 'uuid1',
'content' => 'question 1',
'contentHtml' => '<p data-sourcepos="1:1-1:10" dir="auto">question 1</p>',
'role' => 'USER',
'errors' => [],
'timestamp' => Time.current.iso8601 },
{ 'requestId' => 'uuid1',
'timestamp' => Time.current.iso8601,
'additionalContext' => [
{
'category' => 'FILE',
'id' => 'hello.rb',
'content' => 'puts "hello"',
'metadata' => '{"file_name":"hello.rb"}'
}
]
},
{
'requestId' => 'uuid1',
'content' => response_content,
'contentHtml' => "<p data-sourcepos=\"1:1-1:#{response_content.size}\" dir=\"auto\">response " \
"<a href=\"#{external_issue_url}+\">#{external_issue_url}+</a></p>",
'role' => 'ASSISTANT',
'errors' => [],
'timestamp' => Time.current.iso8601 }
'timestamp' => Time.current.iso8601,
'additionalContext' => []
}
])
end
end
......
......@@ -137,5 +137,21 @@
expect(graphql_mutation_response(:ai_action)['errors']).to eq([])
end
it 'stores additional context into chat history' do
expect(Llm::CompletionWorker).to receive(:perform_for).with(
an_object_having_attributes(
user: current_user,
resource: resource,
ai_action: :chat,
content: "summarize"),
hash_including(additional_context: expected_additional_context)
)
post_graphql_mutation(mutation, current_user: current_user)
last_message = Gitlab::Llm::ChatStorage.new(current_user).messages.last
expect(last_message.additional_context).to eq(expected_additional_context.map(&:stringify_keys))
end
end
end
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment