GraphQL query to get the configurable AI features with their metadata
Summary
Build the GraphQL query to support the frontend in retrieving the configurable AI features with their metadata.
Implementation plan
See this draft for all of the relevant type definitions and implementations.
The query has to look like this:
query aiFeatureSettings {
aiFeatureSettings {
nodes { # type definition in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165722/diffs#70468c60a569d35dad0b65d045ca6994f0546d4c
feature
title
mainFeature
compatibleLlms
provider
releaseState
selfHostedModel {
name
endpoint
model
}
}
}
}
and the response should look like this:
{
"data": {
"aiFeatureSettings": {
"nodes": [
{
"feature": "code_generations",
"title": "Code Generation",
"mainFeature": "Code Suggestions",
"compatibleLlms": [
"mistral",
"mixtral_8x22b",
"mixtral",
"codestral",
"codellama",
"codegemma",
"deepseekcoder"
],
"provider": "self_hosted",
"releaseState": "GA",
"selfHostedModel": {
"name": "codegemma-local-litellm-ollama",
"endpoint": "http://0.0.0.0:4000",
"model": "codegemma_7b"
}
},
{
"feature": "code_completions",
"title": "Code Completion",
"mainFeature": "Code Suggestions",
"compatibleLlms": [
"codegemma_2b",
"codegemma_7b",
"codellama_13b_code",
"codestral",
"deepseekcoder"
],
"provider": "vendored",
"releaseState": "GA",
"selfHostedModel": {
"name": "codegemma-local-litellm-ollama",
"endpoint": "http://0.0.0.0:4000",
"model": "codegemma_7b"
}
},
{
"feature": "duo_chat",
"title": "Duo Chat",
"mainFeature": "Duo Chat",
"compatibleLlms": [
"mistral",
"mixtral_8x22b",
"mixtral"
],
"provider": "self_hosted",
"releaseState": "BETA",
"selfHostedModel": {
"name": "mistral-local-ollama-",
"endpoint": "http://0.0.0.0:4000",
"model": "mistral"
}
}
]
}
}
}