refactor: Model listing support is now a capabilities flag

This commit is contained in:
Petr Mironychev
2026-03-31 02:15:38 +02:00
parent 1c12d6d45c
commit 666aa94e81
23 changed files with 22 additions and 71 deletions

View File

@ -64,11 +64,6 @@ QString ClaudeProvider::chatEndpoint() const
return "/v1/messages";
}
bool ClaudeProvider::supportsModelListing() const
{
return true;
}
void ClaudeProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,
@ -252,7 +247,8 @@ void ClaudeProvider::sendRequest(
PluginLLMCore::ProviderCapabilities ClaudeProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
| PluginLLMCore::ProviderCapability::Image;
| PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void ClaudeProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -37,7 +37,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -33,9 +33,9 @@ QString CodestralProvider::url() const
return "https://codestral.mistral.ai";
}
bool CodestralProvider::supportsModelListing() const
PluginLLMCore::ProviderCapabilities CodestralProvider::capabilities() const
{
return false;
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
}
QString CodestralProvider::apiKey() const

View File

@ -28,8 +28,8 @@ class CodestralProvider : public MistralAIProvider
public:
QString name() const override;
QString url() const override;
bool supportsModelListing() const override;
QString apiKey() const override;
PluginLLMCore::ProviderCapabilities capabilities() const override;
};
} // namespace QodeAssist::Providers

View File

@ -64,11 +64,6 @@ QString GoogleAIProvider::chatEndpoint() const
return {};
}
bool GoogleAIProvider::supportsModelListing() const
{
return true;
}
void GoogleAIProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,
@ -268,7 +263,8 @@ void GoogleAIProvider::sendRequest(
PluginLLMCore::ProviderCapabilities GoogleAIProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
| PluginLLMCore::ProviderCapability::Image;
| PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void GoogleAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -37,7 +37,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -63,11 +63,6 @@ QString LMStudioProvider::chatEndpoint() const
return "/v1/chat/completions";
}
bool LMStudioProvider::supportsModelListing() const
{
return true;
}
QFuture<QList<QString>> LMStudioProvider::getInstalledModels(const QString &url)
{
m_client->setUrl(url);
@ -179,7 +174,8 @@ void LMStudioProvider::sendRequest(
PluginLLMCore::ProviderCapabilities LMStudioProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void LMStudioProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -36,7 +36,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -61,11 +61,6 @@ QString LlamaCppProvider::chatEndpoint() const
return "/v1/chat/completions";
}
bool LlamaCppProvider::supportsModelListing() const
{
return false;
}
void LlamaCppProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -37,7 +37,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -62,11 +62,6 @@ QString MistralAIProvider::chatEndpoint() const
return "/v1/chat/completions";
}
bool MistralAIProvider::supportsModelListing() const
{
return true;
}
QFuture<QList<QString>> MistralAIProvider::getInstalledModels(const QString &url)
{
m_client->setUrl(url);
@ -191,7 +186,8 @@ void MistralAIProvider::sendRequest(
PluginLLMCore::ProviderCapabilities MistralAIProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void MistralAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -36,7 +36,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -63,11 +63,6 @@ QString OllamaProvider::chatEndpoint() const
return "/api/chat";
}
bool OllamaProvider::supportsModelListing() const
{
return true;
}
void OllamaProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,
@ -275,7 +270,8 @@ void OllamaProvider::sendRequest(
PluginLLMCore::ProviderCapabilities OllamaProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
| PluginLLMCore::ProviderCapability::Image;
| PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void OllamaProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -37,7 +37,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -62,11 +62,6 @@ QString OpenAICompatProvider::chatEndpoint() const
return "/v1/chat/completions";
}
bool OpenAICompatProvider::supportsModelListing() const
{
return false;
}
void OpenAICompatProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -36,7 +36,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -62,11 +62,6 @@ QString OpenAIProvider::chatEndpoint() const
return "/v1/chat/completions";
}
bool OpenAIProvider::supportsModelListing() const
{
return true;
}
void OpenAIProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,
@ -256,7 +251,8 @@ void OpenAIProvider::sendRequest(
PluginLLMCore::ProviderCapabilities OpenAIProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void OpenAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -36,7 +36,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,

View File

@ -62,11 +62,6 @@ QString OpenAIResponsesProvider::chatEndpoint() const
return "/v1/responses";
}
bool OpenAIResponsesProvider::supportsModelListing() const
{
return true;
}
void OpenAIResponsesProvider::prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,
@ -295,7 +290,8 @@ void OpenAIResponsesProvider::sendRequest(
PluginLLMCore::ProviderCapabilities OpenAIResponsesProvider::capabilities() const
{
return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
| PluginLLMCore::ProviderCapability::Image;
| PluginLLMCore::ProviderCapability::Image
| PluginLLMCore::ProviderCapability::ModelListing;
}
void OpenAIResponsesProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)

View File

@ -36,7 +36,6 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(
QJsonObject &request,
PluginLLMCore::PromptTemplate *prompt,