diff --git a/ConfigurationManager.cpp b/ConfigurationManager.cpp
index 93e790b..f1172f6 100644
--- a/ConfigurationManager.cpp
+++ b/ConfigurationManager.cpp
@@ -176,7 +176,7 @@ void ConfigurationManager::selectModel()
                                            : m_generalSettings.caModel);
 
     if (auto provider = m_providersManager.getProviderByName(providerName)) {
-        if (!provider->supportsModelListing()) {
+        if (!provider->capabilities().testFlag(PluginLLMCore::ProviderCapability::ModelListing)) {
             m_generalSettings.showModelsNotSupportedDialog(*targetSettings);
             return;
         }
diff --git a/pluginllmcore/Provider.hpp b/pluginllmcore/Provider.hpp
index 9986324..9a63215 100644
--- a/pluginllmcore/Provider.hpp
+++ b/pluginllmcore/Provider.hpp
@@ -40,9 +40,10 @@ class QJsonObject;
 namespace QodeAssist::PluginLLMCore {
 
 enum class ProviderCapability {
-    Tools = 0x1,
-    Thinking = 0x2,
-    Image = 0x4,
+    Tools        = 0x1,
+    Thinking     = 0x2,
+    Image        = 0x4,
+    ModelListing = 0x8,
 };
 Q_DECLARE_FLAGS(ProviderCapabilities, ProviderCapability)
 Q_DECLARE_OPERATORS_FOR_FLAGS(ProviderCapabilities)
@@ -59,7 +60,6 @@ public:
     virtual QString url() const = 0;
     virtual QString completionEndpoint() const = 0;
     virtual QString chatEndpoint() const = 0;
-    virtual bool supportsModelListing() const = 0;
     virtual void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/ClaudeProvider.cpp b/providers/ClaudeProvider.cpp
index 2ff303a..127430b 100644
--- a/providers/ClaudeProvider.cpp
+++ b/providers/ClaudeProvider.cpp
@@ -64,11 +64,6 @@ QString ClaudeProvider::chatEndpoint() const
     return "/v1/messages";
 }
 
-bool ClaudeProvider::supportsModelListing() const
-{
-    return true;
-}
-
 void ClaudeProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
@@ -252,7 +247,8 @@ void ClaudeProvider::sendRequest(
 PluginLLMCore::ProviderCapabilities ClaudeProvider::capabilities() const
 {
     return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
-           | PluginLLMCore::ProviderCapability::Image;
+           | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void ClaudeProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/ClaudeProvider.hpp b/providers/ClaudeProvider.hpp
index f0d77b8..2e78a64 100644
--- a/providers/ClaudeProvider.hpp
+++ b/providers/ClaudeProvider.hpp
@@ -37,7 +37,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/CodestralProvider.cpp b/providers/CodestralProvider.cpp
index 8c2902f..63a87db 100644
--- a/providers/CodestralProvider.cpp
+++ b/providers/CodestralProvider.cpp
@@ -33,9 +33,9 @@ QString CodestralProvider::url() const
     return "https://codestral.mistral.ai";
 }
 
-bool CodestralProvider::supportsModelListing() const
+PluginLLMCore::ProviderCapabilities CodestralProvider::capabilities() const
 {
-    return false;
+    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
 }
 
 QString CodestralProvider::apiKey() const
diff --git a/providers/CodestralProvider.hpp b/providers/CodestralProvider.hpp
index e4ea63c..a907827 100644
--- a/providers/CodestralProvider.hpp
+++ b/providers/CodestralProvider.hpp
@@ -28,8 +28,8 @@ class CodestralProvider : public MistralAIProvider
 public:
     QString name() const override;
     QString url() const override;
-    bool supportsModelListing() const override;
     QString apiKey() const override;
+    PluginLLMCore::ProviderCapabilities capabilities() const override;
 };
 
 } // namespace QodeAssist::Providers
diff --git a/providers/GoogleAIProvider.cpp b/providers/GoogleAIProvider.cpp
index 7315983..8acc99d 100644
--- a/providers/GoogleAIProvider.cpp
+++ b/providers/GoogleAIProvider.cpp
@@ -64,11 +64,6 @@ QString GoogleAIProvider::chatEndpoint() const
     return {};
 }
 
-bool GoogleAIProvider::supportsModelListing() const
-{
-    return true;
-}
-
 void GoogleAIProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
@@ -268,7 +263,8 @@ void GoogleAIProvider::sendRequest(
 PluginLLMCore::ProviderCapabilities GoogleAIProvider::capabilities() const
 {
     return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
-           | PluginLLMCore::ProviderCapability::Image;
+           | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void GoogleAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/GoogleAIProvider.hpp b/providers/GoogleAIProvider.hpp
index 86e1f0b..a0037b3 100644
--- a/providers/GoogleAIProvider.hpp
+++ b/providers/GoogleAIProvider.hpp
@@ -37,7 +37,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/LMStudioProvider.cpp b/providers/LMStudioProvider.cpp
index 204a23c..744ad0f 100644
--- a/providers/LMStudioProvider.cpp
+++ b/providers/LMStudioProvider.cpp
@@ -63,11 +63,6 @@ QString LMStudioProvider::chatEndpoint() const
     return "/v1/chat/completions";
 }
 
-bool LMStudioProvider::supportsModelListing() const
-{
-    return true;
-}
-
 QFuture<QStringList> LMStudioProvider::getInstalledModels(const QString &url)
 {
     m_client->setUrl(url);
@@ -179,7 +174,8 @@ void LMStudioProvider::sendRequest(
 
 PluginLLMCore::ProviderCapabilities LMStudioProvider::capabilities() const
 {
-    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
+    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void LMStudioProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/LMStudioProvider.hpp b/providers/LMStudioProvider.hpp
index c4f12fa..3bad21f 100644
--- a/providers/LMStudioProvider.hpp
+++ b/providers/LMStudioProvider.hpp
@@ -36,7 +36,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/LlamaCppProvider.cpp b/providers/LlamaCppProvider.cpp
index 4f6526e..b53c7a1 100644
--- a/providers/LlamaCppProvider.cpp
+++ b/providers/LlamaCppProvider.cpp
@@ -61,11 +61,6 @@ QString LlamaCppProvider::chatEndpoint() const
     return "/v1/chat/completions";
 }
 
-bool LlamaCppProvider::supportsModelListing() const
-{
-    return false;
-}
-
 void LlamaCppProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/LlamaCppProvider.hpp b/providers/LlamaCppProvider.hpp
index 9edfb63..50dc1e6 100644
--- a/providers/LlamaCppProvider.hpp
+++ b/providers/LlamaCppProvider.hpp
@@ -37,7 +37,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/MistralAIProvider.cpp b/providers/MistralAIProvider.cpp
index b9a3b46..b914f1b 100644
--- a/providers/MistralAIProvider.cpp
+++ b/providers/MistralAIProvider.cpp
@@ -62,11 +62,6 @@ QString MistralAIProvider::chatEndpoint() const
     return "/v1/chat/completions";
 }
 
-bool MistralAIProvider::supportsModelListing() const
-{
-    return true;
-}
-
 QFuture<QStringList> MistralAIProvider::getInstalledModels(const QString &url)
 {
     m_client->setUrl(url);
@@ -191,7 +186,8 @@ void MistralAIProvider::sendRequest(
 
 PluginLLMCore::ProviderCapabilities MistralAIProvider::capabilities() const
 {
-    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
+    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void MistralAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/MistralAIProvider.hpp b/providers/MistralAIProvider.hpp
index 4fe781e..7ed9222 100644
--- a/providers/MistralAIProvider.hpp
+++ b/providers/MistralAIProvider.hpp
@@ -36,7 +36,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/OllamaProvider.cpp b/providers/OllamaProvider.cpp
index 064e499..22d4580 100644
--- a/providers/OllamaProvider.cpp
+++ b/providers/OllamaProvider.cpp
@@ -63,11 +63,6 @@ QString OllamaProvider::chatEndpoint() const
     return "/api/chat";
 }
 
-bool OllamaProvider::supportsModelListing() const
-{
-    return true;
-}
-
 void OllamaProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
@@ -275,7 +270,8 @@ void OllamaProvider::sendRequest(
 PluginLLMCore::ProviderCapabilities OllamaProvider::capabilities() const
 {
     return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
-           | PluginLLMCore::ProviderCapability::Image;
+           | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void OllamaProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/OllamaProvider.hpp b/providers/OllamaProvider.hpp
index e0027ab..d28164b 100644
--- a/providers/OllamaProvider.hpp
+++ b/providers/OllamaProvider.hpp
@@ -37,7 +37,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/OpenAICompatProvider.cpp b/providers/OpenAICompatProvider.cpp
index 1b247a3..bb1d2d1 100644
--- a/providers/OpenAICompatProvider.cpp
+++ b/providers/OpenAICompatProvider.cpp
@@ -62,11 +62,6 @@ QString OpenAICompatProvider::chatEndpoint() const
     return "/v1/chat/completions";
 }
 
-bool OpenAICompatProvider::supportsModelListing() const
-{
-    return false;
-}
-
 void OpenAICompatProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/OpenAICompatProvider.hpp b/providers/OpenAICompatProvider.hpp
index 10c0775..f53e635 100644
--- a/providers/OpenAICompatProvider.hpp
+++ b/providers/OpenAICompatProvider.hpp
@@ -36,7 +36,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/OpenAIProvider.cpp b/providers/OpenAIProvider.cpp
index 8cb8cfe..a21ec2a 100644
--- a/providers/OpenAIProvider.cpp
+++ b/providers/OpenAIProvider.cpp
@@ -62,11 +62,6 @@ QString OpenAIProvider::chatEndpoint() const
     return "/v1/chat/completions";
 }
 
-bool OpenAIProvider::supportsModelListing() const
-{
-    return true;
-}
-
 void OpenAIProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
@@ -256,7 +251,8 @@ void OpenAIProvider::sendRequest(
 
 PluginLLMCore::ProviderCapabilities OpenAIProvider::capabilities() const
 {
-    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image;
+    return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void OpenAIProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/OpenAIProvider.hpp b/providers/OpenAIProvider.hpp
index 0d05b17..3e52464 100644
--- a/providers/OpenAIProvider.hpp
+++ b/providers/OpenAIProvider.hpp
@@ -36,7 +36,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
     QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/providers/OpenAIResponsesProvider.cpp b/providers/OpenAIResponsesProvider.cpp
index 03fab1b..47ec307 100644
--- a/providers/OpenAIResponsesProvider.cpp
+++ b/providers/OpenAIResponsesProvider.cpp
@@ -62,11 +62,6 @@ QString OpenAIResponsesProvider::chatEndpoint() const
     return "/v1/responses";
 }
 
-bool OpenAIResponsesProvider::supportsModelListing() const
-{
-    return true;
-}
-
 void OpenAIResponsesProvider::prepareRequest(
     QJsonObject &request,
     PluginLLMCore::PromptTemplate *prompt,
@@ -295,7 +290,8 @@ void OpenAIResponsesProvider::sendRequest(
 PluginLLMCore::ProviderCapabilities OpenAIResponsesProvider::capabilities() const
 {
     return PluginLLMCore::ProviderCapability::Tools | PluginLLMCore::ProviderCapability::Thinking
-           | PluginLLMCore::ProviderCapability::Image;
+           | PluginLLMCore::ProviderCapability::Image
+           | PluginLLMCore::ProviderCapability::ModelListing;
 }
 
 void OpenAIResponsesProvider::cancelRequest(const PluginLLMCore::RequestID &requestId)
diff --git a/providers/OpenAIResponsesProvider.hpp b/providers/OpenAIResponsesProvider.hpp
index 4dfd124..e6d0e24 100644
--- a/providers/OpenAIResponsesProvider.hpp
+++ b/providers/OpenAIResponsesProvider.hpp
@@ -36,7 +36,6 @@ public:
     QString url() const override;
     QString completionEndpoint() const override;
    QString chatEndpoint() const override;
-    bool supportsModelListing() const override;
     void prepareRequest(
         QJsonObject &request,
         PluginLLMCore::PromptTemplate *prompt,
diff --git a/test/LLMClientInterfaceTests.cpp b/test/LLMClientInterfaceTests.cpp
index 1f93507..dab3bb9 100644
--- a/test/LLMClientInterfaceTests.cpp
+++ b/test/LLMClientInterfaceTests.cpp
@@ -62,8 +62,6 @@ public:
     QString url() const override { return "https://mock_url"; }
     QString completionEndpoint() const override { return "/v1/completions"; }
     QString chatEndpoint() const override { return "/v1/chat/completions"; }
-    bool supportsModelListing() const override { return false; }
-
     void prepareRequest(
         QJsonObject &request,
         LLMCore::PromptTemplate *promptTemplate,