From db82fb08e811c41d051e965953ac5d2e0aae531b Mon Sep 17 00:00:00 2001
From: Petr Mironychev <9195189+Palm1r@users.noreply.github.com>
Date: Fri, 31 Oct 2025 16:09:38 +0100
Subject: [PATCH] feat: Add chat-agent switcher in chat ui (#247)

* feat: Add chat-agent switcher in chat ui

fix: qml errors
refactor: Change top bar layout
fix: default value

* fix: update github action for qtc
---
 .github/workflows/build_cmake.yml  |   2 +-
 ChatView/ChatRootView.cpp          |  35 ++++-
 ChatView/ChatRootView.hpp          |  11 ++
 ChatView/ClientInterface.cpp       |  13 +-
 ChatView/ClientInterface.hpp       |   3 +-
 ChatView/qml/RootItem.qml          |  13 +-
 ChatView/qml/parts/TopBar.qml      | 222 +++++++++++++++++------
 LLMClientInterface.cpp             |   3 +-
 QuickRefactorHandler.cpp           |   6 +-
 UIControls/qml/QoATextSlider.qml   |   1 -
 llmcore/Provider.hpp               |   3 +-
 providers/ClaudeProvider.cpp       |   6 +-
 providers/ClaudeProvider.hpp       |   3 +-
 providers/GoogleAIProvider.cpp     |   6 +-
 providers/GoogleAIProvider.hpp     |   3 +-
 providers/LMStudioProvider.cpp     |   6 +-
 providers/LMStudioProvider.hpp     |   3 +-
 providers/LlamaCppProvider.cpp     |   6 +-
 providers/LlamaCppProvider.hpp     |   3 +-
 providers/MistralAIProvider.cpp    |   6 +-
 providers/MistralAIProvider.hpp    |   3 +-
 providers/OllamaProvider.cpp       |   6 +-
 providers/OllamaProvider.hpp       |   3 +-
 providers/OpenAICompatProvider.cpp |   6 +-
 providers/OpenAICompatProvider.hpp |   3 +-
 providers/OpenAIProvider.cpp       |   6 +-
 providers/OpenAIProvider.hpp       |   3 +-
 27 files changed, 244 insertions(+), 140 deletions(-)

diff --git a/.github/workflows/build_cmake.yml b/.github/workflows/build_cmake.yml
index 1c48fe9..32e53a0 100644
--- a/.github/workflows/build_cmake.yml
+++ b/.github/workflows/build_cmake.yml
@@ -199,7 +199,7 @@ jobs:
           endif()
 
       - name: Download Qt Creator
-        uses: qt-creator/install-dev-package@4046eda2efa77c0fe61d4cde7e622c050a4d65af
+        uses: qt-creator/install-dev-package@1460787a21551eb3d867b0de30e8d3f1aadef5ac
         with:
           version: ${{ matrix.qt_config.qt_creator_version }}
           unzip-to: 'qtcreator'
diff --git a/ChatView/ChatRootView.cpp b/ChatView/ChatRootView.cpp
index b709f72..55268b2 100644
--- a/ChatView/ChatRootView.cpp
+++ b/ChatView/ChatRootView.cpp
@@ -49,6 +49,7 @@ ChatRootView::ChatRootView(QQuickItem *parent)
     , m_promptProvider(LLMCore::PromptTemplateManager::instance())
     , m_clientInterface(new ClientInterface(m_chatModel, &m_promptProvider, this))
     , m_isRequestInProgress(false)
+    , m_isAgentMode(false)
 {
     m_isSyncOpenFiles = Settings::chatAssistantSettings().linkOpenFiles();
     connect(
@@ -142,12 +143,20 @@ ChatRootView::ChatRootView(QQuickItem *parent)
     updateInputTokensCount();
     refreshRules();
 
-    // Refresh rules when project changes
     connect(
         ProjectExplorer::ProjectManager::instance(),
         &ProjectExplorer::ProjectManager::startupProjectChanged,
         this,
        &ChatRootView::refreshRules);
+
+    QSettings appSettings;
+    m_isAgentMode = appSettings.value("QodeAssist/Chat/AgentMode", true).toBool();
+
+    connect(
+        &Settings::generalSettings().useTools,
+        &Utils::BaseAspect::changed,
+        this,
+        &ChatRootView::toolsSupportEnabledChanged);
 }
 
 ChatModel *ChatRootView::chatModel() const
@@ -173,7 +182,7 @@
         }
     }
 
-    m_clientInterface->sendMessage(message, m_attachmentFiles, m_linkedFiles);
+    m_clientInterface->sendMessage(message, m_attachmentFiles, m_linkedFiles, m_isAgentMode);
     clearAttachmentFiles();
     setRequestProgressStatus(true);
 }
@@ -704,4 +713,26 @@ void ChatRootView::refreshRules()
     emit activeRulesCountChanged();
 }
 
+bool ChatRootView::isAgentMode() const
+{
+    return m_isAgentMode;
+}
+
+void ChatRootView::setIsAgentMode(bool newIsAgentMode)
+{
+    if (m_isAgentMode != newIsAgentMode) {
+        m_isAgentMode = newIsAgentMode;
+
+        QSettings settings;
+        settings.setValue("QodeAssist/Chat/AgentMode", newIsAgentMode);
+
+        emit isAgentModeChanged();
+    }
+}
+
+bool ChatRootView::toolsSupportEnabled() const
+{
+    return Settings::generalSettings().useTools();
+}
+
 } // namespace QodeAssist::Chat
diff --git a/ChatView/ChatRootView.hpp b/ChatView/ChatRootView.hpp
index 7d94a29..f6c3042 100644
--- a/ChatView/ChatRootView.hpp
+++ b/ChatView/ChatRootView.hpp
@@ -47,6 +47,9 @@ class ChatRootView : public QQuickItem
     Q_PROPERTY(QString lastErrorMessage READ lastErrorMessage NOTIFY lastErrorMessageChanged FINAL)
     Q_PROPERTY(QVariantList activeRules READ activeRules NOTIFY activeRulesChanged FINAL)
     Q_PROPERTY(int activeRulesCount READ activeRulesCount NOTIFY activeRulesCountChanged FINAL)
+    Q_PROPERTY(bool isAgentMode READ isAgentMode WRITE setIsAgentMode NOTIFY isAgentModeChanged FINAL)
+    Q_PROPERTY(
+        bool toolsSupportEnabled READ toolsSupportEnabled NOTIFY toolsSupportEnabledChanged FINAL)
 
     QML_ELEMENT
 
@@ -107,6 +110,10 @@ public:
     Q_INVOKABLE QString getRuleContent(int index);
     Q_INVOKABLE void refreshRules();
 
+    bool isAgentMode() const;
+    void setIsAgentMode(bool newIsAgentMode);
+    bool toolsSupportEnabled() const;
+
 public slots:
     void sendMessage(const QString &message);
     void copyToClipboard(const QString &text);
@@ -134,6 +141,9 @@ signals:
     void activeRulesChanged();
     void activeRulesCountChanged();
 
+    void isAgentModeChanged();
+    void toolsSupportEnabledChanged();
+
 private:
     QString getChatsHistoryDir() const;
     QString getSuggestedFileName() const;
@@ -152,6 +162,7 @@ private:
     bool m_isRequestInProgress;
     QString m_lastErrorMessage;
     QVariantList m_activeRules;
+    bool m_isAgentMode;
 };
 
 } // namespace QodeAssist::Chat
diff --git a/ChatView/ClientInterface.cpp b/ChatView/ClientInterface.cpp
index 951b495..835ed5a 100644
--- a/ChatView/ClientInterface.cpp
+++ b/ChatView/ClientInterface.cpp
@@ -55,7 +55,10 @@ ClientInterface::ClientInterface(
 ClientInterface::~ClientInterface() = default;
 
 void ClientInterface::sendMessage(
-    const QString &message, const QList<QString> &attachments, const QList<QString> &linkedFiles)
+    const QString &message,
+    const QList<QString> &attachments,
+    const QList<QString> &linkedFiles,
+    bool useAgentMode)
 {
     cancelRequest();
     m_accumulatedResponses.clear();
@@ -83,10 +86,12 @@ void ClientInterface::sendMessage(
 
     LLMCore::ContextData context;
 
+    const bool isToolsEnabled = Settings::generalSettings().useTools() && useAgentMode;
+
     if (chatAssistantSettings.useSystemPrompt()) {
         QString systemPrompt = chatAssistantSettings.systemPrompt();
 
-        if (Settings::generalSettings().useTools()) {
+        if (isToolsEnabled) {
             systemPrompt += "\n\n# Tool Usage Guidelines\n\n"
                             "**Multi-tool workflows:**\n"
                             "- Code structure: search_project (symbol mode) → find_and_read_file\n"
@@ -140,8 +145,8 @@ void ClientInterface::sendMessage(
 
     config.apiKey = provider->apiKey();
 
-    config.provider
-        ->prepareRequest(config.providerRequest, promptTemplate, context, LLMCore::RequestType::Chat);
+    config.provider->prepareRequest(
+        config.providerRequest, promptTemplate, context, LLMCore::RequestType::Chat, isToolsEnabled);
 
     QString requestId = QUuid::createUuid().toString();
     QJsonObject request{{"id", requestId}};
diff --git a/ChatView/ClientInterface.hpp b/ChatView/ClientInterface.hpp
index f01616c..a8a4e63 100644
--- a/ChatView/ClientInterface.hpp
+++ b/ChatView/ClientInterface.hpp
@@ -42,7 +42,8 @@ public:
     void sendMessage(
         const QString &message,
        const QList<QString> &attachments = {},
-        const QList<QString> &linkedFiles = {});
+        const QList<QString> &linkedFiles = {},
+        bool useAgentMode = false);
     void clearMessages();
     void cancelRequest();
 
diff --git a/ChatView/qml/RootItem.qml b/ChatView/qml/RootItem.qml
index aaea3b3..95263a7 100644
--- a/ChatView/qml/RootItem.qml
+++ b/ChatView/qml/RootItem.qml
@@ -65,7 +65,7 @@ ChatRootView {
         id: topBar
 
         Layout.preferredWidth: parent.width
-        Layout.preferredHeight: 40
+        Layout.preferredHeight: childrenRect.height + 10
 
         saveButton.onClicked: root.showSaveDialog()
         loadButton.onClicked: root.showLoadDialog()
@@ -74,7 +74,7 @@ ChatRootView {
             text: qsTr("%1/%2").arg(root.inputTokensCount).arg(root.chatModel.tokensThreshold)
         }
         recentPath {
-            text: qsTr("Latest chat file name: %1").arg(root.chatFileName.length > 0 ? root.chatFileName : "Unsaved")
+            text: qsTr("Chat name: %1").arg(root.chatFileName.length > 0 ? root.chatFileName : "Unsaved")
         }
         openChatHistory.onClicked: root.openChatHistoryFolder()
         rulesButton.onClicked: rulesViewer.open()
@@ -84,6 +84,13 @@ ChatRootView {
             checked: typeof _chatview !== 'undefined' ? _chatview.isPin : false
             onCheckedChanged: _chatview.isPin = topBar.pinButton.checked
         }
+        agentModeSwitch {
+            checked: root.isAgentMode
+            enabled: root.toolsSupportEnabled
+            onCheckedChanged: {
+                root.isAgentMode = agentModeSwitch.checked
+            }
+        }
     }
 
     ListView {
@@ -108,7 +115,7 @@ ChatRootView {
                 if (model.roleType === ChatModel.Tool) {
                     return toolMessageComponent
                 } else if (model.roleType === ChatModel.FileEdit) {
-                    return fileEditSuggestionComponent
+                    return toolMessageComponent
                 } else {
                     return chatItemComponent
                 }
diff --git a/ChatView/qml/parts/TopBar.qml b/ChatView/qml/parts/TopBar.qml
index cf944ef..72e506b 100644
--- a/ChatView/qml/parts/TopBar.qml
+++ b/ChatView/qml/parts/TopBar.qml
@@ -34,21 +34,20 @@ Rectangle {
     property alias openChatHistory: openChatHistoryId
     property alias pinButton: pinButtonId
     property alias rulesButton: rulesButtonId
+    property alias agentModeSwitch: agentModeSwitchId
     property alias activeRulesCount: activeRulesCountId.text
 
    color: palette.window.hslLightness > 0.5 ?
               Qt.darker(palette.window, 1.1) : Qt.lighter(palette.window, 1.1)
 
-    RowLayout {
+    Flow {
 
         anchors {
             left: parent.left
-            leftMargin: 5
             right: parent.right
-            rightMargin: 5
             verticalCenter: parent.verticalCenter
+            margins: 5
         }
-
         spacing: 10
 
         QoAButton {
@@ -69,107 +68,144 @@ Rectangle {
                           : qsTr("Pin chat window to the top")
         }
 
-        QoAButton {
-            id: saveButtonId
+        QoATextSlider {
+            id: agentModeSwitchId
 
-            icon {
-                source: "qrc:/qt/qml/ChatView/icons/save-chat-dark.svg"
-                height: 15
-                width: 8
-            }
-            ToolTip.visible: hovered
-            ToolTip.delay: 250
-            ToolTip.text: qsTr("Save chat to *.json file")
-        }
-
-        QoAButton {
-            id: loadButtonId
-
-            icon {
-                source: "qrc:/qt/qml/ChatView/icons/load-chat-dark.svg"
-                height: 15
-                width: 8
-            }
-            ToolTip.visible: hovered
-            ToolTip.delay: 250
-            ToolTip.text: qsTr("Load chat from *.json file")
-        }
-
-        QoAButton {
-            id: clearButtonId
-
-            icon {
-                source: "qrc:/qt/qml/ChatView/icons/clean-icon-dark.svg"
-                height: 15
-                width: 8
-            }
-            ToolTip.visible: hovered
-            ToolTip.delay: 250
-            ToolTip.text: qsTr("Clean chat")
-        }
-
-        Text {
-            id: recentPathId
-
-            elide: Text.ElideMiddle
-            color: palette.text
-        }
-
-        QoAButton {
-            id: openChatHistoryId
-
-            icon {
-                source: "qrc:/qt/qml/ChatView/icons/file-in-system.svg"
-                height: 15
-                width: 15
-            }
-            ToolTip.visible: hovered
-            ToolTip.delay: 250
-            ToolTip.text: qsTr("Show in system")
-        }
-
-        QoAButton {
-            id: rulesButtonId
-
-            icon {
-                source: "qrc:/qt/qml/ChatView/icons/rules-icon.svg"
-                height: 15
-                width: 15
-            }
-            text: " "
+            leftText: "chat"
+            rightText: "AI Agent"
 
             ToolTip.visible: hovered
             ToolTip.delay: 250
-            ToolTip.text: root.activeRulesCount > 0
-                          ? qsTr("View active project rules (%1)").arg(root.activeRulesCount)
-                          : qsTr("View active project rules (no rules found)")
-
-            Text {
-                id: activeRulesCountId
-
-                anchors {
-                    bottom: parent.bottom
-                    bottomMargin: 2
-                    right: parent.right
-                    rightMargin: 4
+            ToolTip.text: {
+                if (!agentModeSwitchId.enabled) {
+                    return qsTr("Tools are disabled in General Settings")
                 }
-
-                color: palette.text
-                font.pixelSize: 10
-                font.bold: true
+                return checked
+                       ? qsTr("Agent Mode: AI can use tools to read files, search project, and build code") : qsTr("Chat Mode: Simple conversation without tool access")
             }
         }
 
         Item {
-            Layout.fillWidth: true
+            height: agentModeSwitchId.height
+            width: recentPathId.width
+
+            Text {
+                id: recentPathId
+
+                anchors.verticalCenter: parent.verticalCenter
+                width: Math.min(implicitWidth, root.width)
+                elide: Text.ElideMiddle
+                color: palette.text
+                font.pixelSize: 12
+
+                MouseArea {
+                    anchors.fill: parent
+                    hoverEnabled: true
+
+                    ToolTip.visible: containsMouse
+                    ToolTip.delay: 500
+                    ToolTip.text: recentPathId.text
+                }
+            }
         }
 
-        Badge {
-            id: tokensBadgeId
+        RowLayout {
+            Layout.preferredWidth: root.width
 
-            ToolTip.visible: hovered
-            ToolTip.delay: 250
-            ToolTip.text: qsTr("Current amount tokens in chat and LLM limit threshold")
+            spacing: 10
+
+            QoAButton {
+                id: saveButtonId
+
+                icon {
+                    source: "qrc:/qt/qml/ChatView/icons/save-chat-dark.svg"
+                    height: 15
+                    width: 8
+                }
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: qsTr("Save chat to *.json file")
+            }
+
+            QoAButton {
+                id: loadButtonId
+
+                icon {
+                    source: "qrc:/qt/qml/ChatView/icons/load-chat-dark.svg"
+                    height: 15
+                    width: 8
+                }
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: qsTr("Load chat from *.json file")
+            }
+
+            QoAButton {
+                id: clearButtonId
+
+                icon {
+                    source: "qrc:/qt/qml/ChatView/icons/clean-icon-dark.svg"
+                    height: 15
+                    width: 8
+                }
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: qsTr("Clean chat")
+            }
+
+            QoAButton {
+                id: openChatHistoryId
+
+                icon {
+                    source: "qrc:/qt/qml/ChatView/icons/file-in-system.svg"
+                    height: 15
+                    width: 15
+                }
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: qsTr("Show in system")
+            }
+
+            QoAButton {
+                id: rulesButtonId
+
+                icon {
+                    source: "qrc:/qt/qml/ChatView/icons/rules-icon.svg"
+                    height: 15
+                    width: 15
+                }
+                text: " "
+
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: root.activeRulesCount > 0
+                              ? qsTr("View active project rules (%1)").arg(root.activeRulesCount)
+                              : qsTr("View active project rules (no rules found)")
+
+                Text {
+                    id: activeRulesCountId
+
+                    anchors {
+                        bottom: parent.bottom
+                        bottomMargin: 2
+                        right: parent.right
+                        rightMargin: 4
+                    }
+
+                    color: palette.text
+                    font.pixelSize: 10
+                    font.bold: true
+                }
+            }
+
+            Badge {
+                id: tokensBadgeId
+
+                ToolTip.visible: hovered
+                ToolTip.delay: 250
+                ToolTip.text: qsTr("Current amount tokens in chat and LLM limit threshold")
+            }
         }
     }
 }
diff --git a/LLMClientInterface.cpp b/LLMClientInterface.cpp
index 52360bc..db233d6 100644
--- a/LLMClientInterface.cpp
+++ b/LLMClientInterface.cpp
@@ -292,7 +292,8 @@ void LLMClientInterface::handleCompletion(const QJsonObject &request)
         config.providerRequest,
         promptTemplate,
         updatedContext,
-        LLMCore::RequestType::CodeCompletion);
+        LLMCore::RequestType::CodeCompletion,
+        false);
 
     auto errors = config.provider->validateRequest(config.providerRequest, promptTemplate->type());
     if (!errors.isEmpty()) {
diff --git a/QuickRefactorHandler.cpp b/QuickRefactorHandler.cpp
index 410a595..c416d92 100644
--- a/QuickRefactorHandler.cpp
+++ b/QuickRefactorHandler.cpp
@@ -145,7 +145,11 @@ void QuickRefactorHandler::prepareAndSendRequest(
     LLMCore::ContextData context = prepareContext(editor, range, instructions);
 
     provider->prepareRequest(
-        config.providerRequest, promptTemplate, context, LLMCore::RequestType::QuickRefactoring);
+        config.providerRequest,
+        promptTemplate,
+        context,
+        LLMCore::RequestType::QuickRefactoring,
+        false);
 
     QString requestId = QUuid::createUuid().toString();
     m_lastRequestId = requestId;
diff --git a/UIControls/qml/QoATextSlider.qml b/UIControls/qml/QoATextSlider.qml
index b51abf7..3ac0b5c 100644
--- a/UIControls/qml/QoATextSlider.qml
+++ b/UIControls/qml/QoATextSlider.qml
@@ -143,7 +143,6 @@ Item {
         anchors.fill: parent
         hoverEnabled: true
        cursorShape: root.enabled ? Qt.PointingHandCursor : Qt.ArrowCursor
-        enabled: root.enabled
 
         onClicked: {
             if (root.enabled) {
diff --git a/llmcore/Provider.hpp b/llmcore/Provider.hpp
index 075350c..797772f 100644
--- a/llmcore/Provider.hpp
+++ b/llmcore/Provider.hpp
@@ -52,7 +52,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type)
+        LLMCore::RequestType type,
+        bool isToolsEnabled)
         = 0;
     virtual QList<QString> getInstalledModels(const QString &url) = 0;
     virtual QList<QString> validateRequest(const QJsonObject &request, TemplateType type) = 0;
diff --git a/providers/ClaudeProvider.cpp b/providers/ClaudeProvider.cpp
index 1d30aa4..b9f4159 100644
--- a/providers/ClaudeProvider.cpp
+++ b/providers/ClaudeProvider.cpp
@@ -75,7 +75,8 @@ void ClaudeProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -99,8 +100,7 @@ void ClaudeProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::Claude);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/ClaudeProvider.hpp b/providers/ClaudeProvider.hpp
index 9ac65e5..9f49957 100644
--- a/providers/ClaudeProvider.hpp
+++ b/providers/ClaudeProvider.hpp
@@ -41,7 +41,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/GoogleAIProvider.cpp b/providers/GoogleAIProvider.cpp
index e5a4f11..e062d84 100644
--- a/providers/GoogleAIProvider.cpp
+++ b/providers/GoogleAIProvider.cpp
@@ -75,7 +75,8 @@ void GoogleAIProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -102,8 +103,7 @@ void GoogleAIProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::Google);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/GoogleAIProvider.hpp b/providers/GoogleAIProvider.hpp
index 5f0c157..5893342 100644
--- a/providers/GoogleAIProvider.hpp
+++ b/providers/GoogleAIProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/LMStudioProvider.cpp b/providers/LMStudioProvider.cpp
index dae3fd4..1e03953 100644
--- a/providers/LMStudioProvider.cpp
+++ b/providers/LMStudioProvider.cpp
@@ -222,7 +222,8 @@ void LMStudioProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -250,8 +251,7 @@ void LMStudioProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::OpenAI);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/LMStudioProvider.hpp b/providers/LMStudioProvider.hpp
index cf838e9..5042a51 100644
--- a/providers/LMStudioProvider.hpp
+++ b/providers/LMStudioProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/LlamaCppProvider.cpp b/providers/LlamaCppProvider.cpp
index b416cc6..318f719 100644
--- a/providers/LlamaCppProvider.cpp
+++ b/providers/LlamaCppProvider.cpp
@@ -73,7 +73,8 @@ void LlamaCppProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -101,8 +102,7 @@ void LlamaCppProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::OpenAI);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/LlamaCppProvider.hpp b/providers/LlamaCppProvider.hpp
index ec6135e..b104dac 100644
--- a/providers/LlamaCppProvider.hpp
+++ b/providers/LlamaCppProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/MistralAIProvider.cpp b/providers/MistralAIProvider.cpp
index ad7aff6..7dde7ad 100644
--- a/providers/MistralAIProvider.cpp
+++ b/providers/MistralAIProvider.cpp
@@ -243,7 +243,8 @@ void MistralAIProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -271,8 +272,7 @@ void MistralAIProvider::prepareRequest(
        applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::OpenAI);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/MistralAIProvider.hpp b/providers/MistralAIProvider.hpp
index 34fed7a..bb41c14 100644
--- a/providers/MistralAIProvider.hpp
+++ b/providers/MistralAIProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/OllamaProvider.cpp b/providers/OllamaProvider.cpp
index 6be7ca5..3e91b00 100644
--- a/providers/OllamaProvider.cpp
+++ b/providers/OllamaProvider.cpp
@@ -74,7 +74,8 @@ void OllamaProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -107,8 +108,7 @@ void OllamaProvider::prepareRequest(
         applySettings(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->toolsFactory()->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::Ollama);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/OllamaProvider.hpp b/providers/OllamaProvider.hpp
index 1a975c4..84fb2b3 100644
--- a/providers/OllamaProvider.hpp
+++ b/providers/OllamaProvider.hpp
@@ -41,7 +41,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/OpenAICompatProvider.cpp b/providers/OpenAICompatProvider.cpp
index 6f478cc..507a5c2 100644
--- a/providers/OpenAICompatProvider.cpp
+++ b/providers/OpenAICompatProvider.cpp
@@ -73,7 +73,8 @@ void OpenAICompatProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -101,8 +102,7 @@ void OpenAICompatProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::OpenAI);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/OpenAICompatProvider.hpp b/providers/OpenAICompatProvider.hpp
index 679703b..a84a907 100644
--- a/providers/OpenAICompatProvider.hpp
+++ b/providers/OpenAICompatProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;
diff --git a/providers/OpenAIProvider.cpp b/providers/OpenAIProvider.cpp
index d945bf3..e12d341 100644
--- a/providers/OpenAIProvider.cpp
+++ b/providers/OpenAIProvider.cpp
@@ -74,7 +74,8 @@ void OpenAIProvider::prepareRequest(
     QJsonObject &request,
     LLMCore::PromptTemplate *prompt,
     LLMCore::ContextData context,
-    LLMCore::RequestType type)
+    LLMCore::RequestType type,
+    bool isToolsEnabled)
 {
     if (!prompt->isSupportProvider(providerID())) {
         LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@@ -102,8 +103,7 @@ void OpenAIProvider::prepareRequest(
         applyModelParams(Settings::chatAssistantSettings());
     }
 
-    if (supportsTools() && type == LLMCore::RequestType::Chat
-        && Settings::generalSettings().useTools()) {
+    if (isToolsEnabled) {
         auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
             LLMCore::ToolSchemaFormat::OpenAI);
         if (!toolsDefinitions.isEmpty()) {
diff --git a/providers/OpenAIProvider.hpp b/providers/OpenAIProvider.hpp
index d90e589..5daf5ff 100644
--- a/providers/OpenAIProvider.hpp
+++ b/providers/OpenAIProvider.hpp
@@ -40,7 +40,8 @@ public:
         QJsonObject &request,
         LLMCore::PromptTemplate *prompt,
         LLMCore::ContextData context,
-        LLMCore::RequestType type) override;
+        LLMCore::RequestType type,
+        bool isToolsEnabled) override;
     QList<QString> getInstalledModels(const QString &url) override;
     QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
     QString apiKey() const override;