feat: Add chat-agent switcher in chat ui (#247)

* feat: Add chat-agent switcher in chat ui

fix: QML errors

refactor: Change top bar layout

fix: default value

* fix: update GitHub Actions workflow for qtc
This commit is contained in:
Petr Mironychev
2025-10-31 16:09:38 +01:00
committed by GitHub
parent 9117572f82
commit db82fb08e8
27 changed files with 244 additions and 140 deletions

View File

@ -75,7 +75,8 @@ void ClaudeProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -99,8 +100,7 @@ void ClaudeProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::Claude);
if (!toolsDefinitions.isEmpty()) {

View File

@ -41,7 +41,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -75,7 +75,8 @@ void GoogleAIProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -102,8 +103,7 @@ void GoogleAIProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::Google);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -222,7 +222,8 @@ void LMStudioProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -250,8 +251,7 @@ void LMStudioProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::OpenAI);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -73,7 +73,8 @@ void LlamaCppProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -101,8 +102,7 @@ void LlamaCppProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::OpenAI);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -243,7 +243,8 @@ void MistralAIProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -271,8 +272,7 @@ void MistralAIProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::OpenAI);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -74,7 +74,8 @@ void OllamaProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -107,8 +108,7 @@ void OllamaProvider::prepareRequest(
applySettings(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->toolsFactory()->getToolsDefinitions(
LLMCore::ToolSchemaFormat::Ollama);
if (!toolsDefinitions.isEmpty()) {

View File

@ -41,7 +41,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -73,7 +73,8 @@ void OpenAICompatProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -101,8 +102,7 @@ void OpenAICompatProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::OpenAI);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;

View File

@ -74,7 +74,8 @@ void OpenAIProvider::prepareRequest(
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type)
LLMCore::RequestType type,
bool isToolsEnabled)
{
if (!prompt->isSupportProvider(providerID())) {
LOG_MESSAGE(QString("Template %1 doesn't support %2 provider").arg(name(), prompt->name()));
@ -102,8 +103,7 @@ void OpenAIProvider::prepareRequest(
applyModelParams(Settings::chatAssistantSettings());
}
if (supportsTools() && type == LLMCore::RequestType::Chat
&& Settings::generalSettings().useTools()) {
if (isToolsEnabled) {
auto toolsDefinitions = m_toolsManager->getToolsDefinitions(
LLMCore::ToolSchemaFormat::OpenAI);
if (!toolsDefinitions.isEmpty()) {

View File

@ -40,7 +40,8 @@ public:
QJsonObject &request,
LLMCore::PromptTemplate *prompt,
LLMCore::ContextData context,
LLMCore::RequestType type) override;
LLMCore::RequestType type,
bool isToolsEnabled) override;
QList<QString> getInstalledModels(const QString &url) override;
QList<QString> validateRequest(const QJsonObject &request, LLMCore::TemplateType type) override;
QString apiKey() const override;