Version 0.3.3

- Add streaming responses to chat
- Add the ability to stop a chat request, with a Stop button
Petr Mironychev 2024-10-17 00:10:47 +02:00 committed by GitHub
commit 40a568ebd9
12 changed files with 88 additions and 50 deletions

View File

@@ -1,6 +1,6 @@
 {
     "Name" : "QodeAssist",
-    "Version" : "0.3.2",
+    "Version" : "0.3.3",
     "CompatVersion" : "${IDE_VERSION_COMPAT}",
     "Vendor" : "Petr Mironychev",
     "Copyright" : "(C) ${IDE_COPYRIGHT_YEAR} Petr Mironychev, (C) ${IDE_COPYRIGHT_YEAR} The Qt Company Ltd",

View File

@@ -68,6 +68,28 @@ QHash<int, QByteArray> ChatModel::roleNames() const
     return roles;
 }
 
+void ChatModel::addMessage(const QString &content, ChatRole role, const QString &id)
+{
+    int tokenCount = estimateTokenCount(content);
+
+    if (!m_messages.isEmpty() && !id.isEmpty() && m_messages.last().id == id) {
+        Message &lastMessage = m_messages.last();
+        int oldTokenCount = lastMessage.tokenCount;
+        lastMessage.content = content;
+        lastMessage.tokenCount = tokenCount;
+        m_totalTokens += (tokenCount - oldTokenCount);
+        emit dataChanged(index(m_messages.size() - 1), index(m_messages.size() - 1));
+    } else {
+        beginInsertRows(QModelIndex(), m_messages.size(), m_messages.size());
+        m_messages.append({role, content, tokenCount, id});
+        m_totalTokens += tokenCount;
+        endInsertRows();
+    }
+
+    trim();
+    emit totalTokensChanged();
+}
+
 QVector<ChatModel::Message> ChatModel::getChatHistory() const
 {
     return m_messages;

@@ -92,17 +114,6 @@ int ChatModel::estimateTokenCount(const QString &text) const
     return text.length() / 4;
 }
 
-void ChatModel::addMessage(const QString &content, ChatRole role)
-{
-    int tokenCount = estimateTokenCount(content);
-    beginInsertRows(QModelIndex(), m_messages.size(), m_messages.size());
-    m_messages.append({role, content, tokenCount});
-    m_totalTokens += tokenCount;
-    endInsertRows();
-    trim();
-    emit totalTokensChanged();
-}
-
 void ChatModel::clear()
 {
     beginResetModel();

@@ -176,4 +187,9 @@ int ChatModel::tokensThreshold() const
     return settings.chatTokensThreshold();
 }
 
+QString ChatModel::lastMessageId() const
+{
+    return !m_messages.isEmpty() ? m_messages.last().id : "";
+}
+
 } // namespace QodeAssist::Chat
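
For readers skimming the diff: the new addMessage(content, role, id) overload is what carries the streaming behaviour. When the id is non-empty and equal to the id of the last stored message, that row is rewritten in place (and the token total adjusted) instead of a new row being appended. A minimal illustrative sketch of the call pattern, not part of the commit; the id value is arbitrary:

    // Each streamed chunk delivers the accumulated text so far under the same id,
    // so repeated calls rewrite one assistant row instead of adding new rows.
    ChatModel model;
    const QString streamId = "chat-stream-1"; // any stable, non-empty id works
    model.addMessage("Hel", ChatModel::ChatRole::Assistant, streamId);           // inserts a new row
    model.addMessage("Hello, wor", ChatModel::ChatRole::Assistant, streamId);    // updates that row
    model.addMessage("Hello, world!", ChatModel::ChatRole::Assistant, streamId); // final content
    // A message stored with an empty id (e.g. the user's prompt) is always appended.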

View File

@@ -46,6 +46,7 @@ public:
         ChatRole role;
         QString content;
         int tokenCount;
+        QString id;
     };
 
     explicit ChatModel(QObject *parent = nullptr);

@@ -54,7 +55,7 @@ public:
     QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
     QHash<int, QByteArray> roleNames() const override;
 
-    Q_INVOKABLE void addMessage(const QString &content, ChatRole role);
+    Q_INVOKABLE void addMessage(const QString &content, ChatRole role, const QString &id);
     Q_INVOKABLE void clear();
     Q_INVOKABLE QList<MessagePart> processMessageContent(const QString &content) const;

@@ -65,6 +66,7 @@ public:
     int tokensThreshold() const;
 
     QString currentModel() const;
+    QString lastMessageId() const;
 
 signals:
     void totalTokensChanged();

View File

@@ -60,6 +60,11 @@ void ChatRootView::copyToClipboard(const QString &text)
     QGuiApplication::clipboard()->setText(text);
 }
 
+void ChatRootView::cancelRequest()
+{
+    m_clientInterface->cancelRequest();
+}
+
 void ChatRootView::generateColors()
 {
     QColor baseColor = backgroundColor();

View File

@@ -52,6 +52,7 @@ public:
 public slots:
     void sendMessage(const QString &message) const;
     void copyToClipboard(const QString &text);
+    void cancelRequest();
 
 signals:
     void chatModelChanged();

View File

@@ -38,8 +38,8 @@ ClientInterface::ClientInterface(ChatModel *chatModel, QObject *parent)
     connect(m_requestHandler,
             &LLMCore::RequestHandler::completionReceived,
             this,
-            [this](const QString &completion, const QJsonObject &, bool isComplete) {
-                handleLLMResponse(completion, isComplete);
+            [this](const QString &completion, const QJsonObject &request, bool isComplete) {
+                handleLLMResponse(completion, request, isComplete);
             });
 
     connect(m_requestHandler,

@@ -56,6 +56,8 @@ ClientInterface::~ClientInterface() = default;
 void ClientInterface::sendMessage(const QString &message)
 {
+    cancelRequest();
+
     LOG_MESSAGE("Sending message: " + message);
     LOG_MESSAGE("chatProvider " + Settings::generalSettings().chatLlmProviders.stringValue());
     LOG_MESSAGE("chatTemplate " + Settings::generalSettings().chatPrompts.stringValue());

@@ -74,6 +76,9 @@ void ClientInterface::sendMessage(const QString &message)
     providerRequest["stream"] = true;
     providerRequest["messages"] = m_chatModel->prepareMessagesForRequest(context);
 
+    if (!chatTemplate || !chatProvider) {
+        LOG_MESSAGE("Check settings, provider or template are not set");
+    }
+
     chatTemplate->prepareRequest(providerRequest, context);
     chatProvider->prepareRequest(providerRequest, LLMCore::RequestType::Chat);

@@ -89,28 +94,31 @@ void ClientInterface::sendMessage(const QString &message)
     QJsonObject request;
     request["id"] = QUuid::createUuid().toString();
 
-    m_accumulatedResponse.clear();
-    m_chatModel->addMessage(message, ChatModel::ChatRole::User);
+    m_chatModel->addMessage(message, ChatModel::ChatRole::User, "");
     m_requestHandler->sendLLMRequest(config, request);
 }
 
 void ClientInterface::clearMessages()
 {
     m_chatModel->clear();
-    m_accumulatedResponse.clear();
     LOG_MESSAGE("Chat history cleared");
 }
 
-void ClientInterface::handleLLMResponse(const QString &response, bool isComplete)
+void ClientInterface::cancelRequest()
 {
-    m_accumulatedResponse += response;
+    auto id = m_chatModel->lastMessageId();
+    m_requestHandler->cancelRequest(id);
+}
+
+void ClientInterface::handleLLMResponse(const QString &response,
+                                        const QJsonObject &request,
+                                        bool isComplete)
+{
+    QString messageId = request["id"].toString();
+    m_chatModel->addMessage(response.trimmed(), ChatModel::ChatRole::Assistant, messageId);
 
     if (isComplete) {
-        LOG_MESSAGE("Message completed. Final response: " + m_accumulatedResponse);
-        emit messageReceived(m_accumulatedResponse.trimmed());
-        m_chatModel->addMessage(m_accumulatedResponse.trimmed(), ChatModel::ChatRole::Assistant);
-        m_accumulatedResponse.clear();
+        LOG_MESSAGE("Message completed. Final response for message " + messageId + ": " + response);
     }
 }
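
Taken together, these hunks replace the old accumulate-then-post flow with an id-based upsert: sendMessage() tags every request with a fresh QUuid, each chunk reaching handleLLMResponse() is already accumulated by the request handler (see the RequestHandler hunk below), and it is stored under request["id"], which is why the separate m_accumulatedResponse buffer can be dropped. A compressed recap of the round trip, using the member names from the hunks above (sketch only, not part of the commit; chunk is a placeholder for the text received so far):

    // 1. Send: tag the request so later chunks can be traced back to one chat row.
    QJsonObject request;
    request["id"] = QUuid::createUuid().toString();
    // 2. Receive: every chunk carries the same request, so the same row is updated.
    const QString chunk = "partial response text"; // placeholder
    m_chatModel->addMessage(chunk.trimmed(), ChatModel::ChatRole::Assistant,
                            request["id"].toString());
    // 3. Stop: the newest row's id equals the in-flight request id, so it can be
    //    handed straight to the request handler for cancellation.
    m_requestHandler->cancelRequest(m_chatModel->lastMessageId());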

View File

@@ -38,16 +38,15 @@ public:
     void sendMessage(const QString &message);
     void clearMessages();
+    void cancelRequest();
 
 signals:
-    void messageReceived(const QString &message);
     void errorOccurred(const QString &error);
 
 private:
-    void handleLLMResponse(const QString &response, bool isComplete);
+    void handleLLMResponse(const QString &response, const QJsonObject &request, bool isComplete);
 
     LLMCore::RequestHandler *m_requestHandler;
-    QString m_accumulatedResponse;
     ChatModel *m_chatModel;
 };

View File

@@ -121,6 +121,15 @@ ChatRootView {
             text: qsTr("Send")
             onClicked: sendChatMessage()
         }
 
+        Button {
+            id: stopButton
+            Layout.alignment: Qt.AlignBottom
+            text: qsTr("Stop")
+            onClicked: root.cancelRequest()
+        }
+
         Button {
             id: clearButton
View File

@@ -43,8 +43,8 @@ void PromptTemplateManager::setCurrentFimTemplate(const QString &name)
 PromptTemplate *PromptTemplateManager::getCurrentFimTemplate()
 {
     if (m_currentFimTemplate == nullptr) {
-        LOG_MESSAGE("Current fim provider is null");
-        return nullptr;
+        LOG_MESSAGE("Current fim provider is null, return first");
+        return m_fimTemplates.first();
     }
 
     return m_currentFimTemplate;

@@ -63,8 +63,10 @@ void PromptTemplateManager::setCurrentChatTemplate(const QString &name)
 PromptTemplate *PromptTemplateManager::getCurrentChatTemplate()
 {
-    if (m_currentChatTemplate == nullptr)
-        LOG_MESSAGE("Current chat provider is null");
+    if (m_currentChatTemplate == nullptr) {
+        LOG_MESSAGE("Current chat provider is null, return first");
+        return m_chatTemplates.first();
+    }
 
     return m_currentChatTemplate;
 }

View File

@@ -56,8 +56,8 @@ Provider *ProvidersManager::setCurrentChatProvider(const QString &name)
 Provider *ProvidersManager::getCurrentFimProvider()
 {
     if (m_currentFimProvider == nullptr) {
-        LOG_MESSAGE("Current fim provider is null");
-        return nullptr;
+        LOG_MESSAGE("Current fim provider is null, return first");
+        return m_providers.first();
     }
 
     return m_currentFimProvider;

@@ -66,8 +66,8 @@ Provider *ProvidersManager::getCurrentFimProvider()
 Provider *ProvidersManager::getCurrentChatProvider()
 {
     if (m_currentChatProvider == nullptr) {
-        LOG_MESSAGE("Current chat provider is null");
-        return nullptr;
+        LOG_MESSAGE("Current chat provider is null, return first");
+        return m_providers.first();
     }
 
     return m_currentChatProvider;
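
The template and provider manager changes make the same behavioural tweak: when no chat/FIM template or provider has been selected yet, the getters fall back to the first registered one instead of returning nullptr, which is presumably why the new check in ClientInterface::sendMessage only logs instead of aborting. A hedged caller-side sketch (assuming the managers' usual singleton instance() accessors; not part of the commit):

    // Safe as long as at least one provider and one template were registered at
    // plugin start-up; calling .first() on an empty container would still be invalid.
    auto *provider = LLMCore::ProvidersManager::instance().getCurrentChatProvider();
    auto *tmpl = LLMCore::PromptTemplateManager::instance().getCurrentChatTemplate();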

View File

@@ -80,22 +80,18 @@ void RequestHandler::handleLLMResponse(QNetworkReply *reply,
             && processSingleLineCompletion(reply, request, accumulatedResponse, config)) {
             return;
         }
 
-    if (isComplete || reply->isFinished()) {
-        if (isComplete) {
-            if (config.requestType == RequestType::Fim) {
-                auto cleanedCompletion = removeStopWords(accumulatedResponse,
-                                                         config.promptTemplate->stopWords());
-                emit completionReceived(cleanedCompletion, request, true);
-            } else {
-                emit completionReceived(accumulatedResponse, request, true);
-            }
-        } else {
-            emit completionReceived(accumulatedResponse, request, false);
-        }
+        if (isComplete) {
+            auto cleanedCompletion = removeStopWords(accumulatedResponse,
+                                                     config.promptTemplate->stopWords());
+            emit completionReceived(cleanedCompletion, request, true);
+        }
+    } else if (config.requestType == RequestType::Chat) {
+        emit completionReceived(accumulatedResponse, request, isComplete);
+    }
+
+    if (isComplete)
         m_accumulatedResponses.remove(reply);
-    }
 }
 
 bool RequestHandler::cancelRequest(const QString &id)
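
On the request-handler side, the restructuring means chat requests now emit completionReceived on every incoming chunk, carrying the text accumulated so far for that reply plus an isComplete flag, while FIM requests keep the stop-word cleanup on completion. A minimal consumer sketch (illustrative only, not part of the commit; requestHandler and receiver stand for an existing handler and any QObject used as connection context):

    QObject::connect(requestHandler,
                     &LLMCore::RequestHandler::completionReceived,
                     receiver,
                     [](const QString &textSoFar, const QJsonObject &request, bool isComplete) {
                         // textSoFar is the whole response accumulated so far, not a delta,
                         // so a consumer can simply overwrite its current draft each time.
                         qDebug() << (isComplete ? "final" : "partial")
                                  << "response for request" << request["id"].toString();
                     });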

View File

@@ -134,7 +134,7 @@ GeneralSettings::GeneralSettings()
     chatTokensThreshold.setToolTip(Tr::tr("Maximum number of tokens in chat history. When "
                                           "exceeded, oldest messages will be removed."));
     chatTokensThreshold.setRange(1000, 16000);
-    chatTokensThreshold.setDefaultValue(4000);
+    chatTokensThreshold.setDefaultValue(8000);
 
     loadProviders();
     loadPrompts();