fix: Remove message replacement after the response is fully received

commit fea9ecddc8
parent a26d475806
Author: Petr Mironychev
Date: 2025-11-12 20:32:01 +01:00
4 changed files with 7 additions and 25 deletions


@@ -251,20 +251,13 @@ void ClientInterface::cancelRequest()
     LOG_MESSAGE("All requests cancelled and state cleared");
 }
-void ClientInterface::handleLLMResponse(
-    const QString &response, const QJsonObject &request, bool isComplete)
+void ClientInterface::handleLLMResponse(const QString &response, const QJsonObject &request)
 {
     const auto message = response.trimmed();
     if (!message.isEmpty()) {
         QString messageId = request["id"].toString();
         m_chatModel->addMessage(message, ChatModel::ChatRole::Assistant, messageId);
-        if (isComplete) {
-            LOG_MESSAGE(
-                "Message completed. Final response for message " + messageId + ": " + response);
-            emit messageReceivedCompletely();
-        }
     }
 }
@@ -323,7 +316,7 @@ void ClientInterface::handlePartialResponse(const QString &requestId, const QStr
     m_accumulatedResponses[requestId] += partialText;
     const RequestContext &ctx = it.value();
-    handleLLMResponse(m_accumulatedResponses[requestId], ctx.originalRequest, false);
+    handleLLMResponse(m_accumulatedResponses[requestId], ctx.originalRequest);
 }
 void ClientInterface::handleFullResponse(const QString &requestId, const QString &fullText)
@@ -345,10 +338,11 @@ void ClientInterface::handleFullResponse(const QString &requestId, const QString
             .arg(requestId, applyError));
     }
-    handleLLMResponse(finalText, ctx.originalRequest, true);
     m_activeRequests.erase(it);
     m_accumulatedResponses.remove(requestId);
+    LOG_MESSAGE("Message completed. Final response for message " + ctx.originalRequest["id"].toString() + ": " + finalText);
+    emit messageReceivedCompletely();
 }
 void ClientInterface::handleRequestFailed(const QString &requestId, const QString &error)
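
Taken together, these hunks narrow handleLLMResponse() to a single job: writing the accumulated text into the chat model. The completion log and the messageReceivedCompletely() signal now live only in handleFullResponse(), and the final text is no longer pushed through handleLLMResponse() again, so the message already built up from the partial responses is not replaced at the end. A minimal sketch of the resulting flow, assuming a stubbed chat model; only the handler and member names shown in the diff are real, everything else (StubChatModel, m_requests, main) is invented for illustration:

    // Sketch only: mirrors the streaming vs. completion split after this commit.
    #include <QDebug>
    #include <QHash>
    #include <QJsonObject>
    #include <QString>

    struct StubChatModel {
        // Assumption: re-adding a message with an existing id updates it in
        // place, which lets every partial chunk refresh one chat bubble.
        void addMessage(const QString &text, const QString &id) { messages[id] = text; }
        QHash<QString, QString> messages;
    };

    class ClientInterfaceSketch {
    public:
        void handlePartialResponse(const QString &requestId, const QString &partialText)
        {
            m_accumulatedResponses[requestId] += partialText;
            // Streaming path: each chunk re-renders the accumulated text.
            handleLLMResponse(m_accumulatedResponses[requestId], m_requests.value(requestId));
        }

        void handleFullResponse(const QString &requestId, const QString &finalText)
        {
            // Completion path: bookkeeping and notification only; the final
            // text is NOT re-sent through handleLLMResponse(), so the already
            // accumulated message stays as it is.
            m_accumulatedResponses.remove(requestId);
            qDebug() << "Message completed:" << finalText;
            // emit messageReceivedCompletely();  // in the real QObject subclass
        }

        QHash<QString, QJsonObject> m_requests;

    private:
        void handleLLMResponse(const QString &response, const QJsonObject &request)
        {
            const auto message = response.trimmed();
            if (!message.isEmpty())
                m_chatModel.addMessage(message, request["id"].toString());
        }

        StubChatModel m_chatModel;
        QHash<QString, QString> m_accumulatedResponses;
    };

    int main()
    {
        ClientInterfaceSketch client;
        client.m_requests["r1"] = QJsonObject{{"id", "msg-1"}};
        client.handlePartialResponse("r1", "Hello, ");
        client.handlePartialResponse("r1", "world.");
        client.handleFullResponse("r1", "Hello, world.");
        return 0;
    }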


@@ -61,7 +61,7 @@ private slots:
     void handleCleanAccumulatedData(const QString &requestId);
 private:
-    void handleLLMResponse(const QString &response, const QJsonObject &request, bool isComplete);
+    void handleLLMResponse(const QString &response, const QJsonObject &request);
     QString getCurrentFileContext() const;
     QString getSystemPromptWithLinkedFiles(
         const QString &basePrompt, const QList<QString> &linkedFiles) const;


@@ -215,7 +215,6 @@ ChatRootView {
         ThinkingStatusItem {
             width: parent.width
             thinkingContent: {
-                // Extract thinking content and signature
                 let content = model.content
                 let signatureStart = content.indexOf("\n[Signature:")
                 if (signatureStart >= 0) {
@@ -223,17 +222,6 @@
                 }
                 return content
             }
-            signature: {
-                let content = model.content
-                let signatureStart = content.indexOf("\n[Signature: ")
-                if (signatureStart >= 0) {
-                    let signatureEnd = content.indexOf("...]", signatureStart)
-                    if (signatureEnd >= 0) {
-                        return content.substring(signatureStart + 13, signatureEnd)
-                    }
-                }
-                return ""
-            }
             isRedacted: model.isRedacted !== undefined ? model.isRedacted : false
         }
     }
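
The thinkingContent binding above still cuts the text off at the signature marker; only the signature extraction itself is removed. The truncation statement falls between the two hunks (old line 222), so its exact form is not visible here, but a hedged C++ equivalent of what the binding presumably computes, with the "\n[Signature:" marker taken from the diff:

    #include <QString>

    // Assumed equivalent of the surviving QML binding: keep everything before
    // the "\n[Signature:" marker, or the whole content when no marker exists.
    QString extractThinkingContent(const QString &content)
    {
        const int signatureStart = content.indexOf(QStringLiteral("\n[Signature:"));
        return signatureStart >= 0 ? content.left(signatureStart) : content;
    }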


@@ -24,7 +24,7 @@ Rectangle {
     id: root
     property string thinkingContent: ""
-    property string signature: ""
+    // property string signature: ""
     property bool isRedacted: false
     property bool expanded: false