fix: Clean up accumulated text in one request

This commit is contained in:
Petr Mironychev
2025-10-10 16:45:23 +02:00
parent 84770abb20
commit ebd71daf3d
9 changed files with 41 additions and 4 deletions

View File

@@ -196,9 +196,7 @@ LLMCore::ProviderID ClaudeProvider::providerID() const
void ClaudeProvider::sendRequest(
const LLMCore::RequestID &requestId, const QUrl &url, const QJsonObject &payload)
{
if (m_dataBuffers.contains(requestId)) {
m_dataBuffers[requestId].responseContent.clear();
} else {
if (!m_messages.contains(requestId)) {
m_dataBuffers[requestId].clear();
}
@@ -340,6 +338,7 @@ void ClaudeProvider::processStreamEvent(const QString &requestId, const QJsonObj
if (eventType == "message_start") {
message->startNewContinuation();
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting NEW continuation for request %1").arg(requestId));
} else if (eventType == "content_block_start") {

View File

@@ -320,6 +320,11 @@ void LMStudioProvider::processStreamChunk(const QString &requestId, const QJsonO
message = new OpenAIMessage(this);
m_messages[requestId] = message;
LOG_MESSAGE(QString("Created NEW OpenAIMessage for request %1").arg(requestId));
if (m_dataBuffers.contains(requestId)) {
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting continuation for request %1").arg(requestId));
}
}
if (delta.contains("content") && !delta["content"].isNull()) {

View File

@@ -324,6 +324,11 @@ void LlamaCppProvider::processStreamChunk(const QString &requestId, const QJsonO
message = new OpenAIMessage(this);
m_messages[requestId] = message;
LOG_MESSAGE(QString("Created NEW OpenAIMessage for llama.cpp request %1").arg(requestId));
if (m_dataBuffers.contains(requestId)) {
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting continuation for request %1").arg(requestId));
}
}
if (delta.contains("content") && !delta["content"].isNull()) {

View File

@@ -341,6 +341,11 @@ void MistralAIProvider::processStreamChunk(const QString &requestId, const QJson
message = new OpenAIMessage(this);
m_messages[requestId] = message;
LOG_MESSAGE(QString("Created NEW OpenAIMessage for Mistral request %1").arg(requestId));
if (m_dataBuffers.contains(requestId)) {
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting continuation for request %1").arg(requestId));
}
}
if (delta.contains("content") && !delta["content"].isNull()) {

View File

@@ -298,6 +298,11 @@ void OpenAICompatProvider::processStreamChunk(const QString &requestId, const QJ
message = new OpenAIMessage(this);
m_messages[requestId] = message;
LOG_MESSAGE(QString("Created NEW OpenAIMessage for request %1").arg(requestId));
if (m_dataBuffers.contains(requestId)) {
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting continuation for request %1").arg(requestId));
}
}
if (delta.contains("content") && !delta["content"].isNull()) {

View File

@@ -334,6 +334,11 @@ void OpenAIProvider::processStreamChunk(const QString &requestId, const QJsonObj
message = new OpenAIMessage(this);
m_messages[requestId] = message;
LOG_MESSAGE(QString("Created NEW OpenAIAPIMessage for request %1").arg(requestId));
if (m_dataBuffers.contains(requestId)) {
emit continuationStarted(requestId);
LOG_MESSAGE(QString("Starting continuation for request %1").arg(requestId));
}
}
if (delta.contains("content") && !delta["content"].isNull()) {