fix: Handle the full input message from OpenAI-compatible providers

This commit is contained in:
Petr Mironychev
2025-01-19 01:16:33 +01:00
parent 61fded34ea
commit c97c0f62e8
3 changed files with 68 additions and 30 deletions

View File

@ -108,15 +108,22 @@ bool LMStudioProvider::handleResponse(QNetworkReply *reply, QString &accumulated
return false;
}
QByteArrayList chunks = data.split('\n');
for (const QByteArray &chunk : chunks) {
if (chunk.trimmed().isEmpty() || chunk == "data: [DONE]") {
bool isDone = false;
QByteArrayList lines = data.split('\n');
for (const QByteArray &line : lines) {
if (line.trimmed().isEmpty()) {
continue;
}
QByteArray jsonData = chunk;
if (chunk.startsWith("data: ")) {
jsonData = chunk.mid(6);
if (line == "data: [DONE]") {
isDone = true;
continue;
}
QByteArray jsonData = line;
if (line.startsWith("data: ")) {
jsonData = line.mid(6);
}
QJsonParseError error;
@ -128,15 +135,21 @@ bool LMStudioProvider::handleResponse(QNetworkReply *reply, QString &accumulated
auto message = LLMCore::OpenAIMessage::fromJson(doc.object());
if (message.hasError()) {
LOG_MESSAGE("Error in LMStudioProvider response: " + message.error);
LOG_MESSAGE("Error in OpenAI response: " + message.error);
continue;
}
accumulatedResponse += message.getContent();
return message.isDone();
QString content = message.getContent();
if (!content.isEmpty()) {
accumulatedResponse += content;
}
if (message.isDone()) {
isDone = true;
}
}
return false;
return isDone;
}
QList<QString> LMStudioProvider::getInstalledModels(const QString &url)

View File

@ -109,15 +109,22 @@ bool OpenAICompatProvider::handleResponse(QNetworkReply *reply, QString &accumul
return false;
}
QByteArrayList chunks = data.split('\n');
for (const QByteArray &chunk : chunks) {
if (chunk.trimmed().isEmpty() || chunk == "data: [DONE]") {
bool isDone = false;
QByteArrayList lines = data.split('\n');
for (const QByteArray &line : lines) {
if (line.trimmed().isEmpty()) {
continue;
}
QByteArray jsonData = chunk;
if (chunk.startsWith("data: ")) {
jsonData = chunk.mid(6);
if (line == "data: [DONE]") {
isDone = true;
continue;
}
QByteArray jsonData = line;
if (line.startsWith("data: ")) {
jsonData = line.mid(6);
}
QJsonParseError error;
@ -133,11 +140,17 @@ bool OpenAICompatProvider::handleResponse(QNetworkReply *reply, QString &accumul
continue;
}
accumulatedResponse += message.getContent();
return message.isDone();
QString content = message.getContent();
if (!content.isEmpty()) {
accumulatedResponse += content;
}
if (message.isDone()) {
isDone = true;
}
}
return false;
return isDone;
}
QList<QString> OpenAICompatProvider::getInstalledModels(const QString &url)

View File

@ -93,16 +93,22 @@ bool OpenRouterProvider::handleResponse(QNetworkReply *reply, QString &accumulat
return false;
}
QByteArrayList chunks = data.split('\n');
for (const QByteArray &chunk : chunks) {
if (chunk.trimmed().isEmpty() || chunk.contains("OPENROUTER PROCESSING")
|| chunk == "data: [DONE]") {
bool isDone = false;
QByteArrayList lines = data.split('\n');
for (const QByteArray &line : lines) {
if (line.trimmed().isEmpty() || line.contains("OPENROUTER PROCESSING")) {
continue;
}
QByteArray jsonData = chunk;
if (chunk.startsWith("data: ")) {
jsonData = chunk.mid(6);
if (line == "data: [DONE]") {
isDone = true;
continue;
}
QByteArray jsonData = line;
if (line.startsWith("data: ")) {
jsonData = line.mid(6);
}
QJsonParseError error;
@ -114,15 +120,21 @@ bool OpenRouterProvider::handleResponse(QNetworkReply *reply, QString &accumulat
auto message = LLMCore::OpenAIMessage::fromJson(doc.object());
if (message.hasError()) {
LOG_MESSAGE("Error in OpenRouter response: " + message.error);
LOG_MESSAGE("Error in OpenAI response: " + message.error);
continue;
}
accumulatedResponse += message.getContent();
return message.isDone();
QString content = message.getContent();
if (!content.isEmpty()) {
accumulatedResponse += content;
}
if (message.isDone()) {
isDone = true;
}
}
return false;
return isDone;
}
QString OpenRouterProvider::apiKey() const