Mirror of https://github.com/Palm1r/QodeAssist.git
Synced 2025-06-03 09:08:21 -04:00
♻️ refactor: Improve response handler for LMStudio
This commit is contained in:
parent b692402897
commit 882047d7b2
@@ -56,7 +56,6 @@ add_qtc_plugin(QodeAssist
    providers/OllamaProvider.hpp providers/OllamaProvider.cpp
    providers/LMStudioProvider.hpp providers/LMStudioProvider.cpp
    providers/OpenAICompatProvider.hpp providers/OpenAICompatProvider.cpp
    providers/OllamaMessage.hpp providers/OllamaMessage.cpp
    QodeAssist.qrc
    LSPCompletion.hpp
    LLMSuggestion.hpp LLMSuggestion.cpp
@@ -7,6 +7,8 @@ add_library(LLMCore STATIC
    PromptTemplateManager.hpp PromptTemplateManager.cpp
    RequestConfig.hpp
    RequestHandler.hpp RequestHandler.cpp
    OllamaMessage.hpp OllamaMessage.cpp
    OpenAIMessage.hpp OpenAIMessage.cpp
)

target_link_libraries(LLMCore
@@ -18,12 +18,38 @@
 */

#include "OllamaMessage.hpp"
#include <QJsonArray>
#include <QJsonDocument>

namespace QodeAssist::Providers {
namespace QodeAssist::LLMCore {

OllamaMessage OllamaMessage::fromJson(const QJsonObject &obj, Type type)
QJsonObject OllamaMessage::parseJsonFromData(const QByteArray &data)
{
    QByteArrayList lines = data.split('\n');
    for (const QByteArray &line : lines) {
        if (line.trimmed().isEmpty()) {
            continue;
        }

        QJsonParseError error;
        QJsonDocument doc = QJsonDocument::fromJson(line, &error);
        if (!doc.isNull() && error.error == QJsonParseError::NoError) {
            return doc.object();
        }
    }
    return QJsonObject();
}

OllamaMessage OllamaMessage::fromJson(const QByteArray &data, Type type)
{
    OllamaMessage msg;
    QJsonObject obj = parseJsonFromData(data);

    if (obj.isEmpty()) {
        msg.error = "Invalid JSON response";
        return msg;
    }

    msg.model = obj["model"].toString();
    msg.createdAt = QDateTime::fromString(obj["created_at"].toString(), Qt::ISODate);
    msg.done = obj["done"].toBool();
@@ -73,4 +99,4 @@ bool OllamaMessage::hasError() const
    return !error.isEmpty();
}

} // namespace QodeAssist::Providers
} // namespace QodeAssist::LLMCore
@@ -20,11 +20,10 @@
#pragma once

#include <QDateTime>
#include <QJsonArray>
#include <QJsonObject>
#include <QObject>

namespace QodeAssist::Providers {
namespace QodeAssist::LLMCore {

class OllamaMessage
{
@@ -58,14 +57,15 @@ public:
    std::variant<GenerateResponse, ChatResponse> response;
    bool done{false};
    QString doneReason;
    Metrics metrics;
    QString error;
    Metrics metrics;

    static OllamaMessage fromJson(const QJsonObject &obj, Type type);

    static OllamaMessage fromJson(const QByteArray &data, Type type);
    QString getContent() const;

    bool hasError() const;

private:
    static QJsonObject parseJsonFromData(const QByteArray &data);
};

} // namespace QodeAssist::Providers
} // namespace QodeAssist::LLMCore
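Note (not part of the diff): a minimal usage sketch of the new QByteArray overload, assuming the text of a generate-type chunk arrives in Ollama's usual "response" field; the model name and payload below are invented for illustration.

#include "llmcore/OllamaMessage.hpp"

#include <QDebug>

using QodeAssist::LLMCore::OllamaMessage;

void sketchParseOllamaChunk()
{
    // One newline-delimited JSON chunk, as Ollama streams it for /api/generate.
    QByteArray chunk
        = R"({"model":"codellama","created_at":"2024-01-01T00:00:00Z","response":"int main","done":false})";

    // parseJsonFromData() picks the first parseable line out of the buffer.
    OllamaMessage msg = OllamaMessage::fromJson(chunk, OllamaMessage::Type::Generate);
    if (!msg.hasError())
        qDebug() << msg.getContent() << msg.done; // expected: the "response" text and false
}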
llmcore/OpenAIMessage.cpp (new normal file, 113 lines)
@@ -0,0 +1,113 @@
/*
 * Copyright (C) 2024 Petr Mironychev
 *
 * This file is part of QodeAssist.
 *
 * QodeAssist is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * QodeAssist is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with QodeAssist. If not, see <https://www.gnu.org/licenses/>.
 */

#include "OpenAIMessage.hpp"
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>

namespace QodeAssist::LLMCore {

OpenAIMessage OpenAIMessage::fromJson(const QByteArray &data)
{
    OpenAIMessage msg;

    QByteArrayList lines = data.split('\n');
    QByteArray jsonData;

    for (const QByteArray &line : lines) {
        if (line.trimmed().isEmpty()) {
            continue;
        }

        if (line.trimmed() == "data: [DONE]") {
            msg.done = true;
            continue;
        }

        if (line.startsWith("data: ")) {
            jsonData = line.mid(6);
            break;
        }
    }

    if (jsonData.isEmpty()) {
        jsonData = data;
    }

    QJsonParseError error;
    QJsonDocument doc = QJsonDocument::fromJson(jsonData, &error);
    if (doc.isNull()) {
        msg.error = QString("Invalid JSON response: %1").arg(error.errorString());
        return msg;
    }

    QJsonObject obj = doc.object();

    if (obj.contains("error")) {
        msg.error = obj["error"].toObject()["message"].toString();
        return msg;
    }

    if (obj.contains("choices")) {
        auto choices = obj["choices"].toArray();
        if (!choices.isEmpty()) {
            auto choiceObj = choices[0].toObject();

            if (choiceObj.contains("message")) {
                msg.choice.content = choiceObj["message"].toObject()["content"].toString();
            } else if (choiceObj.contains("delta")) {
                msg.choice.content = choiceObj["delta"].toObject()["content"].toString();
            }

            msg.choice.finishReason = choiceObj["finish_reason"].toString();
            if (!msg.choice.finishReason.isEmpty()) {
                msg.done = true;
            }
        }
    }

    if (obj.contains("usage")) {
        QJsonObject usage = obj["usage"].toObject();
        msg.usage.promptTokens = usage["prompt_tokens"].toInt();
        msg.usage.completionTokens = usage["completion_tokens"].toInt();
        msg.usage.totalTokens = usage["total_tokens"].toInt();
    }

    return msg;
}

QString OpenAIMessage::getContent() const
{
    return choice.content;
}

bool OpenAIMessage::hasError() const
{
    return !error.isEmpty();
}

bool OpenAIMessage::isDone() const
{
    return done
           || (!choice.finishReason.isEmpty()
               && (choice.finishReason == "stop" || choice.finishReason == "length"));
}

} // namespace QodeAssist::LLMCore
llmcore/OpenAIMessage.hpp (new normal file, 58 lines)
@@ -0,0 +1,58 @@
/*
 * Copyright (C) 2024 Petr Mironychev
 *
 * This file is part of QodeAssist.
 *
 * QodeAssist is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * QodeAssist is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with QodeAssist. If not, see <https://www.gnu.org/licenses/>.
 */

#pragma once

#include <QByteArray>
#include <QJsonObject>
#include <QString>

namespace QodeAssist::LLMCore {

class OpenAIMessage
{
public:
    struct Choice
    {
        QString content;
        QString finishReason;
    };

    struct Usage
    {
        int promptTokens{0};
        int completionTokens{0};
        int totalTokens{0};
    };

    Choice choice;
    QString error;
    bool done{false};
    Usage usage;

    static OpenAIMessage fromJson(const QByteArray &data);
    QString getContent() const;
    bool hasError() const;
    bool isDone() const;

private:
    static OpenAIMessage fromJsonObject(const QJsonObject &obj);
};

} // namespace QodeAssist::LLMCore
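Note (not part of the diff): a rough sketch of feeding OpenAIMessage::fromJson both shapes it handles, a streaming SSE line prefixed with "data: " and a plain non-streaming body; the JSON payloads are invented OpenAI-compatible examples such as LM Studio emits.

#include "llmcore/OpenAIMessage.hpp"

#include <QDebug>

using QodeAssist::LLMCore::OpenAIMessage;

void sketchParseOpenAIChunks()
{
    // Streaming chunk: content arrives under choices[0].delta.content.
    QByteArray streamed
        = R"(data: {"choices":[{"delta":{"content":"Hello"},"finish_reason":null}]})";
    OpenAIMessage part = OpenAIMessage::fromJson(streamed);
    qDebug() << part.getContent() << part.isDone(); // "Hello" false

    // Non-streaming body: content sits under choices[0].message.content,
    // and a non-empty finish_reason marks the message as done.
    QByteArray full
        = R"({"choices":[{"message":{"content":"Hello, world"},"finish_reason":"stop"}],)"
          R"("usage":{"prompt_tokens":3,"completion_tokens":4,"total_tokens":7}})";
    OpenAIMessage whole = OpenAIMessage::fromJson(full);
    qDebug() << whole.getContent() << whole.isDone() << whole.usage.totalTokens; // "Hello, world" true 7
}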
@@ -25,6 +25,7 @@
#include <QJsonObject>
#include <QNetworkReply>

#include "llmcore/OpenAIMessage.hpp"
#include "logger/Logger.hpp"
#include "settings/ChatAssistantSettings.hpp"
#include "settings/CodeCompletionSettings.hpp"
@@ -101,43 +102,19 @@ void LMStudioProvider::prepareRequest(QJsonObject &request, LLMCore::RequestType

bool LMStudioProvider::handleResponse(QNetworkReply *reply, QString &accumulatedResponse)
{
    bool isComplete = false;
    while (reply->canReadLine()) {
        QByteArray line = reply->readLine().trimmed();
        if (line.isEmpty()) {
            continue;
        }
        if (line == "data: [DONE]") {
            isComplete = true;
            break;
        }
        if (line.startsWith("data: ")) {
            line = line.mid(6); // Remove "data: " prefix
        }
        QJsonDocument jsonResponse = QJsonDocument::fromJson(line);
        if (jsonResponse.isNull()) {
            qWarning() << "Invalid JSON response from LM Studio:" << line;
            continue;
        }
        QJsonObject responseObj = jsonResponse.object();
        if (responseObj.contains("choices")) {
            QJsonArray choices = responseObj["choices"].toArray();
            if (!choices.isEmpty()) {
                QJsonObject choice = choices.first().toObject();
                QJsonObject delta = choice["delta"].toObject();
                if (delta.contains("content")) {
                    QString completion = delta["content"].toString();

                    accumulatedResponse += completion;
                }
                if (choice["finish_reason"].toString() == "stop") {
                    isComplete = true;
                    break;
                }
            }
        }
    QByteArray data = reply->readAll();
    if (data.isEmpty()) {
        return false;
    }
    return isComplete;

    auto message = LLMCore::OpenAIMessage::fromJson(data);
    if (message.hasError()) {
        LOG_MESSAGE("Error in OpenAI response: " + message.error);
        return false;
    }

    accumulatedResponse += message.getContent();
    return message.isDone();
}

QList<QString> LMStudioProvider::getInstalledModels(const QString &url)
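Note (not part of the diff): with the manual SSE loop gone, handleResponse() just drains the reply and delegates parsing to LLMCore::OpenAIMessage. Below is a minimal sketch of how a caller might drive it on each readyRead burst; the function, namespace, and wiring are assumptions for illustration (in the plugin this presumably happens inside LLMCore::RequestHandler, which this diff does not touch).

#include "providers/LMStudioProvider.hpp"

#include <QDebug>
#include <QNetworkReply>
#include <QObject>
#include <memory>

// Illustrative driver: feed every readyRead burst through handleResponse()
// and report once the provider signals that the stream is complete.
void sketchDriveLMStudio(QodeAssist::Providers::LMStudioProvider *provider, QNetworkReply *reply)
{
    auto accumulated = std::make_shared<QString>(); // persists across bursts
    QObject::connect(reply, &QNetworkReply::readyRead, [provider, reply, accumulated]() {
        if (provider->handleResponse(reply, *accumulated))
            qDebug() << "LM Studio stream finished:" << *accumulated;
    });
}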
@@ -25,7 +25,7 @@
#include <QNetworkReply>
#include <QtCore/qeventloop.h>

#include "OllamaMessage.hpp"
#include "llmcore/OllamaMessage.hpp"
#include "logger/Logger.hpp"
#include "settings/ChatAssistantSettings.hpp"
#include "settings/CodeCompletionSettings.hpp"
@@ -88,41 +88,23 @@ void OllamaProvider::prepareRequest(QJsonObject &request, LLMCore::RequestType t

bool OllamaProvider::handleResponse(QNetworkReply *reply, QString &accumulatedResponse)
{
    const QString endpoint = reply->url().path();
    auto messageType = endpoint == completionEndpoint() ? OllamaMessage::Type::Generate
                                                        : OllamaMessage::Type::Chat;

    auto processMessage =
        [&accumulatedResponse](const QJsonDocument &doc, OllamaMessage::Type messageType) {
            if (doc.isNull()) {
                LOG_MESSAGE("Invalid JSON response from Ollama");
                return false;
            }

            auto message = OllamaMessage::fromJson(doc.object(), messageType);
            if (message.hasError()) {
                LOG_MESSAGE("Error in Ollama response: " + message.error);
                return false;
            }

            accumulatedResponse += message.getContent();
            return message.done;
        };

    if (reply->canReadLine()) {
        while (reply->canReadLine()) {
            QByteArray line = reply->readLine().trimmed();
            if (line.isEmpty())
                continue;

            if (processMessage(QJsonDocument::fromJson(line), messageType)) {
                return true;
            }
        }
    QByteArray data = reply->readAll();
    if (data.isEmpty()) {
        return false;
    } else {
        return processMessage(QJsonDocument::fromJson(reply->readAll()), messageType);
    }

    const QString endpoint = reply->url().path();
    auto messageType = endpoint == completionEndpoint() ? LLMCore::OllamaMessage::Type::Generate
                                                        : LLMCore::OllamaMessage::Type::Chat;

    auto message = LLMCore::OllamaMessage::fromJson(data, messageType);
    if (message.hasError()) {
        LOG_MESSAGE("Error in Ollama response: " + message.error);
        return false;
    }

    accumulatedResponse += message.getContent();
    return message.done;
}

QList<QString> OllamaProvider::getInstalledModels(const QString &url)