mirror of https://github.com/Palm1r/QodeAssist.git
synced 2025-07-18 04:54:30 -04:00
✨ feat: Add OpenRouter provider
@@ -26,6 +26,7 @@
 #include <QJsonObject>
 #include <QNetworkReply>

+#include "llmcore/OpenAIMessage.hpp"
 #include "logger/Logger.hpp"

 namespace QodeAssist::Providers {
@@ -100,74 +101,40 @@ void OpenAICompatProvider::prepareRequest(QJsonObject &request, LLMCore::Request

 bool OpenAICompatProvider::handleResponse(QNetworkReply *reply, QString &accumulatedResponse)
 {
-    bool isComplete = false;
-    QString tempResponse = accumulatedResponse;
-
-    while (reply->canReadLine()) {
-        QByteArray line = reply->readLine().trimmed();
-        if (line.isEmpty()) {
-            continue;
-        }
-
-        if (!line.startsWith("data:")) {
-            continue;
-        }
-
-        line = line.mid(6);
-
-        if (line == "[DONE]") {
-            isComplete = true;
-            break;
-        }
-
-        QJsonDocument jsonResponse = QJsonDocument::fromJson(line);
-        if (jsonResponse.isNull()) {
-            LOG_MESSAGE(
-                "Invalid JSON response from OpenAI compatible provider: " + QString::fromUtf8(line));
-            continue;
-        }
-
-        QJsonObject responseObj = jsonResponse.object();
-
-        if (responseObj.contains("error")) {
-            LOG_MESSAGE(
-                "OpenAI compatible provider error: "
-                + QString::fromUtf8(QJsonDocument(responseObj).toJson(QJsonDocument::Indented)));
-            return false;
-        }
-
-        if (responseObj.contains("choices")) {
-            QJsonArray choices = responseObj["choices"].toArray();
-            if (!choices.isEmpty()) {
-                QJsonObject choice = choices.first().toObject();
-                QJsonObject delta = choice["delta"].toObject();
-                if (delta.contains("content")) {
-                    QString completion = delta["content"].toString();
-                    if (!completion.isEmpty()) {
-                        tempResponse += completion;
-                    }
-                }
-                QString finishReason = choice["finish_reason"].toString();
-                if (!finishReason.isNull() && finishReason == "stop") {
-                    isComplete = true;
-                }
-            }
-        }
-
-        if (responseObj.contains("usage")) {
-            QJsonObject usage = responseObj["usage"].toObject();
-            LOG_MESSAGE(QString("Token usage - Prompt: %1, Completion: %2, Total: %3")
-                            .arg(usage["prompt_tokens"].toInt())
-                            .arg(usage["completion_tokens"].toInt())
-                            .arg(usage["total_tokens"].toInt()));
-        }
+    QByteArray data = reply->readAll();
+    if (data.isEmpty()) {
+        return false;
     }

-    if (!tempResponse.isEmpty()) {
-        accumulatedResponse = tempResponse;
+    QByteArrayList chunks = data.split('\n');
+    for (const QByteArray &chunk : chunks) {
+        if (chunk.trimmed().isEmpty() || chunk == "data: [DONE]") {
+            continue;
+        }
+
+        QByteArray jsonData = chunk;
+        if (chunk.startsWith("data: ")) {
+            jsonData = chunk.mid(6);
+        }
+
+        QJsonParseError error;
+        QJsonDocument doc = QJsonDocument::fromJson(jsonData, &error);
+
+        if (doc.isNull()) {
+            continue;
+        }
+
+        auto message = LLMCore::OpenAIMessage::fromJson(doc.object());
+        if (message.hasError()) {
+            LOG_MESSAGE("Error in OpenAI response: " + message.error);
+            continue;
+        }
+
+        accumulatedResponse += message.getContent();
+        return message.isDone();
     }

-    return isComplete;
+    return false;
 }

 QList<QString> OpenAICompatProvider::getInstalledModels(const QString &url)
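For context, a sketch of the kind of server-sent-events payload the reworked handleResponse() above consumes. Only the newline framing, the "data: " prefix, the "[DONE]" sentinel, and the choices/delta/content/finish_reason fields are taken from the code in this diff; the concrete values and the exampleStream name are illustrative.

#include <QByteArray>

// Illustrative stream only: two content chunks followed by the end-of-stream
// sentinel. handleResponse() splits the buffer on '\n', skips blank chunks and
// "data: [DONE]", strips the leading "data: ", parses the rest as JSON, and
// hands the object to LLMCore::OpenAIMessage::fromJson().
const QByteArray exampleStream =
    "data: {\"choices\":[{\"delta\":{\"content\":\"Hel\"},\"finish_reason\":null}]}\n"
    "data: {\"choices\":[{\"delta\":{\"content\":\"lo\"},\"finish_reason\":\"stop\"}]}\n"
    "data: [DONE]\n";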