Improve general settings UX by adding helper dialogs for the user (#42)

- Added dialogs for selecting the URL, the model, and a custom model, for cases where the provider does not supply a model list or the QodeAssist setup has not been completed
This commit is contained in:
Petr Mironychev
2024-11-16 15:25:28 +01:00
committed by GitHub
parent 5e813ba402
commit f209cb75a2
14 changed files with 359 additions and 20 deletions

View File

@ -53,6 +53,11 @@ QString LMStudioProvider::chatEndpoint() const
return "/v1/chat/completions";
}
// LM Studio exposes an endpoint for enumerating installed models, so the
// settings UI may ask this provider for a model list.
bool LMStudioProvider::supportsModelListing() const { return true; }
void LMStudioProvider::prepareRequest(QJsonObject &request, LLMCore::RequestType type)
{
auto prepareMessages = [](QJsonObject &req) -> QJsonArray {

View File

@ -32,6 +32,7 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(QJsonObject &request, LLMCore::RequestType type) override;
bool handleResponse(QNetworkReply *reply, QString &accumulatedResponse) override;
QList<QString> getInstalledModels(const QString &url) override;

View File

@ -53,6 +53,11 @@ QString OllamaProvider::chatEndpoint() const
return "/api/chat";
}
// Ollama can report its installed models, so the settings UI may query
// this provider for a model list.
bool OllamaProvider::supportsModelListing() const { return true; }
void OllamaProvider::prepareRequest(QJsonObject &request, LLMCore::RequestType type)
{
auto applySettings = [&request](const auto &settings) {

View File

@ -32,6 +32,7 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(QJsonObject &request, LLMCore::RequestType type) override;
bool handleResponse(QNetworkReply *reply, QString &accumulatedResponse) override;
QList<QString> getInstalledModels(const QString &url) override;

View File

@ -50,6 +50,11 @@ QString OpenAICompatProvider::chatEndpoint() const
return "/v1/chat/completions";
}
// A generic OpenAI-compatible endpoint gives no guarantee of a model-listing
// API, so this provider opts out; the UI falls back to manual model entry.
bool OpenAICompatProvider::supportsModelListing() const { return false; }
void OpenAICompatProvider::prepareRequest(QJsonObject &request, LLMCore::RequestType type)
{
auto prepareMessages = [](QJsonObject &req) -> QJsonArray {

View File

@ -32,6 +32,7 @@ public:
QString url() const override;
QString completionEndpoint() const override;
QString chatEndpoint() const override;
bool supportsModelListing() const override;
void prepareRequest(QJsonObject &request, LLMCore::RequestType type) override;
bool handleResponse(QNetworkReply *reply, QString &accumulatedResponse) override;
QList<QString> getInstalledModels(const QString &url) override;