Mirror of https://github.com/Palm1r/QodeAssist.git
feat: Rename old llmcore module to pluginllmcore
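Every hunk below applies the same mechanical rename across the prompt-template headers: the llmcore/ include prefix becomes pluginllmcore/, and uses of the LLMCore namespace become PluginLLMCore. As a reading aid, here is a minimal sketch of the resulting pattern in a template header. It assumes the PluginLLMCore API mirrors the old LLMCore one; the ExampleTemplate class is hypothetical, and base-class overrides not visible in this diff (such as the description text) are omitted:

#pragma once

#include "pluginllmcore/PromptTemplate.hpp" // was: #include "llmcore/PromptTemplate.hpp"

#include <QJsonArray>
#include <QJsonObject>

namespace QodeAssist::Templates {

// Hypothetical template illustrating the renamed namespace in use.
class ExampleTemplate : public PluginLLMCore::PromptTemplate // was: LLMCore::PromptTemplate
{
public:
    QString name() const override { return "Example"; }
    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
    QStringList stopWords() const override { return {}; }

    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
    {
        // ContextData::prefix/suffix are optional strings, as seen in the FIM templates below.
        QJsonArray messages;
        messages.append(QJsonObject{{"role", "user"}, {"content", context.prefix.value_or("")}});
        request["messages"] = messages;
    }

    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
    {
        return id == PluginLLMCore::ProviderID::Ollama; // was: LLMCore::ProviderID::Ollama
    }
};

} // namespace QodeAssist::Templates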
@@ -19,21 +19,21 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"
 #include <QJsonArray>

 namespace QodeAssist::Templates {

-class Alpaca : public LLMCore::PromptTemplate
+class Alpaca : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "Alpaca"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QStringList stopWords() const override
     {
         return QStringList() << "### Instruction:" << "### Response:";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -72,14 +72,14 @@ public:
             "}\n\n"
             "Combines all messages into a single formatted prompt.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::Ollama:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::Ollama:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -21,20 +21,20 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class ChatML : public LLMCore::PromptTemplate
+class ChatML : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "ChatML"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QStringList stopWords() const override
     {
         return QStringList() << "<|im_start|>" << "<|im_end|>";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -73,14 +73,14 @@ public:
             "}\n\n"
             "Compatible with multiple providers supporting the ChatML token format.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::Ollama:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::Ollama:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class Claude : public LLMCore::PromptTemplate
+class Claude : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "Claude"; }
     QStringList stopWords() const override { return QStringList(); }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -111,10 +111,10 @@ public:
             "}\n\n"
             "Formats content according to Claude API specifications.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Claude:
+        case QodeAssist::PluginLLMCore::ProviderID::Claude:
             return true;
         default:
             return false;
@@ -19,20 +19,20 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class CodeLlamaFim : public LLMCore::PromptTemplate
+class CodeLlamaFim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "CodeLlama FIM"; }
     QStringList stopWords() const override
     {
         return QStringList() << "<EOT>" << "<PRE>" << "<SUF" << "<MID>";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = QString("<PRE> %1 <SUF>%2 <MID>")
             .arg(context.prefix.value_or(""), context.suffix.value_or(""));
@@ -47,10 +47,10 @@ public:
             "}\n\n"
             "Optimized for code completion with CodeLlama models.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -19,21 +19,21 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class CodeLlamaQMLFim : public LLMCore::PromptTemplate
+class CodeLlamaQMLFim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "CodeLlama QML FIM"; }
     QStringList stopWords() const override
     {
         return QStringList() << "<SUF>" << "<PRE>" << "</PRE>" << "</SUF>" << "< EOT >" << "\\end"
             << "<MID>" << "</MID>" << "##";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = QString("<SUF>%1<PRE>%2<MID>")
             .arg(context.suffix.value_or(""), context.prefix.value_or(""));
@@ -48,10 +48,10 @@ public:
             "}\n\n"
             "Specifically optimized for QML/JavaScript code completion.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -22,18 +22,18 @@
 #include <QJsonArray>
 #include <QJsonObject>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class GoogleAI : public LLMCore::PromptTemplate
+class GoogleAI : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "Google AI"; }
     QStringList stopWords() const override { return QStringList(); }

-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray contents;

@@ -128,9 +128,9 @@ public:
             "Supports proper role mapping (model/user roles), images, and thinking blocks.";
     }

-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
-        return id == QodeAssist::LLMCore::ProviderID::GoogleAI;
+        return id == QodeAssist::PluginLLMCore::ProviderID::GoogleAI;
     }
 };

@@ -19,18 +19,18 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"
 #include <QJsonArray>

 namespace QodeAssist::Templates {

-class Llama2 : public LLMCore::PromptTemplate
+class Llama2 : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "Llama 2"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QStringList stopWords() const override { return QStringList() << "[INST]"; }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -70,14 +70,14 @@ public:
             "}\n\n"
             "Compatible with Ollama, LM Studio, and other services for Llama 2.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::Ollama:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::Ollama:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -21,20 +21,20 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class Llama3 : public LLMCore::PromptTemplate
+class Llama3 : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "Llama 3"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QStringList stopWords() const override
     {
         return QStringList() << "<|start_header_id|>" << "<|end_header_id|>" << "<|eot_id|>";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -77,14 +77,14 @@ public:
             "}\n\n"
             "Compatible with Ollama, LM Studio, and OpenAI-compatible services for Llama 3.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::Ollama:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::Ollama:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -21,18 +21,18 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class LlamaCppFim : public LLMCore::PromptTemplate
+class LlamaCppFim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "llama.cpp FIM"; }
     QStringList stopWords() const override { return {}; }

-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["input_prefix"] = context.prefix.value_or("");
         request["input_suffix"] = context.suffix.value_or("");
@@ -60,9 +60,9 @@ public:
             "Recommended for models with FIM capability.";
     }

-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
-        return id == QodeAssist::LLMCore::ProviderID::LlamaCpp;
+        return id == QodeAssist::PluginLLMCore::ProviderID::LlamaCpp;
     }
 };

@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class MistralAIFim : public LLMCore::PromptTemplate
+class MistralAIFim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "Mistral AI FIM"; }
     QStringList stopWords() const override { return QStringList(); }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = context.prefix.value_or("");
         request["suffix"] = context.suffix.value_or("");
@@ -45,10 +45,10 @@ public:
             "}\n\n"
             "Optimized for code completion with MistralAI models.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::MistralAI:
+        case QodeAssist::PluginLLMCore::ProviderID::MistralAI:
             return true;
         default:
             return false;
@@ -56,14 +56,14 @@ public:
     }
 };

-class MistralAIChat : public LLMCore::PromptTemplate
+class MistralAIChat : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "Mistral AI Chat"; }
     QStringList stopWords() const override { return QStringList(); }

-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -116,10 +116,10 @@ public:
             "}\n\n"
             "Supports system messages, conversation history, and images.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::MistralAI:
+        case QodeAssist::PluginLLMCore::ProviderID::MistralAI:
             return true;
         default:
             return false;
@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class OllamaFim : public LLMCore::PromptTemplate
+class OllamaFim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "Ollama FIM"; }
     QStringList stopWords() const override { return QStringList() << "<EOT>"; }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = context.prefix.value_or("");
         request["suffix"] = context.suffix.value_or("");
@@ -47,10 +47,10 @@ public:
             "}\n\n"
             "Recommended for Ollama models with FIM capability.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -58,14 +58,14 @@ public:
     }
 };

-class OllamaChat : public LLMCore::PromptTemplate
+class OllamaChat : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "Ollama Chat"; }
     QStringList stopWords() const override { return QStringList(); }

-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -107,10 +107,10 @@ public:
             "Recommended for Ollama models with chat capability.\n"
             "Supports images for multimodal models (e.g., llava).";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class OpenAI : public LLMCore::PromptTemplate
+class OpenAI : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "OpenAI"; }
     QStringList stopWords() const override { return QStringList(); }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -84,10 +84,10 @@ public:
             "}\n\n"
             "Standard Chat API format for OpenAI.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::OpenAI:
+        case QodeAssist::PluginLLMCore::ProviderID::OpenAI:
             return true;
         default:
             return false;
@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class OpenAICompatible : public LLMCore::PromptTemplate
+class OpenAICompatible : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::Chat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::Chat; }
     QString name() const override { return "OpenAI Compatible"; }
     QStringList stopWords() const override { return QStringList(); }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -85,13 +85,13 @@ public:
             "Works with any service implementing the OpenAI Chat API specification.\n"
             "Supports images.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -19,24 +19,24 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"
 #include "providers/OpenAIResponsesRequestBuilder.hpp"

 namespace QodeAssist::Templates {

-class OpenAIResponses : public LLMCore::PromptTemplate
+class OpenAIResponses : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const noexcept override
+    PluginLLMCore::TemplateType type() const noexcept override
     {
-        return LLMCore::TemplateType::Chat;
+        return PluginLLMCore::TemplateType::Chat;
     }

     QString name() const override { return "OpenAI Responses"; }

     QStringList stopWords() const override { return {}; }

-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         using namespace QodeAssist::OpenAIResponses;
         RequestBuilder builder;
@@ -108,9 +108,9 @@ public:
             "}\n\n"
             "Uses type-safe RequestBuilder for OpenAI Responses API.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const noexcept override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const noexcept override
     {
-        return id == QodeAssist::LLMCore::ProviderID::OpenAIResponses;
+        return id == QodeAssist::PluginLLMCore::ProviderID::OpenAIResponses;
     }

 private:
@@ -19,18 +19,18 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"
 #include <QJsonArray>

 namespace QodeAssist::Templates {

-class Qwen25CoderFIM : public LLMCore::PromptTemplate
+class Qwen25CoderFIM : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "Qwen2.5 Coder FIM"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QStringList stopWords() const override { return QStringList() << "<|endoftext|>" << "<|EOT|>"; }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = QString("<|fim_prefix|>%1<|fim_suffix|>%2<|fim_middle|>")
             .arg(context.prefix.value_or(""), context.suffix.value_or(""));
@@ -46,10 +46,10 @@ public:
             "}\n\n"
             "Ideal for code completion with Qwen models.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -21,17 +21,17 @@

 #include <QJsonArray>

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class Qwen3CoderFIM : public LLMCore::PromptTemplate
+class Qwen3CoderFIM : public PluginLLMCore::PromptTemplate
 {
 public:
     QString name() const override { return "Qwen3 Coder FIM"; }
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIMOnChat; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIMOnChat; }
     QStringList stopWords() const override { return QStringList() << "<|im_end|>"; }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         QJsonArray messages;

@@ -62,14 +62,14 @@ public:
             " ]\n"
             "}\n\n";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case LLMCore::ProviderID::Ollama:
-        case LLMCore::ProviderID::LMStudio:
-        case LLMCore::ProviderID::OpenRouter:
-        case LLMCore::ProviderID::OpenAICompatible:
-        case LLMCore::ProviderID::LlamaCpp:
+        case PluginLLMCore::ProviderID::Ollama:
+        case PluginLLMCore::ProviderID::LMStudio:
+        case PluginLLMCore::ProviderID::OpenRouter:
+        case PluginLLMCore::ProviderID::OpenAICompatible:
+        case PluginLLMCore::ProviderID::LlamaCpp:
             return true;
         default:
             return false;
@@ -19,21 +19,21 @@

 #pragma once

-#include "llmcore/PromptTemplate.hpp"
+#include "pluginllmcore/PromptTemplate.hpp"

 namespace QodeAssist::Templates {

-class StarCoder2Fim : public LLMCore::PromptTemplate
+class StarCoder2Fim : public PluginLLMCore::PromptTemplate
 {
 public:
-    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
+    PluginLLMCore::TemplateType type() const override { return PluginLLMCore::TemplateType::FIM; }
     QString name() const override { return "StarCoder2 FIM"; }
     QStringList stopWords() const override
     {
         return QStringList() << "<|endoftext|>" << "<file_sep>" << "<fim_prefix>" << "<fim_suffix>"
             << "<fim_middle>";
     }
-    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
+    void prepareRequest(QJsonObject &request, const PluginLLMCore::ContextData &context) const override
     {
         request["prompt"] = QString("<fim_prefix>%1<fim_suffix>%2<fim_middle>")
             .arg(context.prefix.value_or(""), context.suffix.value_or(""));
@@ -48,10 +48,10 @@ public:
             "}\n\n"
             "Includes stop words to prevent token duplication.";
     }
-    bool isSupportProvider(LLMCore::ProviderID id) const override
+    bool isSupportProvider(PluginLLMCore::ProviderID id) const override
     {
         switch (id) {
-        case QodeAssist::LLMCore::ProviderID::Ollama:
+        case QodeAssist::PluginLLMCore::ProviderID::Ollama:
             return true;
         default:
             return false;
@@ -19,7 +19,7 @@

 #pragma once

-#include "llmcore/PromptTemplateManager.hpp"
+#include "pluginllmcore/PromptTemplateManager.hpp"
 #include "templates/Alpaca.hpp"
 #include "templates/ChatML.hpp"
 #include "templates/Claude.hpp"
@@ -44,7 +44,7 @@ namespace QodeAssist::Templates {

 inline void registerTemplates()
 {
-    auto &templateManager = LLMCore::PromptTemplateManager::instance();
+    auto &templateManager = PluginLLMCore::PromptTemplateManager::instance();
     templateManager.registerTemplate<OllamaChat>();
     templateManager.registerTemplate<OllamaFim>();
     templateManager.registerTemplate<CodeLlamaFim>();
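The last two hunks cover the registration side of the rename: the PromptTemplateManager singleton is now reached through the PluginLLMCore namespace. A sketch of how the registration header reads after this commit; the include list and the tail of registerTemplates() are abbreviated here because the hunks only show a few lines of each:

#pragma once

#include "pluginllmcore/PromptTemplateManager.hpp"
#include "templates/Alpaca.hpp"
#include "templates/ChatML.hpp"
#include "templates/Claude.hpp"
// ... the remaining template headers are included the same way (not shown in the hunk).

namespace QodeAssist::Templates {

inline void registerTemplates()
{
    // Singleton lookup now goes through the renamed PluginLLMCore namespace.
    auto &templateManager = PluginLLMCore::PromptTemplateManager::instance();
    templateManager.registerTemplate<OllamaChat>();
    templateManager.registerTemplate<OllamaFim>();
    templateManager.registerTemplate<CodeLlamaFim>();
    // ... remaining templates registered the same way (not shown in the hunk).
}

} // namespace QodeAssist::Templates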