feat: Add llama.cpp provider and fim template (#118)

This commit is contained in:
Petr Mironychev
2025-03-09 22:57:33 +01:00
committed by GitHub
parent c9a3cdaf25
commit e66f467214
13 changed files with 349 additions and 21 deletions

View File

@@ -75,10 +75,11 @@ public:
bool isSupportProvider(LLMCore::ProviderID id) const override
{
switch (id) {
case QodeAssist::LLMCore::ProviderID::Ollama:
case QodeAssist::LLMCore::ProviderID::LMStudio:
case QodeAssist::LLMCore::ProviderID::OpenRouter:
case QodeAssist::LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::Ollama:
case LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::OpenRouter:
case LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::LlamaCpp:
return true;
default:
return false;

View File

@@ -76,10 +76,11 @@ public:
bool isSupportProvider(LLMCore::ProviderID id) const override
{
switch (id) {
case QodeAssist::LLMCore::ProviderID::Ollama:
case QodeAssist::LLMCore::ProviderID::LMStudio:
case QodeAssist::LLMCore::ProviderID::OpenRouter:
case QodeAssist::LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::Ollama:
case LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::OpenRouter:
case LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::LlamaCpp:
return true;
default:
return false;

View File

@@ -73,10 +73,11 @@ public:
bool isSupportProvider(LLMCore::ProviderID id) const override
{
switch (id) {
case QodeAssist::LLMCore::ProviderID::Ollama:
case QodeAssist::LLMCore::ProviderID::LMStudio:
case QodeAssist::LLMCore::ProviderID::OpenRouter:
case QodeAssist::LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::Ollama:
case LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::OpenRouter:
case LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::LlamaCpp:
return true;
default:
return false;

View File

@@ -80,10 +80,11 @@ public:
bool isSupportProvider(LLMCore::ProviderID id) const override
{
switch (id) {
case QodeAssist::LLMCore::ProviderID::Ollama:
case QodeAssist::LLMCore::ProviderID::LMStudio:
case QodeAssist::LLMCore::ProviderID::OpenRouter:
case QodeAssist::LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::Ollama:
case LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::OpenRouter:
case LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::LlamaCpp:
return true;
default:
return false;

56
templates/LlamaCppFim.hpp Normal file
View File

@@ -0,0 +1,56 @@
/*
* Copyright (C) 2024 Petr Mironychev
*
* This file is part of QodeAssist.
*
* QodeAssist is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* QodeAssist is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with QodeAssist. If not, see <https://www.gnu.org/licenses/>.
*/
#pragma once
#include "llmcore/PromptTemplate.hpp"
namespace QodeAssist::Templates {

/// FIM (Fill-in-Middle) prompt template targeting llama.cpp's native
/// /infill endpoint. Maps the completion context around the cursor onto
/// the JSON fields the endpoint expects ("input_prefix"/"input_suffix").
class LlamaCppFim : public LLMCore::PromptTemplate
{
public:
    LLMCore::TemplateType type() const override { return LLMCore::TemplateType::FIM; }
    QString name() const override { return "llama.cpp FIM"; }

    // No client-side stop words: the /infill endpoint terminates generation itself.
    QStringList stopWords() const override { return {}; }

    /// Fills @p request with the code before/after the cursor from @p context.
    /// Missing sides of the context fall back to empty strings via value_or.
    /// NOTE(review): description() below also advertises "input_extra", but it
    /// is never populated here — confirm whether extra context (e.g. a system
    /// prompt) should be forwarded to the endpoint.
    void prepareRequest(QJsonObject &request, const LLMCore::ContextData &context) const override
    {
        request["input_prefix"] = context.prefix.value_or("");
        request["input_suffix"] = context.suffix.value_or("");
    }

    /// Human-readable summary shown in the template picker.
    QString description() const override
    {
        return "Default llama.cpp FIM (Fill-in-Middle) /infill template with native format:\n\n"
               "{\n"
               " \"input_prefix\": \"<code prefix>\",\n"
               " \"input_suffix\": \"<code suffix>\",\n"
               " \"input_extra\": \"<system prompt>\"\n"
               "}\n\n"
               "Recommended for models with FIM capability.";
    }

    /// This template speaks llama.cpp's /infill protocol only.
    /// Qualified as LLMCore:: (not QodeAssist::LLMCore::) for consistency with
    /// the other templates updated in this commit.
    bool isSupportProvider(LLMCore::ProviderID id) const override
    {
        return id == LLMCore::ProviderID::LlamaCpp;
    }
};

} // namespace QodeAssist::Templates

View File

@@ -63,9 +63,10 @@ public:
bool isSupportProvider(LLMCore::ProviderID id) const override
{
switch (id) {
case QodeAssist::LLMCore::ProviderID::OpenAICompatible:
case QodeAssist::LLMCore::ProviderID::OpenRouter:
case QodeAssist::LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::OpenAICompatible:
case LLMCore::ProviderID::OpenRouter:
case LLMCore::ProviderID::LMStudio:
case LLMCore::ProviderID::LlamaCpp:
return true;
default:
return false;

View File

@@ -34,6 +34,7 @@
#include "templates/GoogleAI.hpp"
#include "templates/Llama2.hpp"
#include "templates/Llama3.hpp"
#include "templates/LlamaCppFim.hpp"
#include "templates/Qwen.hpp"
#include "templates/StarCoder2Fim.hpp"
@@ -60,6 +61,7 @@ inline void registerTemplates()
templateManager.registerTemplate<OpenAICompatible>();
templateManager.registerTemplate<Alpaca>();
templateManager.registerTemplate<GoogleAI>();
templateManager.registerTemplate<LlamaCppFim>();
}
} // namespace QodeAssist::Templates