From d235d0fcdfa852136930cfe1e1b6d6788f31c4fb Mon Sep 17 00:00:00 2001 From: Petr Mironychev <9195189+Palm1r@users.noreply.github.com> Date: Wed, 2 Oct 2024 22:25:53 +0200 Subject: [PATCH] Fix providers and prompt default value --- README.md | 10 ++++++++-- settings/GeneralSettings.cpp | 13 +++++++++---- templates/CodeLlamaFimTemplate.hpp | 2 +- templates/CodeLlamaInstruct.hpp | 2 +- 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 58bfac2..52182a3 100644 --- a/README.md +++ b/README.md @@ -65,9 +65,15 @@ If you've successfully used a model that's not listed here, please let us know b 1. Install QtCreator 14.0 2. Install [Ollama](https://ollama.com). Make sure to review the system requirements before installation. -3. Install a language model in Ollama. For example, you can run: +3. Install language models in Ollama. For example, you can run: + +For suggestions: ``` -ollama run starcoder2:7b +ollama run codellama:7b-code +``` +For chat: +``` +ollama run codellama:7b-instruct ``` 4. Download the QodeAssist plugin. 5. 
Launch Qt Creator and install the plugin: diff --git a/settings/GeneralSettings.cpp b/settings/GeneralSettings.cpp index 631080e..b06e78b 100644 --- a/settings/GeneralSettings.cpp +++ b/settings/GeneralSettings.cpp @@ -131,6 +131,11 @@ GeneralSettings::GeneralSettings() loadProviders(); loadPrompts(); + llmProviders.setDefaultValue(llmProviders.indexForDisplay("Ollama")); + chatLlmProviders.setDefaultValue(chatLlmProviders.indexForDisplay("Ollama")); + fimPrompts.setDefaultValue(fimPrompts.indexForDisplay("CodeLLama FIM")); + chatPrompts.setDefaultValue(chatPrompts.indexForDisplay("CodeLLama Chat")); + readSettings(); auto fimProviderName = llmProviders.displayForIndex(llmProviders.value()); @@ -273,12 +278,12 @@ void GeneralSettings::resetPageToDefaults() resetAspect(startSuggestionTimer); resetAspect(autoCompletionTypingInterval); resetAspect(autoCompletionCharThreshold); + resetAspect(llmProviders); + resetAspect(chatLlmProviders); + resetAspect(fimPrompts); + resetAspect(chatPrompts); } - int fimIndex = llmProviders.indexForDisplay("Ollama"); - llmProviders.setVolatileValue(fimIndex); - int chatIndex = chatLlmProviders.indexForDisplay("Ollama"); - chatLlmProviders.setVolatileValue(chatIndex); modelName.setVolatileValue(""); chatModelName.setVolatileValue(""); diff --git a/templates/CodeLlamaFimTemplate.hpp b/templates/CodeLlamaFimTemplate.hpp index 48fedfc..729f79e 100644 --- a/templates/CodeLlamaFimTemplate.hpp +++ b/templates/CodeLlamaFimTemplate.hpp @@ -27,7 +27,7 @@ class CodeLlamaFimTemplate : public PromptTemplate { public: TemplateType type() const override { return TemplateType::Fim; } - QString name() const override { return "CodeLlama FIM"; } + QString name() const override { return "CodeLLama FIM"; } QString promptTemplate() const override { return "%1
 %2 %3 "; }
     QStringList stopWords() const override
     {
diff --git a/templates/CodeLlamaInstruct.hpp b/templates/CodeLlamaInstruct.hpp
index 96d1f22..4c29f72 100644
--- a/templates/CodeLlamaInstruct.hpp
+++ b/templates/CodeLlamaInstruct.hpp
@@ -28,7 +28,7 @@ class CodeLlamaInstructTemplate : public PromptTemplate
 {
 public:
     TemplateType type() const override { return TemplateType::Chat; }
-    QString name() const override { return "CodeLlama Chat"; }
+    QString name() const override { return "CodeLLama Chat"; }
     QString promptTemplate() const override { return "[INST] %1 [/INST]"; }
     QStringList stopWords() const override { return QStringList() << "[INST]" << "[/INST]"; }