Mirror of https://github.com/Palm1r/QodeAssist.git, synced 2025-05-28 03:10:28 -04:00
Fix providers and prompt default value
This commit is contained in:
parent 1cbde3d55b
commit d235d0fcdf

README.md (10 lines changed)
@@ -65,9 +65,15 @@ If you've successfully used a model that's not listed here, please let us know b
 
 1. Install QtCreator 14.0
 2. Install [Ollama](https://ollama.com). Make sure to review the system requirements before installation.
-3. Install a language model in Ollama. For example, you can run:
+3. Install a language models in Ollama. For example, you can run:
+
+For suggestions:
 ```
-ollama run starcoder2:7b
+ollama run codellama:7b-code
 ```
+For chat:
+```
+ollama run codellama:7b-instruct
+```
 4. Download the QodeAssist plugin.
 5. Launch Qt Creator and install the plugin:
@@ -131,6 +131,11 @@ GeneralSettings::GeneralSettings()
     loadProviders();
     loadPrompts();
 
+    llmProviders.setDefaultValue(llmProviders.indexForDisplay("Ollama"));
+    chatLlmProviders.setDefaultValue(chatLlmProviders.indexForDisplay("Ollama"));
+    fimPrompts.setDefaultValue(fimPrompts.indexForDisplay("CodeLLama FIM"));
+    chatPrompts.setDefaultValue(chatPrompts.indexForDisplay("CodeLLama Chat"));
+
     readSettings();
 
     auto fimProviderName = llmProviders.displayForIndex(llmProviders.value());
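The new defaults are resolved by display name rather than stored as raw indices. A minimal, Qt-free stand-in for that lookup pattern (not the actual aspect API used by the plugin; the second provider name is made up here) might look like this:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Stand-in for a selection aspect: a list of display names plus a default index.
struct SelectionAspect
{
    std::vector<std::string> items;
    int defaultIndex = 0;

    // Same role as indexForDisplay(): returns -1 when the name is unknown.
    int indexForDisplay(const std::string &name) const
    {
        for (std::size_t i = 0; i < items.size(); ++i)
            if (items[i] == name)
                return static_cast<int>(i);
        return -1;
    }

    // Guard against a missing name so a failed lookup cannot poison the default.
    void setDefaultValue(int index)
    {
        if (index >= 0 && index < static_cast<int>(items.size()))
            defaultIndex = index;
    }
};

int main()
{
    SelectionAspect llmProviders{{"Ollama", "SomeOtherProvider"}, 0};
    llmProviders.setDefaultValue(llmProviders.indexForDisplay("Ollama")); // same pattern as the diff
    std::cout << "default provider: " << llmProviders.items[llmProviders.defaultIndex] << "\n";
    return 0;
}
```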
@@ -273,12 +278,12 @@ void GeneralSettings::resetPageToDefaults()
         resetAspect(startSuggestionTimer);
         resetAspect(autoCompletionTypingInterval);
         resetAspect(autoCompletionCharThreshold);
+        resetAspect(llmProviders);
+        resetAspect(chatLlmProviders);
+        resetAspect(fimPrompts);
+        resetAspect(chatPrompts);
     }
 
-    int fimIndex = llmProviders.indexForDisplay("Ollama");
-    llmProviders.setVolatileValue(fimIndex);
-    int chatIndex = chatLlmProviders.indexForDisplay("Ollama");
-    chatLlmProviders.setVolatileValue(chatIndex);
     modelName.setVolatileValue("");
     chatModelName.setVolatileValue("");
 
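With the defaults registered in the constructor, resetPageToDefaults() can route the provider and prompt aspects through the same resetAspect() path as the other settings instead of re-deriving the Ollama index by hand. A rough, self-contained sketch of that idea (hypothetical helper, not the plugin's actual implementation):

```cpp
#include <iostream>
#include <string>

// Toy aspect with a stored default and a current ("volatile") value.
template <typename T>
struct Aspect
{
    T defaultValue{};
    T volatileValue{};

    void setDefaultValue(const T &value) { defaultValue = value; }
    void setVolatileValue(const T &value) { volatileValue = value; }
};

// Resetting is just "copy the default back into the current value",
// so providers and prompts no longer need bespoke lookup code in the reset path.
template <typename AspectT>
void resetAspect(AspectT &aspect)
{
    aspect.setVolatileValue(aspect.defaultValue);
}

int main()
{
    Aspect<std::string> fimPrompts;
    fimPrompts.setDefaultValue("CodeLLama FIM");
    fimPrompts.setVolatileValue("some user-selected template"); // user changed it
    resetAspect(fimPrompts);
    std::cout << fimPrompts.volatileValue << "\n"; // prints "CodeLLama FIM"
    return 0;
}
```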
@@ -27,7 +27,7 @@ class CodeLlamaFimTemplate : public PromptTemplate
 {
 public:
     TemplateType type() const override { return TemplateType::Fim; }
-    QString name() const override { return "CodeLlama FIM"; }
+    QString name() const override { return "CodeLLama FIM"; }
     QString promptTemplate() const override { return "%1<PRE> %2 <SUF>%3 <MID>"; }
     QStringList stopWords() const override
     {
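For context, the template string is a QString::arg() pattern: here %1 appears to carry any leading context, with %2 the code before the cursor and %3 the code after it. A small sketch of how such a fill-in-the-middle prompt could be assembled (illustrative only; the surrounding request code is not part of this diff):

```cpp
#include <QString>
#include <QTextStream>

int main()
{
    // Same placeholder layout as CodeLlamaFimTemplate::promptTemplate().
    const QString promptTemplate = "%1<PRE> %2 <SUF>%3 <MID>";

    const QString context = "// math_utils.cpp\n";             // assumed extra context
    const QString prefix = "int square(int x) {\n    return "; // code before the cursor
    const QString suffix = "\n}\n";                             // code after the cursor

    const QString prompt = promptTemplate.arg(context, prefix, suffix);
    QTextStream(stdout) << prompt << "\n";

    // The model is expected to produce the middle part ("x * x;") and be
    // cut off at the template's stop words.
    return 0;
}
```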
@@ -28,7 +28,7 @@ class CodeLlamaInstructTemplate : public PromptTemplate
 {
 public:
     TemplateType type() const override { return TemplateType::Chat; }
-    QString name() const override { return "CodeLlama Chat"; }
+    QString name() const override { return "CodeLLama Chat"; }
     QString promptTemplate() const override { return "[INST] %1 [/INST]"; }
     QStringList stopWords() const override { return QStringList() << "[INST]" << "[/INST]"; }
 
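Both renames exist so that name() returns exactly the strings the settings page passes to indexForDisplay() ("CodeLLama FIM" and "CodeLLama Chat"); with the old spelling the lookup would return -1 and the intended default could not be applied. A Qt-free illustration of that failure mode (stand-in registry, not the plugin's real one):

```cpp
#include <iostream>
#include <string>
#include <vector>

struct PromptTemplateEntry
{
    std::string name;
    std::string promptTemplate;
};

// Same contract as the aspect lookup: -1 when no entry matches exactly.
int indexForDisplay(const std::vector<PromptTemplateEntry> &entries, const std::string &name)
{
    for (std::size_t i = 0; i < entries.size(); ++i)
        if (entries[i].name == name)
            return static_cast<int>(i);
    return -1;
}

int main()
{
    const std::vector<PromptTemplateEntry> chatPrompts = {
        {"CodeLLama Chat", "[INST] %1 [/INST]"},
    };

    std::cout << indexForDisplay(chatPrompts, "CodeLLama Chat") << "\n"; // 0: spelling matches
    std::cout << indexForDisplay(chatPrompts, "CodeLlama Chat") << "\n"; // -1: old spelling misses
    return 0;
}
```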