WIP: configuration & choice of the LLM models offered by the provider #…
dvorka committed Mar 12, 2024
1 parent 2bba6e7 commit ed1b5a5
Showing 8 changed files with 71 additions and 17 deletions.
2 changes: 1 addition & 1 deletion app/src/qt/dialogs/configuration_dialog.cpp
@@ -779,7 +779,7 @@ ConfigurationDialog::WingmanOllamaTab::WingmanOllamaTab(QWidget* parent)
).arg(ENV_VAR_OPENAI_API_KEY));
helpLabel->setVisible(!config.canWingmanOllama());
urlEdit = new QLineEdit(this);
clearUrlButton = new QPushButton(tr("Clear ollama URL"), this);
clearUrlButton = new QPushButton(tr("Clear URL"), this);

QVBoxLayout* llmProvidersLayout = new QVBoxLayout();
llmProvidersLayout->addWidget(helpLabel);
2 changes: 1 addition & 1 deletion lib/src/config/configuration.cpp
@@ -38,7 +38,7 @@ const string Configuration::DEFAULT_UI_THEME_NAME = string{UI_DEFAULT_THEME};
const string Configuration::DEFAULT_UI_HTML_CSS_THEME = string{UI_DEFAULT_HTML_CSS_THEME};
const string Configuration::DEFAULT_EDITOR_FONT= string{UI_DEFAULT_EDITOR_FONT};
const string Configuration::DEFAULT_TIME_SCOPE = string{"0y0m0d0h0m"};
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = string{"gpt-3.5-turbo"}; // "gpt-3.5-turbo" and "gpt-4" are symbolic names
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = LLM_MODEL_OPENAI_GPT35TURBO;
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = string{"llama2"};

Configuration::Configuration()
24 changes: 19 additions & 5 deletions lib/src/mind/ai/llm/ollama_wingman.cpp
@@ -40,20 +40,34 @@ size_t ollamaCurlWriteCallback(void* contents, size_t size, size_t nmemb, std::s
* Ollama Wingman class implementation.
*/

OllamaWingman::OllamaWingman(
const string& url,
const std::string& llmModel
)
OllamaWingman::OllamaWingman(const string& url)
: Wingman(WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA),
url{url + "/api/generate"},
llmModel{llmModel}
llmModels{}
{
}

OllamaWingman::~OllamaWingman()
{
}

void OllamaWingman::curlListModels() {
if(!this->llmModels.empty()) {
this->llmModels.clear();
}

// call to ollama API to list available models
throw std::runtime_error("OllamaWingman::curlListModels() not implemented");
}

std::vector<std::string>& OllamaWingman::listModels()
{
if(this->llmModels.empty()) {
this->curlListModels();
}
return this->llmModels;
}

/**
* OpenAI cURL GET request.
*
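Note: curlListModels() is left as a throwing stub in this commit. A possible follow-up (a sketch only, not part of the change) could query Ollama's GET /api/tags endpoint, which lists the locally installed models. The sketch below assumes libcurl and nlohmann::json are available to the wingman sources, reuses the existing ollamaCurlWriteCallback, and derives the base URL from the url member (which already has /api/generate appended) - all of these are assumptions.

// Sketch (not in this commit): list Ollama models via GET /api/tags.
// Assumes <curl/curl.h> and nlohmann/json.hpp are included by ollama_wingman.cpp.
void OllamaWingman::curlListModels()
{
    llmModels.clear();

    CURL* curl = curl_easy_init();
    if(!curl) {
        return;
    }

    // the url member already ends with /api/generate - derive the /api/tags URL from it (assumption)
    std::string tagsUrl{url.substr(0, url.find("/api/generate")) + "/api/tags"};

    std::string response;
    curl_easy_setopt(curl, CURLOPT_URL, tagsUrl.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, ollamaCurlWriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response);
    CURLcode res = curl_easy_perform(curl);
    curl_easy_cleanup(curl);

    if(res != CURLE_OK) {
        return;
    }

    // /api/tags responds with {"models":[{"name":"llama2:latest", ...}, ...]}
    auto modelsJson = nlohmann::json::parse(response, nullptr, false);
    if(!modelsJson.is_discarded() && modelsJson.contains("models")) {
        for(const auto& model: modelsJson["models"]) {
            llmModels.push_back(model["name"].get<std::string>());
        }
    }
}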
6 changes: 4 additions & 2 deletions lib/src/mind/ai/llm/ollama_wingman.h
@@ -43,18 +43,20 @@ class OllamaWingman: Wingman
{
private:
std::string url;
std::string llmModel;
std::vector<std::string> llmModels;

void curlListModels();
void curlGet(CommandWingmanChat& command);

public:
explicit OllamaWingman(const std::string& url, const std::string& llmModel);
explicit OllamaWingman(const std::string& url);
OllamaWingman(const OllamaWingman&) = delete;
OllamaWingman(const OllamaWingman&&) = delete;
OllamaWingman& operator =(const OllamaWingman&) = delete;
OllamaWingman& operator =(const OllamaWingman&&) = delete;
~OllamaWingman() override;

virtual std::vector<std::string>& listModels() override;
virtual void chat(CommandWingmanChat& command) override;
};

24 changes: 18 additions & 6 deletions lib/src/mind/ai/llm/openai_wingman.cpp
@@ -26,6 +26,11 @@ namespace m8r {

using namespace std;


// OpenAI models: "gpt-3.5-turbo" and "gpt-4" are aliases for the latest models
const std::string LLM_GPT_35_TURBO=string{"gpt-3.5-turbo"};
const std::string LLM_GPT_4=string{"gpt-4"};

/*
* cURL callback for writing data to string.
*/
@@ -40,21 +45,28 @@ size_t openaiCurlWriteCallback(void* contents, size_t size, size_t nmemb, std::s
* OpenAi Wingman class implementation.
*/

OpenAiWingman::OpenAiWingman(
const string& apiKey,
const std::string& llmModel
)
OpenAiWingman::OpenAiWingman(const string& apiKey)
: Wingman(WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI),
apiKey{apiKey},
llmModel{llmModel}
llmModels{},
defaultLlmModel{LLM_GPT_35_TURBO}
{
MF_DEBUG("OpenAiWingman::OpenAiWingman() apiKey: " << apiKey << endl);

// IMPROVE list models using OpenAI API - will many models be confusing for user?
llmModels.push_back(LLM_GPT_35_TURBO);
llmModels.push_back(LLM_GPT_4);
}

OpenAiWingman::~OpenAiWingman()
{
}

std::vector<std::string>& OpenAiWingman::listModels()
{
return this->llmModels;
}

// TODO refactor to parent class so that all wingmans can use it
/**
* OpenAI cURL GET request.
@@ -225,7 +237,7 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
command.httpResponse.clear();
command.answerMarkdown.clear();
command.answerTokens = 0;
command.answerLlmModel = llmModel;
command.answerLlmModel = llmModel.size()>0? llmModel: defaultLlmModel;

return;
}
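Note: the IMPROVE comment above keeps the OpenAI model list hard-coded. If the list were fetched dynamically, one option (a sketch only, not part of this commit) would be a hypothetical OpenAiWingman::curlListModels() helper calling the OpenAI GET /v1/models endpoint with the Bearer API key and keeping only the chat-capable gpt-* models so the user is not overwhelmed. It assumes libcurl and nlohmann::json plus the existing openaiCurlWriteCallback, and the helper would also need to be declared in openai_wingman.h.

// Sketch (not in this commit): hypothetical helper replacing the hard-coded model list.
// Assumes <curl/curl.h> and nlohmann/json.hpp are included by openai_wingman.cpp.
void OpenAiWingman::curlListModels()
{
    llmModels.clear();

    CURL* curl = curl_easy_init();
    if(!curl) {
        return;
    }

    std::string response;
    struct curl_slist* headers = nullptr;
    headers = curl_slist_append(headers, ("Authorization: Bearer " + apiKey).c_str());

    curl_easy_setopt(curl, CURLOPT_URL, "https://api.openai.com/v1/models");
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, openaiCurlWriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response);
    CURLcode res = curl_easy_perform(curl);
    curl_slist_free_all(headers);
    curl_easy_cleanup(curl);

    if(res != CURLE_OK) {
        return;
    }

    // /v1/models responds with {"data":[{"id":"gpt-3.5-turbo", ...}, ...]}
    auto modelsJson = nlohmann::json::parse(response, nullptr, false);
    if(!modelsJson.is_discarded() && modelsJson.contains("data")) {
        for(const auto& model: modelsJson["data"]) {
            std::string id{model["id"].get<std::string>()};
            // keep only chat models to avoid a confusingly long list
            if(id.rfind("gpt-", 0) == 0) {
                llmModels.push_back(id);
            }
        }
    }
}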
10 changes: 8 additions & 2 deletions lib/src/mind/ai/llm/openai_wingman.h
@@ -37,20 +37,26 @@ namespace m8r {
*/
class OpenAiWingman: Wingman
{
public:
static const std::string LLM_MODEL_OPENAI_GPT35;
static const std::string LLM_MODEL_OPENAI_GPT4;

private:
std::string apiKey;
std::string llmModel;
std::vector<std::string> llmModels;
std::string defaultLlmModel;

void curlGet(CommandWingmanChat& command);

public:
explicit OpenAiWingman(const std::string& apiKey, const std::string& llmModel);
explicit OpenAiWingman(const std::string& apiKey);
OpenAiWingman(const OpenAiWingman&) = delete;
OpenAiWingman(const OpenAiWingman&&) = delete;
OpenAiWingman& operator =(const OpenAiWingman&) = delete;
OpenAiWingman& operator =(const OpenAiWingman&&) = delete;
~OpenAiWingman() override;

virtual std::vector<std::string>& listModels() override;
virtual void chat(CommandWingmanChat& command) override;
};

1 change: 1 addition & 0 deletions lib/src/mind/ai/llm/wingman.cpp
@@ -27,6 +27,7 @@ using namespace std;
*/

Wingman::Wingman(WingmanLlmProviders llmProvider)
: llmModel{}
{
this->llmProvider = llmProvider;
}
19 changes: 19 additions & 0 deletions lib/src/mind/ai/llm/wingman.h
@@ -134,6 +134,9 @@ class Wingman
PROMPT_ANTONYM,
};

protected:
std::string llmModel;

public:
explicit Wingman(WingmanLlmProviders llmProvider);
Wingman(const Wingman&) = delete;
@@ -152,6 +155,22 @@
return textPrompts;
}

virtual void setLlmModel(const std::string& llmModel) {
this->llmModel = llmModel;
}

virtual const std::string& getLlmModel() {
return llmModel;
}

/**
* List available LLM models.
*/
virtual std::vector<std::string>& listModels() = 0;

/**
* Chat with given LLM model.
*/
virtual void chat(CommandWingmanChat& command) = 0;
};

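Note: taken together, the new Wingman interface lets a caller enumerate a provider's models, pick one, and chat with it. The fragment below is an illustrative sketch only; the prompt field on CommandWingmanChat and the chooseModelAndChat() function are assumptions, not part of this diff.

#include <iostream>
#include <string>
#include <vector>
// "wingman.h" provides m8r::Wingman and m8r::CommandWingmanChat

// Illustrative sketch: how a caller might combine listModels(), setLlmModel() and chat().
void chooseModelAndChat(m8r::Wingman& wingman)
{
    // offer the provider's models to the user, e.g. in the configuration dialog
    std::vector<std::string>& models = wingman.listModels();
    if(!models.empty()) {
        wingman.setLlmModel(models[0]);  // the user's choice
    }

    m8r::CommandWingmanChat command{};
    command.prompt = "Summarize the current note.";  // field name assumed
    wingman.chat(command);

    std::cout << "Answer from " << command.answerLlmModel << ":" << std::endl
              << command.answerMarkdown << std::endl;
}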
