Skip to content

Commit

Permalink
WIP: bug - the env var overrides the MindForger config key, but it should be the opposite
Browse files Browse the repository at this point in the history
  • Loading branch information
dvorka committed Apr 6, 2024
1 parent e1b9b0a commit fdb6381
Show file tree
Hide file tree
Showing 6 changed files with 28 additions and 11 deletions.
2 changes: 2 additions & 0 deletions app/src/qt/dialogs/configuration_dialog.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -696,6 +696,8 @@ ConfigurationDialog::WingmanOpenAiTab::WingmanOpenAiTab(QWidget* parent, QComboB
"<b>.mindforger.md</b> file in your home directory.</li>"
"</ul>"
).arg(ENV_VAR_OPENAI_API_KEY));
BUG: if a key is set in the config, it overrides the env var, so the help must stay visible;
it may be hidden ONLY if the env key is set AND the config key is empty
helpLabel->setVisible(!config.canWingmanOpenAiFromEnv());
apiKeyLabel = new QLabel(tr("<br>API key:"));
apiKeyLabel->setVisible(helpLabel->isVisible());
Expand Down
6 changes: 5 additions & 1 deletion lib/src/config/configuration.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -457,7 +457,11 @@ bool Configuration::initWingmanOpenAi() {
MF_DEBUG(" Wingman API key loaded from the env: " << apiKeyEnv << endl);
wingmanOpenAiApiKey = apiKeyEnv;
}

#ifdef DO_MF_DEBUG
else {
MF_DEBUG(" Wingman API key from config will be used: " << wingmanOpenAiApiKey << endl);
}
#endif
// LLM model
const char* llmModelEnv = std::getenv(ENV_VAR_OPENAI_LLM_MODEL);
if(llmModelEnv) {
Expand Down
3 changes: 2 additions & 1 deletion lib/src/config/configuration.h
Original file line number Diff line number Diff line change
Expand Up @@ -582,11 +582,12 @@ class Configuration {
bool initWingmanMock();
bool initWingmanOpenAi();
bool initWingmanOllama();
public:
/**
* @brief Initialize Wingman's LLM provider.
*/
bool initWingman();
public:

std::string getWingmanOpenAiApiKey() const { return wingmanOpenAiApiKey; }
void setWingmanOpenAiApiKey(std::string apiKey) { wingmanOpenAiApiKey = apiKey; }
std::string getWingmanOpenAiLlm() const { return wingmanOpenAiLlm; }
Expand Down
17 changes: 12 additions & 5 deletions lib/src/mind/ai/llm/openai_wingman.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,17 @@ size_t openaiCurlWriteCallback(void* contents, size_t size, size_t nmemb, std::s
* OpenAi Wingman class implementation.
*/

OpenAiWingman::OpenAiWingman(const string& apiKey)
OpenAiWingman::OpenAiWingman()
: Wingman(WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI),
apiKey{apiKey},
config(Configuration::getInstance()),
llmModels{},
defaultLlmModel{LLM_GPT_35_TURBO}
{
MF_DEBUG("OpenAiWingman::OpenAiWingman() apiKey: " << apiKey << endl);
// The API key is always read from the config, as it might be reconfigured
// during a MindForger run
MF_DEBUG(
"OpenAiWingman::OpenAiWingman() apiKey: '"
<< config.getWingmanOpenAiApiKey() << "'" << endl);

listModels();
}
Expand Down Expand Up @@ -214,8 +218,11 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
&command.httpResponse);

struct curl_slist* headers = NULL;
headers = curl_slist_append(headers, ("Authorization: Bearer " + apiKey).c_str());
headers = curl_slist_append(headers, "Content-Type: application/json");
headers = curl_slist_append(
headers,
("Authorization: Bearer " + config.getWingmanOpenAiApiKey()).c_str());
headers = curl_slist_append(
headers, "Content-Type: application/json");
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);

// perform the request
Expand Down
8 changes: 5 additions & 3 deletions lib/src/mind/ai/llm/openai_wingman.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@
#include "curl/curl.h"
#endif

#include "../../../config/configuration.h"

#include "wingman.h"

namespace m8r {
Expand All @@ -42,8 +44,8 @@ class OpenAiWingman: Wingman
static const std::string LLM_MODEL_OPENAI_GPT4;

private:
// API key needed to access OpenAI API endpoint
std::string apiKey;
// API key needed to access OpenAI API endpoint - read from config
Configuration& config;
// Names of LLM models provided by the OpenAI API endpoint
std::vector<std::string> llmModels;
// Name of the LLM model which is used by Wingman - must be one of llmModels ^
Expand All @@ -52,7 +54,7 @@ class OpenAiWingman: Wingman
void curlGet(CommandWingmanChat& command);

public:
explicit OpenAiWingman(const std::string& apiKey);
explicit OpenAiWingman();
OpenAiWingman(const OpenAiWingman&) = delete;
OpenAiWingman(const OpenAiWingman&&) = delete;
OpenAiWingman& operator =(const OpenAiWingman&) = delete;
Expand Down
3 changes: 2 additions & 1 deletion lib/src/mind/mind.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1455,6 +1455,7 @@ void Mind::initWingman()
MF_DEBUG(
"MIND Wingman init: " << boolalpha << config.isWingman() << endl
);
config.initWingman();
if(config.isWingman()) {
MF_DEBUG("MIND Wingman initialization..." << endl);
switch(config.getWingmanLlmProvider()) {
Expand All @@ -1464,7 +1465,7 @@ void Mind::initWingman()
delete wingman;
wingman = nullptr;
}
wingman = (Wingman*)new OpenAiWingman{config.getWingmanOpenAiApiKey()};
wingman = (Wingman*)new OpenAiWingman{};
wingman->setLlmModel(config.getWingmanOpenAiLlm());
wingmanLlmProvider = config.getWingmanLlmProvider();
return;
Expand Down

0 comments on commit fdb6381

Please sign in to comment.