From 4c4cfa1f4b3b484e6a28968d717abbe3e89aaa1a Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Fri, 4 Oct 2024 13:39:36 +0700
Subject: [PATCH 1/2] fix: log

---
 engine/utils/file_manager_utils.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/engine/utils/file_manager_utils.h b/engine/utils/file_manager_utils.h
index c1e5e2d97..8d452e5f8 100644
--- a/engine/utils/file_manager_utils.h
+++ b/engine/utils/file_manager_utils.h
@@ -140,7 +140,7 @@ inline void CreateConfigFileIfNotExist() {
   CLI_LOG("Config file not found. Creating one at " + config_path.string());
   auto defaultDataFolderPath =
       file_manager_utils::GetHomeDirectoryPath() / default_data_folder_name;
-  CTL_INF("Default data folder path: " + defaultDataFolderPath.string());
+  CLI_LOG("Default data folder path: " + defaultDataFolderPath.string());
 
   auto config = config_yaml_utils::CortexConfig{
       .logFolderPath = defaultDataFolderPath.string(),
@@ -188,7 +188,7 @@ inline std::filesystem::path GetCortexDataPath() {
   }
 
   if (!std::filesystem::exists(data_folder_path)) {
-    CTL_INF("Cortex home folder not found. Create one: " +
+    CLI_LOG("Cortex home folder not found. Create one: " +
             data_folder_path.string());
     std::filesystem::create_directory(data_folder_path);
   }

From c326e4b602d764a26b1e85bad43ffebf6d4d1bf2 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Fri, 4 Oct 2024 14:13:12 +0700
Subject: [PATCH 2/2] fix: engine name

---
 engine/commands/run_cmd.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/engine/commands/run_cmd.cc b/engine/commands/run_cmd.cc
index b4023cfd3..9ae71d85c 100644
--- a/engine/commands/run_cmd.cc
+++ b/engine/commands/run_cmd.cc
@@ -91,7 +91,8 @@ void RunCmd::Exec(bool chat_flag) {
   // Always start model if not llamacpp
   // If it is llamacpp, then check model status first
   {
-    if ((mc.engine.find("llamacpp") == std::string::npos) ||
+    if ((mc.engine.find(kLlamaRepo) == std::string::npos &&
+         mc.engine.find(kLlamaEngine) == std::string::npos) ||
         !commands::ModelStatusCmd().IsLoaded(host_, port_, *model_id)) {
       if (!ModelStartCmd().Exec(host_, port_, *model_id)) {
         return;
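
Note (not part of the patches above): a minimal standalone sketch of how the revised llamacpp check in run_cmd.cc behaves after PATCH 2/2. The values given to kLlamaRepo and kLlamaEngine here are assumed placeholders for illustration only; their real definitions live elsewhere in the engine and may differ.

// Illustrative sketch -- the constant values below are assumptions,
// not taken from the patch.
#include <iostream>
#include <string>

constexpr const char* kLlamaRepo = "cortex.llamacpp";  // assumed value
constexpr const char* kLlamaEngine = "llama-cpp";      // assumed value

// Mirrors the patched condition: a model counts as "not llamacpp" (and is
// therefore always started) only when its engine string matches neither the
// repo name nor the engine name.
bool IsNotLlamaCpp(const std::string& engine) {
  return engine.find(kLlamaRepo) == std::string::npos &&
         engine.find(kLlamaEngine) == std::string::npos;
}

int main() {
  std::cout << std::boolalpha;
  std::cout << IsNotLlamaCpp("cortex.llamacpp") << '\n';  // false
  std::cout << IsNotLlamaCpp("llama-cpp") << '\n';        // false
  std::cout << IsNotLlamaCpp("onnxruntime") << '\n';      // true
  return 0;
}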