From c070a211d633997f9265f69da5185be06bd0ca8f Mon Sep 17 00:00:00 2001
From: tikikun
Date: Tue, 9 Jan 2024 13:25:44 +0700
Subject: [PATCH] feat: add condition to not emit log file if there is no path to log folder

---
 controllers/llamaCPP.cc | 9 +++++----
 controllers/llamaCPP.h  | 3 ++-
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/controllers/llamaCPP.cc b/controllers/llamaCPP.cc
index 39696c549..fc2705cdc 100644
--- a/controllers/llamaCPP.cc
+++ b/controllers/llamaCPP.cc
@@ -452,10 +452,11 @@ bool llamaCPP::loadModelImpl(const Json::Value &jsonBody) {
     this->pre_prompt = jsonBody.get("pre_prompt", "").asString();
     this->repeat_last_n = jsonBody.get("repeat_last_n", 32).asInt();
 
-    // Set folder for llama log
-    std::string llama_log_folder =
-        jsonBody.get("llama_log_folder", "log/").asString();
-    log_set_target(llama_log_folder + "llama.log");
+    if (!jsonBody["llama_log_folder"].isNull()) {
+      log_enable();
+      std::string llama_log_folder = jsonBody["llama_log_folder"].asString();
+      log_set_target(llama_log_folder + "llama.log");
+    } // Set folder for llama log
   }
 #ifdef GGML_USE_CUBLAS
   LOG_INFO << "Setting up GGML CUBLAS PARAMS";
diff --git a/controllers/llamaCPP.h b/controllers/llamaCPP.h
index 5dc693de5..e7e084e7c 100644
--- a/controllers/llamaCPP.h
+++ b/controllers/llamaCPP.h
@@ -5,6 +5,7 @@
 #endif
 
 #pragma once
+#define LOG_TARGET stdout
 #include "log.h"
 
 #include "utils/nitro_utils.h"
@@ -2486,7 +2487,7 @@ class llamaCPP : public drogon::HttpController {
 public:
   llamaCPP() {
     // Some default values for now below
-    log_enable(); // Disable the log to file feature, reduce bloat for
+    log_disable(); // Disable the log to file feature, reduce bloat for
                   // target
                   // system ()
     std::vector llama_models =
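The net effect of the patch: file logging is now off by default (`log_disable()` in the constructor, `LOG_TARGET` redirected to stdout) and is only switched on when the load-model request body explicitly supplies a `llama_log_folder`. Below is a minimal, self-contained sketch of that behavior, assuming the jsoncpp `Json::Value` API the controller uses; `configureLlamaLog` is a hypothetical helper, and the `log_*` functions here are stand-ins for llama.cpp's real logging helpers, kept as stubs so the example compiles on its own.

```cpp
// Sketch only: the log_* functions below are stand-ins for llama.cpp's
// log_enable()/log_disable()/log_set_target(), not the real implementations.
#include <json/json.h>
#include <iostream>
#include <string>

static bool g_log_enabled = false;
static void log_enable() { g_log_enabled = true; }
static void log_disable() { g_log_enabled = false; }
static void log_set_target(const std::string &path) {
  std::cout << "llama.log target: " << path << "\n";
}

// Hypothetical helper mirroring the patched block in loadModelImpl():
// only enable file logging when the caller provides a log folder.
void configureLlamaLog(const Json::Value &jsonBody) {
  if (!jsonBody["llama_log_folder"].isNull()) {
    log_enable();
    std::string llama_log_folder = jsonBody["llama_log_folder"].asString();
    log_set_target(llama_log_folder + "llama.log");
  }
}

int main() {
  log_disable(); // default, as in the llamaCPP() constructor after the patch

  Json::Value withFolder;
  withFolder["llama_log_folder"] = "/tmp/nitro-logs/"; // example path
  configureLlamaLog(withFolder); // enables logging, targets /tmp/nitro-logs/llama.log

  Json::Value withoutFolder; // no "llama_log_folder" key
  configureLlamaLog(withoutFolder); // no-op: logging stays disabled
  std::cout << "enabled: " << std::boolalpha << g_log_enabled << "\n";
}
```

Compared with the old behavior, which unconditionally called `log_set_target("log/llama.log")` with a hardcoded default folder, this opt-in check avoids emitting a `llama.log` file into the working directory on every model load.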