Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit d449b15

Browse files
fix: cortex models start has no output if variant not given (#1531)
* fix: use engine get env
* fix: cortex models start has no output if variant not given

Co-authored-by: vansangpfiev <sang@jan.ai>
1 parent d39b332 commit d449b15

File tree

3 files changed

+80
-56
lines changed

3 files changed

+80
-56
lines changed

engine/cli/commands/model_start_cmd.cc

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,23 @@
11
#include "model_start_cmd.h"
2+
#include "config/yaml_config.h"
3+
#include "cortex_upd_cmd.h"
4+
#include "database/models.h"
25
#include "httplib.h"
6+
#include "run_cmd.h"
37
#include "server_start_cmd.h"
8+
#include "utils/cli_selection_utils.h"
49
#include "utils/logging_utils.h"
510

611
namespace commands {
712
bool ModelStartCmd::Exec(const std::string& host, int port,
813
const std::string& model_handle) {
14+
std::optional<std::string> model_id =
15+
SelectLocalModel(model_service_, model_handle);
16+
17+
if(!model_id.has_value()) {
18+
return false;
19+
}
20+
921
// Start server if server is not started yet
1022
if (!commands::IsServerAlive(host, port)) {
1123
CLI_LOG("Starting server ...");
@@ -17,14 +29,16 @@ bool ModelStartCmd::Exec(const std::string& host, int port,
1729
// Call API to start model
1830
httplib::Client cli(host + ":" + std::to_string(port));
1931
Json::Value json_data;
20-
json_data["model"] = model_handle;
32+
json_data["model"] = model_id.value();
2133
auto data_str = json_data.toStyledString();
2234
cli.set_read_timeout(std::chrono::seconds(60));
2335
auto res = cli.Post("/v1/models/start", httplib::Headers(), data_str.data(),
2436
data_str.size(), "application/json");
2537
if (res) {
2638
if (res->status == httplib::StatusCode::OK_200) {
27-
CLI_LOG("Model loaded!");
39+
CLI_LOG(model_id.value() << " model started successfully. Use `"
40+
<< commands::GetCortexBinary() << " run "
41+
<< *model_id << "` for interactive chat shell");
2842
return true;
2943
} else {
3044
CTL_ERR("Model failed to load with status code: " << res->status);

engine/cli/commands/run_cmd.cc

Lines changed: 60 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,59 @@
1010

1111
namespace commands {
1212

13+
std::optional<std::string> SelectLocalModel(ModelService& model_service,
14+
const std::string& model_handle) {
15+
std::optional<std::string> model_id = model_handle;
16+
cortex::db::Models modellist_handler;
17+
18+
if (model_handle.empty()) {
19+
auto all_local_models = modellist_handler.LoadModelList();
20+
if (all_local_models.has_error() || all_local_models.value().empty()) {
21+
CLI_LOG("No local models available!");
22+
return std::nullopt;
23+
}
24+
25+
if (all_local_models.value().size() == 1) {
26+
model_id = all_local_models.value().front().model;
27+
} else {
28+
std::vector<std::string> model_id_list{};
29+
for (const auto& model : all_local_models.value()) {
30+
model_id_list.push_back(model.model);
31+
}
32+
33+
auto selection = cli_selection_utils::PrintSelection(
34+
model_id_list, "Please select an option");
35+
if (!selection.has_value()) {
36+
return std::nullopt;
37+
}
38+
model_id = selection.value();
39+
CLI_LOG("Selected: " << selection.value());
40+
}
41+
} else {
42+
auto related_models_ids = modellist_handler.FindRelatedModel(model_handle);
43+
if (related_models_ids.has_error() || related_models_ids.value().empty()) {
44+
auto result = model_service.DownloadModel(model_handle);
45+
if (result.has_error()) {
46+
CLI_LOG("Model " << model_handle << " not found!");
47+
return std::nullopt;
48+
}
49+
model_id = result.value();
50+
CTL_INF("model_id: " << model_id.value());
51+
} else if (related_models_ids.value().size() == 1) {
52+
model_id = related_models_ids.value().front();
53+
} else { // multiple models with nearly same name found
54+
auto selection = cli_selection_utils::PrintSelection(
55+
related_models_ids.value(), "Local Models: (press enter to select)");
56+
if (!selection.has_value()) {
57+
return std::nullopt;
58+
}
59+
model_id = selection.value();
60+
CLI_LOG("Selected: " << selection.value());
61+
}
62+
}
63+
return model_id;
64+
}
65+
1366
namespace {
1467
std::string Repo2Engine(const std::string& r) {
1568
if (r == kLlamaRepo) {
@@ -24,63 +77,16 @@ std::string Repo2Engine(const std::string& r) {
2477
} // namespace
2578

2679
void RunCmd::Exec(bool run_detach) {
27-
std::optional<std::string> model_id = model_handle_;
28-
80+
std::optional<std::string> model_id =
81+
SelectLocalModel(model_service_, model_handle_);
82+
if (!model_id.has_value()) {
83+
return;
84+
}
85+
2986
cortex::db::Models modellist_handler;
3087
config::YamlHandler yaml_handler;
3188
auto address = host_ + ":" + std::to_string(port_);
3289

33-
{
34-
if (model_handle_.empty()) {
35-
auto all_local_models = modellist_handler.LoadModelList();
36-
if (all_local_models.has_error() || all_local_models.value().empty()) {
37-
CLI_LOG("No local models available!");
38-
return;
39-
}
40-
41-
if (all_local_models.value().size() == 1) {
42-
model_id = all_local_models.value().front().model;
43-
} else {
44-
std::vector<std::string> model_id_list{};
45-
for (const auto& model : all_local_models.value()) {
46-
model_id_list.push_back(model.model);
47-
}
48-
49-
auto selection = cli_selection_utils::PrintSelection(
50-
model_id_list, "Please select an option");
51-
if (!selection.has_value()) {
52-
return;
53-
}
54-
model_id = selection.value();
55-
CLI_LOG("Selected: " << selection.value());
56-
}
57-
} else {
58-
auto related_models_ids =
59-
modellist_handler.FindRelatedModel(model_handle_);
60-
if (related_models_ids.has_error() ||
61-
related_models_ids.value().empty()) {
62-
auto result = model_service_.DownloadModel(model_handle_);
63-
if (result.has_error()) {
64-
CLI_LOG("Model " << model_handle_ << " not found!");
65-
return;
66-
}
67-
model_id = result.value();
68-
CTL_INF("model_id: " << model_id.value());
69-
} else if (related_models_ids.value().size() == 1) {
70-
model_id = related_models_ids.value().front();
71-
} else { // multiple models with nearly same name found
72-
auto selection = cli_selection_utils::PrintSelection(
73-
related_models_ids.value(),
74-
"Local Models: (press enter to select)");
75-
if (!selection.has_value()) {
76-
return;
77-
}
78-
model_id = selection.value();
79-
CLI_LOG("Selected: " << selection.value());
80-
}
81-
}
82-
}
83-
8490
try {
8591
namespace fs = std::filesystem;
8692
namespace fmu = file_manager_utils;
@@ -148,7 +154,7 @@ void RunCmd::Exec(bool run_detach) {
148154
// Chat
149155
if (run_detach) {
150156
CLI_LOG(*model_id << " model started successfully. Use `"
151-
<< commands::GetCortexBinary() << " chat " << *model_id
157+
<< commands::GetCortexBinary() << " run " << *model_id
152158
<< "` for interactive chat shell");
153159
} else {
154160
ChatCompletionCmd(model_service_).Exec(host_, port_, *model_id, mc, "");

engine/cli/commands/run_cmd.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,10 @@
55
#include "services/model_service.h"
66

77
namespace commands {
8+
9+
std::optional<std::string> SelectLocalModel(ModelService& model_service,
10+
const std::string& model_handle);
11+
812
class RunCmd {
913
public:
1014
explicit RunCmd(std::string host, int port, std::string model_handle,

0 commit comments

Comments (0)