Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit facee2e

Browse files
vansangpfiev authored and sangjanai committed
chore: cleanup
1 parent ca35c1f commit facee2e

File tree

7 files changed

+16
-78
lines changed

7 files changed

+16
-78
lines changed

engine/controllers/engines.cc

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -289,11 +289,6 @@ void Engines::InstallRemoteEngine(
289289
resp->setStatusCode(k400BadRequest);
290290
callback(resp);
291291
} else {
292-
// auto gr = engine_service_->GenerateRemoteModel(engine);
293-
// if (gr.has_error()) {
294-
// CTL_INF("Error: " << gr.error());
295-
// }
296-
297292
Json::Value res;
298293
if (get_models_url.empty()) {
299294
res["warning"] =

engine/extensions/remote-engine/remote_engine.cc

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@ bool is_anthropic(const std::string& model) {
1717
}
1818

1919
bool is_openai(const std::string& model) {
20-
return model.find("gpt") != std::string::npos;
20+
return model.find("gpt-") != std::string::npos ||
21+
model.find("o1-") != std::string::npos;
2122
}
2223

2324
constexpr const std::array<std::string_view, 5> kAnthropicModels = {
@@ -181,7 +182,7 @@ static size_t WriteCallback(char* ptr, size_t size, size_t nmemb,
181182
}
182183

183184
RemoteEngine::RemoteEngine(const std::string& engine_name)
184-
: engine_name_(engine_name) {
185+
: engine_name_(engine_name), q_(1 /*n_parallel*/, engine_name) {
185186
curl_global_init(CURL_GLOBAL_ALL);
186187
}
187188

@@ -552,7 +553,9 @@ void RemoteEngine::HandleChatCompletion(
552553
}
553554

554555
if (is_stream) {
555-
MakeStreamingChatCompletionRequest(*model_config, result, callback);
556+
q_.runTaskInQueue([this, model_config, result, cb = std::move(callback)] {
557+
MakeStreamingChatCompletionRequest(*model_config, result, cb);
558+
});
556559
} else {
557560

558561
auto response = MakeChatCompletionRequest(*model_config, result);

engine/extensions/remote-engine/remote_engine.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
#include <unordered_map>
1010
#include "cortex-common/remote_enginei.h"
1111
#include "extensions/remote-engine/template_renderer.h"
12+
#include "trantor/utils/ConcurrentTaskQueue.h"
1213
#include "utils/engine_constants.h"
1314
#include "utils/file_logger.h"
1415
// Helper for CURL response
@@ -52,6 +53,7 @@ class RemoteEngine : public RemoteEngineI {
5253
std::string chat_res_template_;
5354
std::string api_key_header_;
5455
std::string engine_name_;
56+
trantor::ConcurrentTaskQueue q_;
5557

5658
// Helper functions
5759
CurlResponse MakeChatCompletionRequest(const ModelConfig& config,

engine/services/engine_service.cc

Lines changed: 0 additions & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -1140,71 +1140,6 @@ bool EngineService::IsRemoteEngine(const std::string& engine_name) {
11401140
return true;
11411141
}
11421142

1143-
cpp::result<bool, std::string> EngineService::GenerateRemoteModel(
1144-
const std::string& engine_name) {
1145-
namespace fmu = file_manager_utils;
1146-
namespace fs = std::filesystem;
1147-
auto exist_engine = GetEngineByNameAndVariant(engine_name);
1148-
if (exist_engine.has_error()) {
1149-
return cpp::fail("Remote engine '" + engine_name + "' is not installed");
1150-
}
1151-
1152-
if (!IsEngineLoaded(engine_name)) {
1153-
engines_[engine_name].engine = new remote_engine::RemoteEngine(engine_name);
1154-
CTL_INF("Loaded engine: " << engine_name);
1155-
}
1156-
1157-
auto remote_engine_json = exist_engine.value().ToJson();
1158-
auto& e = std::get<RemoteEngineI*>(engines_[engine_name].engine);
1159-
auto url = remote_engine_json["metadata"]["get_models_url"].asString();
1160-
auto api_key = remote_engine_json["api_key"].asString();
1161-
auto api_key_template =
1162-
remote_engine_json["metadata"]["api_key_template"].asString();
1163-
auto res = e->GetRemoteModels(url, api_key, api_key_template);
1164-
if (!res["error"].isNull()) {
1165-
return cpp::fail(res["error"].asString());
1166-
} else {
1167-
for (auto& d : res["data"]) {
1168-
auto model_handle = d["id"].asString();
1169-
config::RemoteModelConfig model_config;
1170-
Json::Value body =
1171-
json_helper::ParseJsonString(config::kDefaultRemoteModelConfig);
1172-
body["model"] = model_handle;
1173-
body["engine"] = engine_name;
1174-
// CTL_INF(body.toStyledString());
1175-
model_config.LoadFromJson(body);
1176-
cortex::db::Models modellist_utils_obj;
1177-
1178-
std::string model_yaml_path =
1179-
(file_manager_utils::GetModelsContainerPath() /
1180-
std::filesystem::path("remote") /
1181-
std::filesystem::path(model_handle + ".yml"))
1182-
.string();
1183-
try {
1184-
auto yaml_rel_path =
1185-
fmu::ToRelativeCortexDataPath(fs::path(model_yaml_path));
1186-
cortex::db::ModelEntry model_entry{
1187-
model_handle, "", "", yaml_rel_path.string(),
1188-
model_handle, "remote", "imported", cortex::db::ModelStatus::Remote,
1189-
engine_name};
1190-
std::filesystem::create_directories(
1191-
std::filesystem::path(model_yaml_path).parent_path());
1192-
if (modellist_utils_obj.AddModelEntry(model_entry).value()) {
1193-
model_config.SaveToYamlFile(model_yaml_path);
1194-
} else {
1195-
CTL_INF("Fail to import model, model_id '" + model_handle +
1196-
"' already exists!");
1197-
}
1198-
} catch (const std::exception& e) {
1199-
return cpp::fail("Error while adding Remote model with model_id '" +
1200-
model_handle + "': " + e.what());
1201-
}
1202-
}
1203-
}
1204-
1205-
return true;
1206-
}
1207-
12081143
cpp::result<std::vector<std::string>, std::string>
12091144
EngineService::GetSupportedEngineNames() {
12101145
return file_manager_utils::GetCortexConfig().supportedEngines;

engine/services/engine_service.h

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -152,9 +152,6 @@ class EngineService : public EngineServiceI {
152152

153153
bool IsRemoteEngine(const std::string& engine_name) override;
154154

155-
cpp::result<bool, std::string> GenerateRemoteModel(
156-
const std::string& engine_name);
157-
158155
private:
159156
bool IsEngineLoaded(const std::string& engine);
160157

engine/services/model_service.cc

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1253,5 +1253,8 @@ ModelService::GetModelMetadata(const std::string& model_id) const {
12531253

12541254
std::shared_ptr<ModelMetadata> ModelService::GetCachedModelMetadata(
12551255
const std::string& model_id) const {
1256+
if (loaded_model_metadata_map_.find(model_id) ==
1257+
loaded_model_metadata_map_.end())
1258+
return nullptr;
12561259
return loaded_model_metadata_map_.at(model_id);
12571260
}

engine/test/components/test_remote_engine.cc

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,19 +25,22 @@ TEST_F(RemoteEngineTest, OpenAiToAnthropicRequest) {
2525
{% endfor %}
2626
]
2727
{% endif %}
28+
{% if not loop.is_last %},{% endif %}
2829
{% else if key == "system" or key == "model" or key == "temperature" or key == "store" or key == "max_tokens" or key == "stream" or key == "presence_penalty" or key == "metadata" or key == "frequency_penalty" or key == "tools" or key == "tool_choice" or key == "logprobs" or key == "top_logprobs" or key == "logit_bias" or key == "n" or key == "modalities" or key == "prediction" or key == "response_format" or key == "service_tier" or key == "seed" or key == "stop" or key == "stream_options" or key == "top_p" or key == "parallel_tool_calls" or key == "user" %}
2930
"{{ key }}": {{ tojson(value) }}
31+
{% if not loop.is_last %},{% endif %}
3032
{% endif %}
31-
{% if not loop.is_last %},{% endif %}
3233
{% endfor %} })";
3334
{
3435
std::string message_with_system = R"({
36+
"engine" : "anthropic",
37+
"max_tokens" : 1024,
3538
"messages": [
3639
{"role": "system", "content": "You are a seasoned data scientist at a Fortune 500 company."},
3740
{"role": "user", "content": "Hello, world"}
3841
],
3942
"model": "claude-3-5-sonnet-20241022",
40-
"max_tokens": 1024,
43+
"stream" : true
4144
})";
4245

4346
auto data = json_helper::ParseJsonString(message_with_system);

0 commit comments

Comments (0)