diff --git a/engine/controllers/models.cc b/engine/controllers/models.cc
index 0dac515fc..be6d45ceb 100644
--- a/engine/controllers/models.cc
+++ b/engine/controllers/models.cc
@@ -370,6 +370,20 @@ void Models::StartModel(
     params_override.cache_type = o.asString();
   }
 
+  if (auto& o = (*(req->getJsonObject()))["mmproj"]; !o.isNull()) {
+    params_override.mmproj = o.asString();
+  }
+
+  // Support both llama_model_path and model_path for backward compatibility;
+  // model_path has higher priority
+  if (auto& o = (*(req->getJsonObject()))["llama_model_path"]; !o.isNull()) {
+    params_override.model_path = o.asString();
+  }
+
+  if (auto& o = (*(req->getJsonObject()))["model_path"]; !o.isNull()) {
+    params_override.model_path = o.asString();
+  }
+
   auto model_entry = model_service_->GetDownloadedModel(model_handle);
   if (!model_entry.has_value()) {
     Json::Value ret;
diff --git a/engine/services/model_service.cc b/engine/services/model_service.cc
index c79e3c4f1..03ea512e3 100644
--- a/engine/services/model_service.cc
+++ b/engine/services/model_service.cc
@@ -622,6 +622,8 @@ cpp::result ModelService::StartModel(
   ASSIGN_IF_PRESENT(json_data, params_override, n_parallel);
   ASSIGN_IF_PRESENT(json_data, params_override, ctx_len);
   ASSIGN_IF_PRESENT(json_data, params_override, cache_type);
+  ASSIGN_IF_PRESENT(json_data, params_override, mmproj);
+  ASSIGN_IF_PRESENT(json_data, params_override, model_path);
 #undef ASSIGN_IF_PRESENT;
 
   CTL_INF(json_data.toStyledString());
diff --git a/engine/services/model_service.h b/engine/services/model_service.h
index 4e6101eaa..3270542c5 100644
--- a/engine/services/model_service.h
+++ b/engine/services/model_service.h
@@ -14,6 +14,8 @@
   std::optional<int> n_parallel;
   std::optional<int> ctx_len;
   std::optional<std::string> custom_prompt_template;
   std::optional<std::string> cache_type;
+  std::optional<std::string> mmproj;
+  std::optional<std::string> model_path;
 };
 
 class ModelService {
  public:
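
For illustration only (not part of the patch): the controller reads `llama_model_path` first and `model_path` second, so when both keys appear in the request body the later assignment from `model_path` wins. The sketch below reproduces that precedence with a simplified stand-in struct and jsoncpp; names such as `ParamsOverride` are hypothetical, and the real code binds to the Drogon request's JSON object rather than a local `Json::Value`.

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <json/json.h>

// Simplified stand-in for the StartParameterOverride fields touched here.
struct ParamsOverride {
  std::optional<std::string> mmproj;
  std::optional<std::string> model_path;
};

int main() {
  // Request body carrying both the legacy key and the new one.
  Json::Value body;
  body["llama_model_path"] = "/models/llava/legacy.gguf";
  body["model_path"] = "/models/llava/model.gguf";
  body["mmproj"] = "/models/llava/mmproj.gguf";

  ParamsOverride params;
  if (const auto& o = body["mmproj"]; !o.isNull()) {
    params.mmproj = o.asString();
  }
  // Legacy key first, new key second: model_path overwrites llama_model_path.
  if (const auto& o = body["llama_model_path"]; !o.isNull()) {
    params.model_path = o.asString();
  }
  if (const auto& o = body["model_path"]; !o.isNull()) {
    params.model_path = o.asString();
  }

  // Prints "/models/llava/model.gguf": model_path has higher priority.
  std::cout << params.model_path.value_or("<unset>") << std::endl;
  return 0;
}
```

In short, callers of the start endpoint can keep sending `llama_model_path` and get the same behavior as before, while new clients can pass `model_path` (and, presumably for multimodal models, `mmproj`) to override the paths for a single start request.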