From 5a5b2534d5655aa5d8e3658277ff9d1263afe557 Mon Sep 17 00:00:00 2001
From: aminediro
Date: Wed, 10 Jul 2024 18:31:31 +0200
Subject: [PATCH] fix llm model name

---
 backend/core/quivr_core/llm/__init__.py |  2 +-
 backend/core/tests/test_llm_endpoint.py | 11 +++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/backend/core/quivr_core/llm/__init__.py b/backend/core/quivr_core/llm/__init__.py
index 4bc4fabc167..0db817c4d33 100644
--- a/backend/core/quivr_core/llm/__init__.py
+++ b/backend/core/quivr_core/llm/__init__.py
@@ -22,7 +22,7 @@ def from_config(cls, config: LLMEndpointConfig = LLMEndpointConfig()):
         from langchain_openai import ChatOpenAI
 
         _llm = ChatOpenAI(
-            name=config.model,
+            model=config.model,
             api_key=SecretStr(config.llm_api_key) if config.llm_api_key else None,
             base_url=config.llm_base_url,
         )
diff --git a/backend/core/tests/test_llm_endpoint.py b/backend/core/tests/test_llm_endpoint.py
index e20c0f8eb3c..0d0d4d7d7fb
--- a/backend/core/tests/test_llm_endpoint.py
+++ b/backend/core/tests/test_llm_endpoint.py
@@ -24,6 +24,17 @@ def test_llm_endpoint_from_config_default():
     assert llm._llm.model_name in llm.get_config().model
 
 
+def test_llm_endpoint_from_config():
+    config = LLMEndpointConfig(
+        model="llama2", llm_api_key="test", llm_base_url="http://localhost:8441"
+    )
+    llm = LLMEndpoint.from_config(config)
+
+    assert not llm.supports_func_calling()
+    assert isinstance(llm._llm, ChatOpenAI)
+    assert llm._llm.model_name in llm.get_config().model
+
+
 def test_llm_endpoint_constructor():
     llm_endpoint = FakeListChatModel(responses=[])
     llm_endpoint = LLMEndpoint(
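
Note on why this one-keyword change matters: on langchain_openai's ChatOpenAI,
name= only labels the runnable for tracing and logging, while model= is the
parameter that sets model_name, i.e. the chat model actually requested. Before
the fix, the configured model was silently ignored and the default was used.
A minimal sketch of the difference ("llama2" and the dummy key are illustrative
placeholders, not values from this patch):

    # Sketch only: assumes langchain_openai is installed; the key is a
    # dummy so no real API call is made at construction time.
    from langchain_openai import ChatOpenAI

    # `name=` labels the runnable for tracing; the model stays at its
    # default, so the configured model never reaches the client.
    before = ChatOpenAI(name="llama2", api_key="test")
    print(before.model_name)  # "gpt-3.5-turbo" (the default)

    # `model=` is what actually selects the chat model.
    after = ChatOpenAI(model="llama2", api_key="test")
    print(after.model_name)  # "llama2"

The new test exercises exactly this path: it builds an LLMEndpoint from a
config with a non-default model and asserts that the underlying ChatOpenAI
reports that model name.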