diff --git a/src/app/endpoints/query.py b/src/app/endpoints/query.py
index c089d910..85bd986e 100644
--- a/src/app/endpoints/query.py
+++ b/src/app/endpoints/query.py
@@ -177,7 +177,7 @@ async def query_endpoint_handler(
     # log Llama Stack configuration, but without sensitive information
     llama_stack_config = configuration.llama_stack_configuration.model_copy()
     llama_stack_config.api_key = "********"
-    logger.info("LLama stack config: %s", llama_stack_config)
+    logger.info("Llama stack config: %s", llama_stack_config)
 
     user_id, _, token = auth
 
diff --git a/src/app/endpoints/streaming_query.py b/src/app/endpoints/streaming_query.py
index 1d1a4286..0ae4f45f 100644
--- a/src/app/endpoints/streaming_query.py
+++ b/src/app/endpoints/streaming_query.py
@@ -134,7 +134,7 @@ def stream_end_event(metadata_map: dict) -> str:
 def stream_build_event(chunk: Any, chunk_id: int, metadata_map: dict) -> Iterator[str]:
     """Build a streaming event from a chunk response.
 
-    This function processes chunks from the LLama Stack streaming response and formats
+    This function processes chunks from the Llama Stack streaming response and formats
     them into Server-Sent Events (SSE) format for the client.
 
     It handles two main event types:
@@ -142,7 +142,7 @@ def stream_build_event(chunk: Any, chunk_id: int, metadata_map: dict) -> Iterato
     2. step_complete: Contains information about completed tool execution steps
 
     Args:
-        chunk: The streaming chunk from LLama Stack containing event data
+        chunk: The streaming chunk from Llama Stack containing event data
         chunk_id: The current chunk ID counter (gets incremented for each token)
 
     Returns:
@@ -544,7 +544,7 @@ async def streaming_query_endpoint_handler(  # pylint: disable=too-many-locals
     # log Llama Stack configuration, but without sensitive information
     llama_stack_config = configuration.llama_stack_configuration.model_copy()
     llama_stack_config.api_key = "********"
-    logger.info("LLama stack config: %s", llama_stack_config)
+    logger.info("Llama stack config: %s", llama_stack_config)
 
     user_id, _user_name, token = auth
 
diff --git a/src/client.py b/src/client.py
index c44c774c..770869b4 100644
--- a/src/client.py
+++ b/src/client.py
@@ -1,4 +1,4 @@
-"""LLama Stack client retrieval class."""
+"""Llama Stack client retrieval class."""
 
 import logging
 
diff --git a/src/models/config.py b/src/models/config.py
index d9e280f4..0516c661 100644
--- a/src/models/config.py
+++ b/src/models/config.py
@@ -179,11 +179,11 @@ def check_llama_stack_model(self) -> Self:
         if self.url is None:
             if self.use_as_library_client is None:
                 raise ValueError(
-                    "LLama stack URL is not specified and library client mode is not specified"
+                    "Llama stack URL is not specified and library client mode is not specified"
                 )
             if self.use_as_library_client is False:
                 raise ValueError(
-                    "LLama stack URL is not specified and library client mode is not enabled"
+                    "Llama stack URL is not specified and library client mode is not enabled"
                 )
         if self.use_as_library_client is None:
             self.use_as_library_client = False
@@ -191,7 +191,7 @@
             if self.library_client_config_path is None:
                 # pylint: disable=line-too-long
                 raise ValueError(
-                    "LLama stack library client mode is enabled but a configuration file path is not specified"  # noqa: E501
+                    "Llama stack library client mode is enabled but a configuration file path is not specified"  # noqa: E501
                 )
             # the configuration file must exists and be regular readable file
             checks.file_check(
diff --git a/tests/unit/app/endpoints/test_models.py b/tests/unit/app/endpoints/test_models.py
index a00c9b37..a825d288 100644
--- a/tests/unit/app/endpoints/test_models.py
+++ b/tests/unit/app/endpoints/test_models.py
@@ -84,7 +84,7 @@ async def test_models_endpoint_handler_improper_llama_stack_configuration(mocker
     with pytest.raises(HTTPException) as e:
         await models_endpoint_handler(request=request, auth=auth)
         assert e.value.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
-        assert e.detail["response"] == "LLama stack is not configured"
+        assert e.detail["response"] == "Llama stack is not configured"
 
 
 @pytest.mark.asyncio
diff --git a/tests/unit/app/endpoints/test_streaming_query.py b/tests/unit/app/endpoints/test_streaming_query.py
index 8953ca21..794e5c18 100644
--- a/tests/unit/app/endpoints/test_streaming_query.py
+++ b/tests/unit/app/endpoints/test_streaming_query.py
@@ -182,7 +182,7 @@ async def _test_streaming_query_endpoint_handler(mocker, store_transcript=False)
         mocker.Mock(identifier="model2", model_type="llm", provider_id="provider2"),
     ]
 
-    # Construct the streaming response from LLama Stack.
+    # Construct the streaming response from Llama Stack.
     # We cannot use 'mock' as 'hasattr(mock, "xxx")' adds the missing
     # attribute and therefore makes checks to see whether it is missing fail.
     mock_streaming_response = mocker.AsyncMock()
diff --git a/tests/unit/models/test_config.py b/tests/unit/models/test_config.py
index ba44cf9c..78944726 100644
--- a/tests/unit/models/test_config.py
+++ b/tests/unit/models/test_config.py
@@ -114,23 +114,23 @@ def test_llama_stack_wrong_configuration_constructor_no_url() -> None:
     """
     with pytest.raises(
         ValueError,
-        match="LLama stack URL is not specified and library client mode is not specified",
+        match="Llama stack URL is not specified and library client mode is not specified",
     ):
         LlamaStackConfiguration()
 
 
 def test_llama_stack_wrong_configuration_constructor_library_mode_off() -> None:
-    """Test the LLamaStackConfiguration constructor."""
+    """Test the LlamaStackConfiguration constructor."""
     with pytest.raises(
         ValueError,
-        match="LLama stack URL is not specified and library client mode is not enabled",
+        match="Llama stack URL is not specified and library client mode is not enabled",
     ):
         LlamaStackConfiguration(use_as_library_client=False)
 
 
 def test_llama_stack_wrong_configuration_no_config_file() -> None:
-    """Test the LLamaStackConfiguration constructor."""
-    m = "LLama stack library client mode is enabled but a configuration file path is not specified"
+    """Test the LlamaStackConfiguration constructor."""
+    m = "Llama stack library client mode is enabled but a configuration file path is not specified"
     with pytest.raises(ValueError, match=m):
         LlamaStackConfiguration(use_as_library_client=True)
 
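For reference, the api_key masking pattern that the query.py and streaming_query.py hunks touch can be exercised in isolation. The sketch below is a minimal, self-contained illustration assuming a Pydantic v2 model; the field values, logger setup, and the reduced LlamaStackConfiguration here are illustrative, not the repository's actual module.

# Minimal sketch of the masking-before-logging pattern seen in the hunks
# above. Assumes Pydantic v2 (model_copy); values are illustrative only.
import logging

from pydantic import BaseModel

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class LlamaStackConfiguration(BaseModel):
    """Reduced stand-in for the real configuration model."""

    url: str | None = None
    api_key: str | None = None


config = LlamaStackConfiguration(url="http://localhost:8321", api_key="secret")

# model_copy() returns a copy, so overwriting api_key on the copy never
# mutates the configuration object the rest of the application keeps using.
masked = config.model_copy()
masked.api_key = "********"
logger.info("Llama stack config: %s", masked)  # api_key is logged as ********

Copying before masking is the point of the pattern: the log line must not leak the key, but the live configuration object still needs the real credential after the log call.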