From e60d7df1beca37a5909e5b066e39162c8fab889b Mon Sep 17 00:00:00 2001
From: Pavel Tisnovsky
Date: Wed, 9 Jul 2025 08:27:24 +0200
Subject: [PATCH 1/3] LCORE-293: use customized system prompt if provided

---
 src/utils/endpoints.py | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/src/utils/endpoints.py b/src/utils/endpoints.py
index 8d84ba2b..802ad591 100644
--- a/src/utils/endpoints.py
+++ b/src/utils/endpoints.py
@@ -16,10 +16,19 @@ def check_configuration_loaded(configuration: AppConfig) -> None:
         )
 
 
-def get_system_prompt(query_request: QueryRequest, _configuration: AppConfig) -> str:
+def get_system_prompt(query_request: QueryRequest, configuration: AppConfig) -> str:
     """Get the system prompt: the provided one, configured one, or default one."""
-    return (
-        query_request.system_prompt
-        if query_request.system_prompt
-        else constants.DEFAULT_SYSTEM_PROMPT
-    )
+    # system prompt defined in the query request has the highest precedence
+    if query_request.system_prompt:
+        return query_request.system_prompt
+
+    # customized system prompt should be used when the query request
+    # does not contain one
+    if (
+        configuration.customization is not None
+        and configuration.customization.system_prompt is not None
+    ):
+        return configuration.customization.system_prompt
+
+    # default system prompt has the lowest precedence
+    return constants.DEFAULT_SYSTEM_PROMPT

From 371b985d5125a4b29c585f87d3502162466ed496 Mon Sep 17 00:00:00 2001
From: Pavel Tisnovsky
Date: Wed, 9 Jul 2025 08:29:03 +0200
Subject: [PATCH 2/3] Updated unit tests accordingly

---
 tests/unit/utils/test_endpoints.py | 169 +++++++++++++++++++++++++++++
 1 file changed, 169 insertions(+)
 create mode 100644 tests/unit/utils/test_endpoints.py

diff --git a/tests/unit/utils/test_endpoints.py b/tests/unit/utils/test_endpoints.py
new file mode 100644
index 00000000..ca3c6967
--- /dev/null
+++ b/tests/unit/utils/test_endpoints.py
@@ -0,0 +1,169 @@
+"""Unit tests for endpoints utility functions."""
+
+import os
+import pytest
+
+import constants
+from configuration import AppConfig
+
+from models.requests import QueryRequest
+from utils import endpoints
+
+
+@pytest.fixture
+def input_file(tmp_path):
+    """Create a prompt file manually using the tmp_path fixture."""
+    filename = os.path.join(tmp_path, "prompt.txt")
+    with open(filename, "wt") as fout:
+        fout.write("this is prompt!")
+    return filename
+
+
+def test_get_default_system_prompt():
+    """Test that default system prompt is returned when other prompts are not provided."""
+    config_dict = {
+        "name": "foo",
+        "service": {
+            "host": "localhost",
+            "port": 8080,
+            "auth_enabled": False,
+            "workers": 1,
+            "color_log": True,
+            "access_log": True,
+        },
+        "llama_stack": {
+            "api_key": "xyzzy",
+            "url": "http://x.y.com:1234",
+            "use_as_library_client": False,
+        },
+        "user_data_collection": {
+            "feedback_disabled": True,
+        },
+        "mcp_servers": [],
+        "customization": None,
+    }
+
+    # no customization provided
+    cfg = AppConfig()
+    cfg.init_from_dict(config_dict)
+
+    # no system prompt in query request
+    query_request = QueryRequest(query="query", system_prompt=None)
+
+    # default system prompt needs to be returned
+    system_prompt = endpoints.get_system_prompt(query_request, cfg)
+    assert system_prompt == constants.DEFAULT_SYSTEM_PROMPT
+
+
+def test_get_customized_system_prompt():
+    """Test that customized system prompt is used when system prompt is not provided in query."""
+    config_dict = {
+        "name": "foo",
+        "service": {
+            "host": "localhost",
+            "port": 8080,
+            "auth_enabled": False,
"workers": 1, + "color_log": True, + "access_log": True, + }, + "llama_stack": { + "api_key": "xyzzy", + "url": "http://x.y.com:1234", + "use_as_library_client": False, + }, + "user_data_collection": { + "feedback_disabled": True, + }, + "mcp_servers": [], + "customization": { + "system_prompt": "This is system prompt", + }, + } + + # no customization provided + cfg = AppConfig() + cfg.init_from_dict(config_dict) + + # no system prompt in query request + query_request = QueryRequest(query="query", system_prompt=None) + + # default system prompt needs to be returned + system_prompt = endpoints.get_system_prompt(query_request, cfg) + assert system_prompt == "This is system prompt" + + +def test_get_query_system_prompt(): + """Test that system prompt from query is returned.""" + config_dict = { + "name": "foo", + "service": { + "host": "localhost", + "port": 8080, + "auth_enabled": False, + "workers": 1, + "color_log": True, + "access_log": True, + }, + "llama_stack": { + "api_key": "xyzzy", + "url": "http://x.y.com:1234", + "use_as_library_client": False, + }, + "user_data_collection": { + "feedback_disabled": True, + }, + "mcp_servers": [], + "customization": None, + } + + # no customization provided + cfg = AppConfig() + cfg.init_from_dict(config_dict) + + # system prompt defined in query request + system_prompt = "System prompt defined in query" + query_request = QueryRequest(query="query", system_prompt=system_prompt) + + # default system prompt needs to be returned + system_prompt = endpoints.get_system_prompt(query_request, cfg) + assert system_prompt == system_prompt + + +def test_get_query_system_prompt_not_customized_one(): + """Test that system prompt from query is returned even when customized one is specified.""" + config_dict = { + "name": "foo", + "service": { + "host": "localhost", + "port": 8080, + "auth_enabled": False, + "workers": 1, + "color_log": True, + "access_log": True, + }, + "llama_stack": { + "api_key": "xyzzy", + "url": "http://x.y.com:1234", + "use_as_library_client": False, + }, + "user_data_collection": { + "feedback_disabled": True, + }, + "mcp_servers": [], + "customization": { + "system_prompt": "This is system prompt", + }, + } + + # no customization provided + cfg = AppConfig() + cfg.init_from_dict(config_dict) + + # system prompt defined in query request + system_prompt = "System prompt defined in query" + query_request = QueryRequest(query="query", system_prompt=system_prompt) + + # default system prompt needs to be returned + system_prompt = endpoints.get_system_prompt(query_request, cfg) + assert system_prompt == system_prompt From fec690348635d24c6f1f4ee235a26a834db8b442 Mon Sep 17 00:00:00 2001 From: Pavel Tisnovsky Date: Wed, 9 Jul 2025 08:30:42 +0200 Subject: [PATCH 3/3] Documentation about system prompt --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index dbe65c90..70f5159e 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,9 @@ Lightspeed Core Stack (LCS) is an AI powered assistant that provides answers to * [Prerequisities](#prerequisities) * [Installation](#installation) * [Configuration](#configuration) + * [Llama Stack as separate server](#llama-stack-as-separate-server) + * [Llama Stack as client library](#llama-stack-as-client-library) + * [System prompt](#system-prompt) * [Usage](#usage) * [Make targets](#make-targets) * [Running Linux container image](#running-linux-container-image) @@ -100,6 +103,17 @@ user_data_collection: transcripts_storage: "/tmp/data/transcripts" ``` +## 
+
 # Usage