Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ Lightspeed Core Stack (LCS) is an AI powered assistant that provides answers to
* [Prerequisities](#prerequisities)
* [Installation](#installation)
* [Configuration](#configuration)
* [Llama Stack as separate server](#llama-stack-as-separate-server)
* [Llama Stack as client library](#llama-stack-as-client-library)
* [System prompt](#system-prompt)
* [Usage](#usage)
* [Make targets](#make-targets)
* [Running Linux container image](#running-linux-container-image)
Expand Down Expand Up @@ -100,6 +103,17 @@ user_data_collection:
transcripts_storage: "/tmp/data/transcripts"
```

## System prompt

The service uses a so-called system prompt to put the question into context before the question is sent to the selected LLM. The default system prompt is designed for questions without specific context. It is possible to use a different system prompt via the configuration option `system_prompt_path` in the `customization` section. That option must contain the path to the text file with the actual system prompt (which can contain multiple lines). An example of such a configuration:

```yaml
customization:
system_prompt_path: "system_prompts/system_prompt_for_product_XYZZY"
```

Additionally, an optional string parameter `system_prompt` can be specified in the `/v1/query` and `/v1/streaming_query` endpoints to override the configured system prompt.



# Usage
Expand Down
21 changes: 15 additions & 6 deletions src/utils/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,19 @@ def check_configuration_loaded(configuration: AppConfig) -> None:
)


def get_system_prompt(query_request: QueryRequest, configuration: AppConfig) -> str:
    """Resolve the system prompt for a query.

    Precedence (highest first):
    1. ``system_prompt`` supplied in the query request.
    2. ``customization.system_prompt`` from the service configuration.
    3. The built-in ``constants.DEFAULT_SYSTEM_PROMPT``.

    Parameters:
        query_request: The incoming query, possibly carrying its own prompt.
        configuration: Loaded application configuration.

    Returns:
        The system prompt string to send to the LLM.
    """
    # system prompt defined in the query request has precedence
    if query_request.system_prompt:
        return query_request.system_prompt

    # customized system prompt should be used when the query request
    # does not contain one
    if (
        configuration.customization is not None
        and configuration.customization.system_prompt is not None
    ):
        return configuration.customization.system_prompt

    # default system prompt has the lowest precedence
    return constants.DEFAULT_SYSTEM_PROMPT
169 changes: 169 additions & 0 deletions tests/unit/utils/test_endpoints.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
"""Unit tests for endpoints utility functions."""

import os
import pytest

import constants
from configuration import AppConfig

from models.requests import QueryRequest
from utils import endpoints


@pytest.fixture
def input_file(tmp_path):
    """Create a one-line prompt file under tmp_path and return its path as a string.

    NOTE(review): this fixture is not used by the tests visible here — confirm
    it is consumed elsewhere or remove it.
    """
    # tmp_path is already a pathlib.Path; use it directly instead of os.path
    prompt_file = tmp_path / "prompt.txt"
    prompt_file.write_text("this is prompt!", encoding="utf-8")
    # return a plain string to keep the original fixture's return type
    return str(prompt_file)


def test_get_default_system_prompt():
    """Test that default system prompt is returned when other prompts are not provided."""
    configuration_payload = {
        "name": "foo",
        "service": {
            "host": "localhost",
            "port": 8080,
            "auth_enabled": False,
            "workers": 1,
            "color_log": True,
            "access_log": True,
        },
        "llama_stack": {
            "api_key": "xyzzy",
            "url": "http://x.y.com:1234",
            "use_as_library_client": False,
        },
        "user_data_collection": {
            "feedback_disabled": True,
        },
        "mcp_servers": [],
        "customization": None,
    }

    # build a configuration without any customization section
    config = AppConfig()
    config.init_from_dict(configuration_payload)

    # the query request carries no system prompt of its own
    request = QueryRequest(query="query", system_prompt=None)

    # with neither source available, the default prompt is the fallback
    resolved = endpoints.get_system_prompt(request, config)
    assert resolved == constants.DEFAULT_SYSTEM_PROMPT


def test_get_customized_system_prompt():
    """Test that customized system prompt is used when system prompt is not provided in query."""
    config_dict = {
        "name": "foo",
        "service": {
            "host": "localhost",
            "port": 8080,
            "auth_enabled": False,
            "workers": 1,
            "color_log": True,
            "access_log": True,
        },
        "llama_stack": {
            "api_key": "xyzzy",
            "url": "http://x.y.com:1234",
            "use_as_library_client": False,
        },
        "user_data_collection": {
            "feedback_disabled": True,
        },
        "mcp_servers": [],
        "customization": {
            "system_prompt": "This is system prompt",
        },
    }

    # customization with a system prompt is provided
    cfg = AppConfig()
    cfg.init_from_dict(config_dict)

    # no system prompt in query request
    query_request = QueryRequest(query="query", system_prompt=None)

    # customized system prompt needs to be returned
    system_prompt = endpoints.get_system_prompt(query_request, cfg)
    assert system_prompt == "This is system prompt"


def test_get_query_system_prompt():
    """Test that the system prompt provided in the query request is returned."""
    config_dict = {
        "name": "foo",
        "service": {
            "host": "localhost",
            "port": 8080,
            "auth_enabled": False,
            "workers": 1,
            "color_log": True,
            "access_log": True,
        },
        "llama_stack": {
            "api_key": "xyzzy",
            "url": "http://x.y.com:1234",
            "use_as_library_client": False,
        },
        "user_data_collection": {
            "feedback_disabled": True,
        },
        "mcp_servers": [],
        "customization": None,
    }

    # no customization provided
    cfg = AppConfig()
    cfg.init_from_dict(config_dict)

    # system prompt defined in query request
    query_system_prompt = "System prompt defined in query"
    query_request = QueryRequest(query="query", system_prompt=query_system_prompt)

    # the query's own system prompt needs to be returned; use a distinct
    # variable for the result so the assertion is not a tautology
    resolved_prompt = endpoints.get_system_prompt(query_request, cfg)
    assert resolved_prompt == query_system_prompt


def test_get_query_system_prompt_not_customized_one():
    """Test that system prompt from query is returned even when customized one is specified."""
    config_dict = {
        "name": "foo",
        "service": {
            "host": "localhost",
            "port": 8080,
            "auth_enabled": False,
            "workers": 1,
            "color_log": True,
            "access_log": True,
        },
        "llama_stack": {
            "api_key": "xyzzy",
            "url": "http://x.y.com:1234",
            "use_as_library_client": False,
        },
        "user_data_collection": {
            "feedback_disabled": True,
        },
        "mcp_servers": [],
        "customization": {
            "system_prompt": "This is system prompt",
        },
    }

    # customization with a system prompt is provided
    cfg = AppConfig()
    cfg.init_from_dict(config_dict)

    # system prompt defined in query request
    query_system_prompt = "System prompt defined in query"
    query_request = QueryRequest(query="query", system_prompt=query_system_prompt)

    # the query's system prompt takes precedence over the customized one;
    # use a distinct variable for the result so the assertion is not a tautology
    resolved_prompt = endpoints.get_system_prompt(query_request, cfg)
    assert resolved_prompt == query_system_prompt