5 changes: 5 additions & 0 deletions src/models/config.py
@@ -1,5 +1,6 @@
"""Model with service configuration."""

from pathlib import Path
from typing import Optional

from pydantic import BaseModel, model_validator, FilePath, AnyHttpUrl, PositiveInt
@@ -82,6 +83,10 @@ def check_llama_stack_model(self) -> Self:
raise ValueError(
"LLama stack library client mode is enabled but a configuration file path is not specified" # noqa: C0301
)
# the configuration file must exist and be a regular, readable file
checks.file_check(
Path(self.library_client_config_path), "Llama Stack configuration file"
)
return self
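
Note: checks.file_check and the InvalidConfigurationError it raises are not part of this diff. The sketch below is a hypothetical reconstruction of the helper in utils/checks.py, inferred from the error message asserted in the new unit test further down; the actual implementation in the repository may differ.

from pathlib import Path


class InvalidConfigurationError(Exception):
    """Raised when a service configuration check fails (assumed exception type)."""


def file_check(path: Path, desc: str) -> None:
    """Ensure that the given path points to a regular file (assumed helper)."""
    if not path.is_file():
        raise InvalidConfigurationError(f"{desc} '{path}' is not a file")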


125 changes: 125 additions & 0 deletions tests/configuration/run.yaml
@@ -0,0 +1,125 @@
version: '2'
image_name: minimal-viable-llama-stack-configuration

apis:
- agents
- datasetio
- eval
- inference
- post_training
- safety
- scoring
- telemetry
- tool_runtime
- vector_io
benchmarks: []
container_image: null
datasets: []
external_providers_dir: null
inference_store:
db_path: .llama/distributions/ollama/inference_store.db
type: sqlite
logging: null
metadata_store:
db_path: .llama/distributions/ollama/registry.db
namespace: null
type: sqlite
providers:
agents:
- config:
persistence_store:
db_path: .llama/distributions/ollama/agents_store.db
namespace: null
type: sqlite
responses_store:
db_path: .llama/distributions/ollama/responses_store.db
type: sqlite
provider_id: meta-reference
provider_type: inline::meta-reference
datasetio:
- config:
kvstore:
db_path: .llama/distributions/ollama/huggingface_datasetio.db
namespace: null
type: sqlite
provider_id: huggingface
provider_type: remote::huggingface
- config:
kvstore:
db_path: .llama/distributions/ollama/localfs_datasetio.db
namespace: null
type: sqlite
provider_id: localfs
provider_type: inline::localfs
eval:
- config:
kvstore:
db_path: .llama/distributions/ollama/meta_reference_eval.db
namespace: null
type: sqlite
provider_id: meta-reference
provider_type: inline::meta-reference
inference:
- provider_id: openai
provider_type: remote::openai
config:
api_key: ${env.OPENAI_API_KEY}
post_training:
- config:
checkpoint_format: huggingface
device: cpu
distributed_backend: null
provider_id: huggingface
provider_type: inline::huggingface
safety:
- config:
excluded_categories: []
provider_id: llama-guard
provider_type: inline::llama-guard
scoring:
- config: {}
provider_id: basic
provider_type: inline::basic
- config: {}
provider_id: llm-as-judge
provider_type: inline::llm-as-judge
- config:
openai_api_key: '********'
provider_id: braintrust
provider_type: inline::braintrust
telemetry:
- config:
service_name: ''
sinks: sqlite
sqlite_db_path: .llama/distributions/ollama/trace_store.db
provider_id: meta-reference
provider_type: inline::meta-reference
tool_runtime:
- provider_id: model-context-protocol
provider_type: remote::model-context-protocol
config: {}
vector_io:
- config:
kvstore:
db_path: .llama/distributions/ollama/faiss_store.db
namespace: null
type: sqlite
provider_id: faiss
provider_type: inline::faiss
scoring_fns: []
server:
auth: null
host: null
port: 8321
quota: null
tls_cafile: null
tls_certfile: null
tls_keyfile: null
shields: []
vector_dbs: []

models:
- model_id: gpt-4-turbo
provider_id: openai
model_type: llm
provider_model_id: gpt-4-turbo
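
This minimal run.yaml exists because the validator in src/models/config.py now requires library_client_config_path to point at a real, readable file, so the tests below reference this fixture instead of a placeholder path like "foo". A quick sanity check (a sketch, assuming PyYAML is installed) that the fixture parses and contains the values shown above:

import yaml

with open("tests/configuration/run.yaml", encoding="utf-8") as f:
    run_config = yaml.safe_load(f)

assert run_config["version"] == "2"  # quoted in the YAML, so it loads as a string
assert "inference" in run_config["providers"]
assert run_config["server"]["port"] == 8321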
41 changes: 31 additions & 10 deletions tests/unit/models/test_config.py
@@ -25,6 +25,8 @@
DataCollectorConfiguration,
)

from utils.checks import InvalidConfigurationError


def test_service_configuration_constructor() -> None:
"""Test the ServiceConfiguration constructor."""
@@ -58,7 +60,8 @@ def test_service_configuration_workers_value() -> None:
def test_llama_stack_configuration_constructor() -> None:
"""Test the LLamaStackConfiguration constructor."""
llama_stack_configuration = LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
)
assert llama_stack_configuration is not None

@@ -76,6 +79,18 @@ def test_llama_stack_configuration_constructor() -> None:
assert llama_stack_configuration is not None


def test_llama_stack_configuration_no_run_yaml() -> None:
"""Test the LLamaStackConfiguration constructor when run.yaml file is not a file."""
with pytest.raises(
InvalidConfigurationError,
match="Llama Stack configuration file 'not a file' is not a file",
):
LlamaStackConfiguration(
use_as_library_client=True,
library_client_config_path="not a file",
)


def test_llama_stack_wrong_configuration_constructor_no_url() -> None:
"""Test the LLamaStackConfiguration constructor."""
with pytest.raises(
@@ -298,7 +313,8 @@ def test_configuration_empty_mcp_servers() -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -319,7 +335,8 @@ def test_configuration_single_mcp_server() -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -346,7 +363,8 @@ def test_configuration_multiple_mcp_servers() -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -368,7 +386,8 @@ def test_dump_configuration(tmp_path) -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -413,7 +432,7 @@ def test_dump_configuration(tmp_path) -> None:
"url": None,
"api_key": None,
"use_as_library_client": True,
"library_client_config_path": "foo",
"library_client_config_path": "tests/configuration/run.yaml",
},
"user_data_collection": {
"feedback_disabled": True,
@@ -450,7 +469,8 @@ def test_dump_configuration_with_one_mcp_server(tmp_path) -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -490,7 +510,7 @@ def test_dump_configuration_with_one_mcp_server(tmp_path) -> None:
"url": None,
"api_key": None,
"use_as_library_client": True,
"library_client_config_path": "foo",
"library_client_config_path": "tests/configuration/run.yaml",
},
"user_data_collection": {
"feedback_disabled": True,
@@ -535,7 +555,8 @@ def test_dump_configuration_with_more_mcp_servers(tmp_path) -> None:
name="test_name",
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True, library_client_config_path="foo"
use_as_library_client=True,
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(
feedback_disabled=True, feedback_storage=None
@@ -581,7 +602,7 @@ def test_dump_configuration_with_more_mcp_servers(tmp_path) -> None:
"url": None,
"api_key": None,
"use_as_library_client": True,
"library_client_config_path": "foo",
"library_client_config_path": "tests/configuration/run.yaml",
},
"user_data_collection": {
"feedback_disabled": True,
2 changes: 1 addition & 1 deletion tests/unit/utils/test_common.py
@@ -269,7 +269,7 @@ async def test_register_mcp_servers_async_with_library_client(mocker):
service=ServiceConfiguration(),
llama_stack=LlamaStackConfiguration(
use_as_library_client=True,
library_client_config_path="/path/to/config.yaml",
library_client_config_path="tests/configuration/run.yaml",
),
user_data_collection=UserDataCollection(feedback_disabled=True),
mcp_servers=[mcp_server],
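
Taken together, these changes mean that any configuration built in library-client mode must now reference an existing run.yaml. A minimal end-to-end construction mirroring the test fixtures above (the enclosing Configuration model name and the import module are assumptions based on the tests; the keyword arguments copy the test bodies):

from models.config import (
    Configuration,
    LlamaStackConfiguration,
    ServiceConfiguration,
    UserDataCollection,
)

# Validation succeeds because the fixture added in this PR is a real file.
config = Configuration(
    name="test_name",
    service=ServiceConfiguration(),
    llama_stack=LlamaStackConfiguration(
        use_as_library_client=True,
        library_client_config_path="tests/configuration/run.yaml",
    ),
    user_data_collection=UserDataCollection(feedback_disabled=True),
    mcp_servers=[],
)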