Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 81 additions & 0 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
"""Shared fixtures for integration tests."""

from pathlib import Path

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool

from configuration import configuration
from models.database.base import Base


@pytest.fixture(autouse=True)
def reset_configuration_state():
    """Clear the global configuration singleton before every integration test.

    Runs automatically (autouse) so each test starts from an unloaded
    configuration.  This keeps tests independent of execution order: a
    test may assert on either the loaded or the unloaded state without
    being influenced by whichever test happened to run before it.
    """
    # Reaching into the private attribute is intentional here — there is
    # no public reset API for the singleton.
    # pylint: disable=protected-access
    setattr(configuration, "_configuration", None)
    yield


@pytest.fixture(name="test_config", scope="function")
def test_config_fixture():
    """Load the real test configuration file for integration tests.

    Unlike unit tests, this fixture exercises the actual configuration
    loader against the YAML file shipped with the test suite, so tests
    using it integrate with the genuine configuration system.

    Yields the (now loaded) global ``configuration`` module object.
    """
    tests_dir = Path(__file__).parent.parent
    config_path = tests_dir / "configuration" / "lightspeed-stack.yaml"
    assert config_path.exists(), f"Config file not found: {config_path}"

    configuration.load_configuration(str(config_path))

    yield configuration
    # No explicit teardown: the autouse reset_configuration_state fixture
    # restores the unloaded state before the next test.


@pytest.fixture(name="test_db_engine", scope="function")
def test_db_engine_fixture():
    """Create an in-memory SQLite database engine for testing.

    This provides a real database (not mocked) for integration tests.
    Each test gets a fresh database with all ORM tables created, and the
    schema is dropped again during teardown.

    Yields:
        A SQLAlchemy ``Engine`` bound to a single shared in-memory
        SQLite database.
    """
    engine = create_engine(
        "sqlite:///:memory:",
        echo=False,  # Set to True to see SQL queries
        connect_args={"check_same_thread": False},  # Allow multi-threaded access
        # BUG FIX: with the default pool, every pooled connection to
        # ":memory:" opens its OWN private database, so a second
        # connection (e.g. from another thread, which
        # check_same_thread=False explicitly allows) would see no tables.
        # StaticPool reuses one single connection so all users of the
        # engine share the same in-memory database.
        poolclass=StaticPool,
    )

    # Create all tables declared on the project's declarative Base
    Base.metadata.create_all(engine)

    yield engine

    # Cleanup: drop the schema and release the pooled connection
    Base.metadata.drop_all(engine)
    engine.dispose()


@pytest.fixture(name="test_db_session", scope="function")
def test_db_session_fixture(test_db_engine):
    """Provide a real ORM session bound to the in-memory test database.

    The session is created from a fresh ``sessionmaker`` with autocommit
    and autoflush disabled, and is closed again during fixture teardown.
    """
    make_session = sessionmaker(
        bind=test_db_engine,
        autocommit=False,
        autoflush=False,
    )
    db_session = make_session()

    yield db_session

    db_session.close()
1 change: 1 addition & 0 deletions tests/integration/endpoints/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"""Integration tests for API endpoints."""
149 changes: 149 additions & 0 deletions tests/integration/endpoints/test_info_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
"""Integration tests for the /info endpoint."""

import pytest
from fastapi import HTTPException, Request, status
from llama_stack_client import APIConnectionError
from llama_stack_client.types import VersionInfo

from app.endpoints.info import info_endpoint_handler
from authentication.noop import NoopAuthDependency
from version import __version__


@pytest.fixture(name="mock_llama_stack_client")
def mock_llama_stack_client_fixture(mocker):
    """Mock only the external Llama Stack client.

    The Llama Stack service is the single external dependency these
    integration tests replace with a mock, since it represents a network
    call to another service. The patched holder hands out an AsyncMock
    client whose ``inspect.version()`` reports a fixed version.
    """
    holder_cls = mocker.patch("app.endpoints.info.AsyncLlamaStackClientHolder")

    fake_client = mocker.AsyncMock()
    # Pin the reported Llama Stack version so tests can assert on it
    fake_client.inspect.version.return_value = VersionInfo(version="0.2.22")

    # Wire the fake client into the holder instance the endpoint will use
    holder_cls.return_value.get_client.return_value = fake_client

    yield fake_client


@pytest.fixture(name="test_request")
def test_request_fixture():
    """Create a test FastAPI Request object with proper scope."""
    # Minimal ASGI scope: enough for the endpoint and noop auth to
    # consume the request without a running server.
    asgi_scope = {
        "type": "http",
        "query_string": b"",
        "headers": [],
    }
    return Request(scope=asgi_scope)


@pytest.fixture(name="test_auth")
async def test_auth_fixture(test_request):
    """Resolve authentication through the real noop auth module.

    Uses the actual NoopAuthDependency rather than a mock, so the tests
    exercise the genuine authentication path end to end.

    NOTE(review): as an ``async`` fixture this relies on pytest-asyncio
    picking it up (e.g. asyncio_mode=auto) — confirm the project config.
    """
    auth_dependency = NoopAuthDependency()
    auth_result = await auth_dependency(test_request)
    return auth_result


@pytest.mark.asyncio
async def test_info_endpoint_returns_service_information(
    test_config, mock_llama_stack_client, test_request, test_auth
):
    """Test that info endpoint returns correct service information.

    This integration test verifies:
    - Endpoint handler integrates with configuration system
    - Configuration values are correctly accessed
    - Llama Stack client is properly called
    - Real noop authentication is used
    - Response structure matches expected format

    Args:
        test_config: Loads real configuration (required for endpoint to access config)
        mock_llama_stack_client: Mocked Llama Stack client
        test_request: FastAPI request
        test_auth: noop authentication tuple
    """
    # The config fixture is needed only for its loading side effect
    del test_config

    response = await info_endpoint_handler(auth=test_auth, request=test_request)

    # The endpoint must have queried Llama Stack exactly once
    mock_llama_stack_client.inspect.version.assert_called_once()

    # Values sourced from the real configuration and the mocked client
    assert response.name == "foo bar baz"  # From lightspeed-stack.yaml
    assert response.service_version == __version__
    assert response.llama_stack_version == "0.2.22"


@pytest.mark.asyncio
async def test_info_endpoint_handles_connection_error(
    test_config, mock_llama_stack_client, test_request, test_auth, mocker
):
    """Test that info endpoint properly handles Llama Stack connection errors.

    This integration test verifies:
    - Error handling when external service is unavailable
    - HTTPException is raised with correct status code
    - Error response includes proper error details

    Args:
        test_config: Loads real configuration (required for endpoint to access config)
        mock_llama_stack_client: Mocked Llama Stack client
        test_request: FastAPI request
        test_auth: noop authentication tuple
        mocker: pytest-mock fixture for creating mocks
    """
    # Configuration loading is a prerequisite side effect of the fixture
    del test_config

    # Simulate an unreachable Llama Stack service
    connection_error = APIConnectionError(request=mocker.Mock())
    mock_llama_stack_client.inspect.version.side_effect = connection_error

    # The handler must translate the connection failure into an HTTP 500
    with pytest.raises(HTTPException) as exc_info:
        await info_endpoint_handler(auth=test_auth, request=test_request)

    raised = exc_info.value
    assert raised.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
    assert isinstance(raised.detail, dict)
    assert raised.detail["response"] == "Unable to connect to Llama Stack"
    assert "cause" in raised.detail


@pytest.mark.asyncio
async def test_info_endpoint_uses_configuration_values(
    test_config, mock_llama_stack_client, test_request, test_auth
):
    """Test that info endpoint correctly uses configuration values.

    This integration test verifies:
    - Configuration is properly loaded and accessible
    - Endpoint reads configuration values correctly
    - Service name from config appears in response

    Args:
        test_config: Loads real configuration (required for endpoint to access config)
        mock_llama_stack_client: Mocked Llama Stack client
        test_request: Real FastAPI request
        test_auth: Real noop authentication tuple
    """
    # The mocked client is required for the endpoint to run, but its
    # behavior is not asserted on in this test
    del mock_llama_stack_client

    response = await info_endpoint_handler(auth=test_auth, request=test_request)

    # The response name must match what the loaded configuration declares
    expected_name = test_config.configuration.name
    assert response.name == expected_name
    assert response.name == "foo bar baz"
Loading