From e4696763e27db7bc220c58d6f1fe98d5b38f8056 Mon Sep 17 00:00:00 2001
From: bnbong
Date: Wed, 17 Sep 2025 14:24:45 +0900
Subject: [PATCH] [DOCS, FIX] add fastkit startdemo template use-case
 tutorials, add a more specific cleanup step to the inspector
---
README.md | 3 +
docs/index.md | 25 +
docs/tutorial/async-crud-api.md | 665 +++++++
docs/tutorial/basic-api-server.md | 398 ++++
docs/tutorial/custom-response-handling.md | 1393 +++++++++++++
docs/tutorial/database-integration.md | 1027 ++++++++++
docs/tutorial/docker-deployment.md | 1177 +++++++++++
docs/tutorial/mcp-integration.md | 1730 +++++++++++++++++
mkdocs.yml | 6 +
src/fastapi_fastkit/backend/inspector.py | 63 +-
.../backend/temp_tmpfa8m90nv/requirements.txt | 1 +
.../backend/temp_tmpj91znt98/requirements.txt | 1 +
.../backend/temp_tmpqnqxgjef/requirements.txt | 1 +
tests/test_backends/test_inspector.py | 219 ++-
14 files changed, 6702 insertions(+), 7 deletions(-)
create mode 100644 docs/tutorial/async-crud-api.md
create mode 100644 docs/tutorial/basic-api-server.md
create mode 100644 docs/tutorial/custom-response-handling.md
create mode 100644 docs/tutorial/database-integration.md
create mode 100644 docs/tutorial/docker-deployment.md
create mode 100644 docs/tutorial/mcp-integration.md
create mode 100644 src/fastapi_fastkit/backend/temp_tmpfa8m90nv/requirements.txt
create mode 100644 src/fastapi_fastkit/backend/temp_tmpj91znt98/requirements.txt
create mode 100644 src/fastapi_fastkit/backend/temp_tmpqnqxgjef/requirements.txt
diff --git a/README.md b/README.md
index e285a8b..77178dc 100644
--- a/README.md
+++ b/README.md
@@ -11,6 +11,9 @@
+
+
+
---
diff --git a/docs/index.md b/docs/index.md
index 335bc08..26433ff 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -292,6 +292,31 @@ For comprehensive guides and detailed usage instructions, explore our documentat
- 📖 **[CLI Reference](user-guide/cli-reference.md)** - Complete command reference
- 🔍 **[Template Quality Assurance](reference/template-quality-assurance.md)** - Automated testing and quality standards
+## 🚀 Template-based Tutorials
+
+Learn FastAPI development through practical use cases with our pre-built templates:
+
+### 📖 Core Tutorials
+
+- **[Building a Basic API Server](tutorial/basic-api-server.md)** - Create your first FastAPI server using the `fastapi-default` template
+- **[Building an Asynchronous CRUD API](tutorial/async-crud-api.md)** - Develop a high-performance async API with the `fastapi-async-crud` template
+
+### 🗄️ Database & Infrastructure
+
+- **[Integrating with a Database](tutorial/database-integration.md)** - Utilize PostgreSQL + SQLAlchemy with the `fastapi-psql-orm` template
+- **[Dockerizing and Deploying](tutorial/docker-deployment.md)** - Set up a production deployment environment using the `fastapi-dockerized` template
+
+### ⚡ Advanced Features
+
+- **[Custom Response Handling & Advanced API Design](tutorial/custom-response-handling.md)** - Build enterprise-grade APIs with the `fastapi-custom-response` template
+- **[Integrating with MCP](tutorial/mcp-integration.md)** - Create an API server integrated with AI models using the `fastapi-mcp` template
+
+Each tutorial provides:
+- ✅ **Practical Examples** - Code you can use directly in real projects
+- ✅ **Step-by-Step Guides** - Detailed explanations for beginners to follow easily
+- ✅ **Best Practices** - Industry-standard patterns and security considerations
+- ✅ **Extension Methods** - Guidance for taking your project to the next level
+
## Contributing
We welcome contributions from the community! FastAPI-fastkit is designed to help newcomers to Python and FastAPI, and your contributions can make a significant impact.
diff --git a/docs/tutorial/async-crud-api.md b/docs/tutorial/async-crud-api.md
new file mode 100644
index 0000000..de431bc
--- /dev/null
+++ b/docs/tutorial/async-crud-api.md
@@ -0,0 +1,665 @@
+# Building Asynchronous CRUD APIs
+
+Learn how to build high-performance CRUD APIs using FastAPI's asynchronous processing capabilities. In this tutorial, we'll implement asynchronous file I/O and efficient data processing using the `fastapi-async-crud` template.
+
+## What You'll Learn in This Tutorial
+
+- Understanding asynchronous FastAPI applications
+- Asynchronous CRUD operations using `async/await` syntax
+- Asynchronous file processing with aiofiles
+- Writing and executing asynchronous tests
+- Performance optimization techniques
+
+## Prerequisites
+
+- Completed the [Basic API Server Tutorial](basic-api-server.md)
+- Understanding of Python's `async/await` basic concepts
+- FastAPI-fastkit installed
+
+## Why Asynchronous Processing is Needed
+
+Let's understand the difference between synchronous and asynchronous processing:
+
+### Synchronous Processing
+
+```python
+def process_items():
+ item1 = read_file("item1.json") # Wait 2 seconds
+ item2 = read_file("item2.json") # Wait 2 seconds
+ item3 = read_file("item3.json") # Wait 2 seconds
+ return [item1, item2, item3] # Total: 6 seconds
+```
+
+### Asynchronous Processing
+
+```python
+async def process_items():
+ item1_task = read_file_async("item1.json") # Start concurrently
+ item2_task = read_file_async("item2.json") # Start concurrently
+ item3_task = read_file_async("item3.json") # Start concurrently
+
+ items = await asyncio.gather(item1_task, item2_task, item3_task)
+ return items # Total: 2 seconds
+```
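+
+To see the gap yourself, here is a small self-contained script that simulates each 2-second read with `asyncio.sleep` instead of real files (an illustration, not template code):
+
+```python
+import asyncio
+import time
+
+async def read_file_async(name: str) -> str:
+    await asyncio.sleep(2)  # simulate 2 seconds of slow I/O
+    return f"contents of {name}"
+
+async def main():
+    start = time.perf_counter()
+    items = await asyncio.gather(
+        read_file_async("item1.json"),
+        read_file_async("item2.json"),
+        read_file_async("item3.json"),
+    )
+    # All three "reads" overlap, so this prints roughly 2.0s, not 6.0s
+    print(f"{len(items)} reads in {time.perf_counter() - start:.1f}s")
+
+asyncio.run(main())
+```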
+
+## Step 1: Creating an Asynchronous CRUD Project
+
+Create a project using the `fastapi-async-crud` template:
+
+
+
+```console
+$ fastkit startdemo fastapi-async-crud
+Enter the project name: async-todo-api
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: Asynchronous todo management API
+Deploying FastAPI project using 'fastapi-async-crud' template
+
+ Project Information
+┌──────────────┬─────────────────────────────────────────┐
+│ Project Name │ async-todo-api │
+│ Author │ Developer Kim │
+│ Author Email │ developer@example.com │
+│ Description │ Asynchronous todo management API │
+└──────────────┴─────────────────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬───────────────────┐
+│ Dependency 1 │ fastapi │
+│ Dependency 2 │ uvicorn │
+│ Dependency 3 │ pydantic │
+│ Dependency 4 │ pydantic-settings │
+│ Dependency 5 │ aiofiles │
+│ Dependency 6 │ pytest-asyncio │
+└──────────────┴───────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'async-todo-api' from 'fastapi-async-crud' has been created successfully!
+```
+
+
+
+## Step 2: Analyzing Project Structure
+
+Let's examine the key differences in the generated project:
+
+```
+async-todo-api/
+├── src/
+│ ├── main.py # Asynchronous FastAPI application
+│ ├── api/
+│ │ └── routes/
+│ │ └── items.py # Asynchronous CRUD endpoints
+│ ├── crud/
+│ │ └── items.py # Asynchronous data processing logic
+│ ├── schemas/
+│ │ └── items.py # Data models (same)
+│ ├── mocks/
+│ │ └── mock_items.json # JSON file database
+│ └── core/
+│ └── config.py # Configuration file
+└── tests/
+ ├── conftest.py # Asynchronous test configuration
+ └── test_items.py # Asynchronous test cases
+```
+
+### Key Differences
+
+1. **aiofiles**: Asynchronous file I/O processing
+2. **pytest-asyncio**: Asynchronous test support
+3. **async/await pattern**: All CRUD operations implemented asynchronously
+
+## Step 3: Understanding Asynchronous CRUD Logic
+
+### Asynchronous Data Processing (`src/crud/items.py`)
+
+```python
+import json
+import asyncio
+from typing import List, Optional
+from aiofiles import open as aio_open
+from pathlib import Path
+
+from src.schemas.items import Item, ItemCreate, ItemUpdate
+
+class AsyncItemCRUD:
+ def __init__(self, data_file: str = "src/mocks/mock_items.json"):
+ self.data_file = Path(data_file)
+
+ async def _read_data(self) -> List[dict]:
+ """Asynchronously read data from JSON file"""
+ try:
+ async with aio_open(self.data_file, 'r', encoding='utf-8') as f:
+ content = await f.read()
+ return json.loads(content)
+ except FileNotFoundError:
+ return []
+
+ async def _write_data(self, data: List[dict]) -> None:
+ """Asynchronously write data to JSON file"""
+ async with aio_open(self.data_file, 'w', encoding='utf-8') as f:
+ await f.write(json.dumps(data, indent=2, ensure_ascii=False))
+
+ async def get_items(self) -> List[Item]:
+ """Retrieve all items (asynchronous)"""
+ data = await self._read_data()
+ return [Item(**item) for item in data]
+
+ async def get_item(self, item_id: int) -> Optional[Item]:
+ """Retrieve specific item (asynchronous)"""
+ data = await self._read_data()
+ item_data = next((item for item in data if item["id"] == item_id), None)
+ return Item(**item_data) if item_data else None
+
+ async def create_item(self, item: ItemCreate) -> Item:
+ """Create new item (asynchronous)"""
+ data = await self._read_data()
+ new_id = max([item["id"] for item in data], default=0) + 1
+
+ new_item = Item(id=new_id, **item.dict())
+ data.append(new_item.dict())
+
+ await self._write_data(data)
+ return new_item
+
+ async def update_item(self, item_id: int, item_update: ItemUpdate) -> Optional[Item]:
+ """Update item (asynchronous)"""
+ data = await self._read_data()
+
+ for i, item in enumerate(data):
+ if item["id"] == item_id:
+ update_data = item_update.dict(exclude_unset=True)
+ data[i].update(update_data)
+ await self._write_data(data)
+ return Item(**data[i])
+
+ return None
+
+ async def delete_item(self, item_id: int) -> bool:
+ """Delete item (asynchronous)"""
+ data = await self._read_data()
+ original_length = len(data)
+
+ data = [item for item in data if item["id"] != item_id]
+
+ if len(data) < original_length:
+ await self._write_data(data)
+ return True
+
+ return False
+```
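+
+Note that this read-modify-write cycle is not safe under concurrency: two overlapping `create_item` calls can read the same snapshot and assign duplicate IDs or silently drop a write. A minimal sketch of one way to guard it, assuming you add an `asyncio.Lock` to the class (not part of the template as shown):
+
+```python
+import asyncio
+
+class AsyncItemCRUD:
+    def __init__(self, data_file: str = "src/mocks/mock_items.json"):
+        self.data_file = Path(data_file)
+        self._lock = asyncio.Lock()  # serializes read-modify-write cycles
+
+    async def create_item(self, item: ItemCreate) -> Item:
+        async with self._lock:  # only one mutation touches the file at a time
+            data = await self._read_data()
+            new_id = max([entry["id"] for entry in data], default=0) + 1
+            new_item = Item(id=new_id, **item.dict())
+            data.append(new_item.dict())
+            await self._write_data(data)
+            return new_item
+```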
+
+### Asynchronous API Endpoints (`src/api/routes/items.py`)
+
+```python
+from typing import List
+from fastapi import APIRouter, HTTPException, status
+
+from src.schemas.items import Item, ItemCreate, ItemUpdate
+from src.crud.items import AsyncItemCRUD
+
+router = APIRouter()
+crud = AsyncItemCRUD()
+
+@router.get("/", response_model=List[Item])
+async def read_items():
+ """Retrieve all items (asynchronous)"""
+ return await crud.get_items()
+
+@router.get("/{item_id}", response_model=Item)
+async def read_item(item_id: int):
+ """Retrieve specific item (asynchronous)"""
+ item = await crud.get_item(item_id)
+ if item is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item with id {item_id} not found"
+ )
+ return item
+
+@router.post("/", response_model=Item, status_code=status.HTTP_201_CREATED)
+async def create_item(item: ItemCreate):
+ """Create new item (asynchronous)"""
+ return await crud.create_item(item)
+
+@router.put("/{item_id}", response_model=Item)
+async def update_item(item_id: int, item_update: ItemUpdate):
+ """Update item (asynchronous)"""
+ updated_item = await crud.update_item(item_id, item_update)
+ if updated_item is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item with id {item_id} not found"
+ )
+ return updated_item
+
+@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
+async def delete_item(item_id: int):
+ """Delete item (asynchronous)"""
+ deleted = await crud.delete_item(item_id)
+ if not deleted:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item with id {item_id} not found"
+ )
+```
+
+## Step 4: Running Server and Testing
+
+Navigate to the project directory and run the server:
+
+
+
+```console
+$ cd async-todo-api
+$ fastkit runserver
+Starting FastAPI server at 127.0.0.1:8000...
+
+INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
+INFO: Started reloader process [12345] using WatchFiles
+INFO: Started server process [12346]
+INFO: Waiting for application startup.
+INFO: Application startup complete.
+```
+
+
+
+### Performance Testing
+
+Let's verify the performance of asynchronous processing. Try sending multiple requests simultaneously:
+
+**Concurrent Request Testing (Python Script)**
+
+```python
+import asyncio
+import aiohttp
+import time
+
+async def create_item(session, item_data):
+ async with session.post("http://127.0.0.1:8000/items/", json=item_data) as response:
+ return await response.json()
+
+async def test_concurrent_requests():
+ start_time = time.time()
+
+ items_to_create = [
+ {"name": f"Item {i}", "description": f"Description {i}", "price": i * 10, "tax": i}
+ for i in range(1, 11) # Create 10 items concurrently
+ ]
+
+ async with aiohttp.ClientSession() as session:
+ tasks = [create_item(session, item) for item in items_to_create]
+ results = await asyncio.gather(*tasks)
+
+ end_time = time.time()
+ print(f"Created 10 items in: {end_time - start_time:.2f} seconds")
+ print(f"Number of items created: {len(results)}")
+
+# Run test
+# asyncio.run(test_concurrent_requests())
+```
+
+## Step 5: Writing Asynchronous Tests
+
+### Test Configuration (`tests/conftest.py`)
+
+```python
+import pytest
+import asyncio
+from httpx import AsyncClient
+from src.main import app
+
+@pytest.fixture(scope="session")
+def event_loop():
+ """Event loop configuration"""
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+@pytest.fixture
+async def async_client():
+ """Asynchronous test client"""
+ async with AsyncClient(app=app, base_url="http://test") as client:
+ yield client
+```
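+
+Depending on your httpx version: the `app=` shortcut used above is deprecated in recent httpx releases and removed in newer ones. If it is unavailable, wire the app through an explicit `ASGITransport` instead (a sketch):
+
+```python
+import pytest
+from httpx import ASGITransport, AsyncClient
+
+from src.main import app
+
+@pytest.fixture
+async def async_client():
+    """Asynchronous test client using an explicit ASGI transport"""
+    transport = ASGITransport(app=app)
+    async with AsyncClient(transport=transport, base_url="http://test") as client:
+        yield client
+```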
+
+### Asynchronous Test Cases (`tests/test_items.py`)
+
+```python
+import pytest
+from httpx import AsyncClient
+
+@pytest.mark.asyncio
+async def test_create_item_async(async_client: AsyncClient):
+ """Asynchronous item creation test"""
+ item_data = {
+ "name": "Test Item",
+ "description": "Item for asynchronous testing",
+ "price": 100.0,
+ "tax": 10.0
+ }
+
+ response = await async_client.post("/items/", json=item_data)
+
+ assert response.status_code == 201
+ data = response.json()
+ assert data["name"] == item_data["name"]
+ assert data["price"] == item_data["price"]
+ assert "id" in data
+
+@pytest.mark.asyncio
+async def test_read_items_async(async_client: AsyncClient):
+ """Asynchronous item list retrieval test"""
+ response = await async_client.get("/items/")
+
+ assert response.status_code == 200
+ items = response.json()
+ assert isinstance(items, list)
+
+@pytest.mark.asyncio
+async def test_concurrent_operations(async_client: AsyncClient):
+ """Concurrent operations test"""
+ import asyncio
+
+ # Create multiple items concurrently
+ tasks = []
+ for i in range(5):
+ item_data = {
+ "name": f"ConcurrentItem{i}",
+ "description": f"Description{i}",
+ "price": i * 10,
+ "tax": i
+ }
+ task = async_client.post("/items/", json=item_data)
+ tasks.append(task)
+
+ responses = await asyncio.gather(*tasks)
+
+ # Verify all requests succeeded
+ for response in responses:
+ assert response.status_code == 201
+
+ # Verify created items
+ response = await async_client.get("/items/")
+ items = response.json()
+ assert len(items) >= 5
+```
+
+### Running Tests
+
+
+
+```console
+$ pytest tests/ -v --asyncio-mode=auto
+======================== test session starts ========================
+collected 8 items
+
+tests/test_items.py::test_create_item_async PASSED [ 12%]
+tests/test_items.py::test_read_items_async PASSED [ 25%]
+tests/test_items.py::test_read_item_async PASSED [ 37%]
+tests/test_items.py::test_update_item_async PASSED [ 50%]
+tests/test_items.py::test_delete_item_async PASSED [ 62%]
+tests/test_items.py::test_concurrent_operations PASSED [ 75%]
+tests/test_items.py::test_item_not_found_async PASSED [ 87%]
+tests/test_items.py::test_invalid_item_data_async PASSED [100%]
+
+======================== 8 passed in 0.24s ========================
+```
+
+
+
+## Step 6: Performance Monitoring and Optimization
+
+### Adding Response Time Measurement Middleware
+
+Let's add performance monitoring to `src/main.py`:
+
+```python
+import time
+from fastapi import FastAPI, Request
+from src.api.api import api_router
+from src.core.config import settings
+
+app = FastAPI(
+ title=settings.PROJECT_NAME,
+ version=settings.VERSION,
+ description=settings.DESCRIPTION,
+)
+
+@app.middleware("http")
+async def add_process_time_header(request: Request, call_next):
+ """Add request processing time to headers"""
+ start_time = time.time()
+ response = await call_next(request)
+ process_time = time.time() - start_time
+ response.headers["X-Process-Time"] = str(process_time)
+ return response
+
+app.include_router(api_router)
+
+@app.get("/")
+async def read_root():
+ return {"message": "Welcome to the Asynchronous Todo API!"}
+```
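+
+A quick way to confirm the middleware is active is to inspect the response headers with curl (the timing value shown is illustrative and will vary):
+
+```console
+$ curl -s -D - -o /dev/null http://127.0.0.1:8000/ | grep -i x-process-time
+x-process-time: 0.0012
+```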
+
+### Implementing Asynchronous Batch Processing
+
+Let's add batch endpoints for processing multiple items at once:
+
+```python
+# Add to src/api/routes/items.py
+
+@router.post("/batch", response_model=List[Item])
+async def create_items_batch(items: List[ItemCreate]):
+ """Create multiple items concurrently (batch processing)"""
+ import asyncio
+
+ # Execute all item creation tasks concurrently
+ tasks = [crud.create_item(item) for item in items]
+ created_items = await asyncio.gather(*tasks)
+
+ return created_items
+
+@router.get("/batch/{item_ids}")
+async def read_items_batch(item_ids: str):
+ """Retrieve multiple items concurrently (batch processing)"""
+ import asyncio
+
+    # Parse comma-separated IDs, rejecting anything that is not an integer
+    try:
+        ids = [int(id.strip()) for id in item_ids.split(",")]
+    except ValueError:
+        raise HTTPException(status_code=400, detail="item_ids must be comma-separated integers")
+
+ # Execute all item retrieval tasks concurrently
+ tasks = [crud.get_item(item_id) for item_id in ids]
+ items = await asyncio.gather(*tasks)
+
+ # Return only non-None items
+ return [item for item in items if item is not None]
+```
+
+### Batch Processing Testing
+
+
+
+```console
+# Batch creation test
+$ curl -X POST "http://127.0.0.1:8000/items/batch" \
+ -H "Content-Type: application/json" \
+ -d '[
+ {"name": "Item1", "description": "Description1", "price": 10.0, "tax": 1.0},
+ {"name": "Item2", "description": "Description2", "price": 20.0, "tax": 2.0},
+ {"name": "Item3", "description": "Description3", "price": 30.0, "tax": 3.0}
+ ]'
+
+# Batch retrieval test
+$ curl -X GET "http://127.0.0.1:8000/items/batch/1,2,3"
+```
+
+
+
+## Step 7: Advanced Asynchronous Patterns
+
+### Implementing Rate Limiting
+
+```python
+from collections import defaultdict
+from datetime import datetime, timedelta
+
+from fastapi import Request
+from fastapi.responses import JSONResponse
+
+class AsyncRateLimiter:
+ def __init__(self, max_requests: int = 100, window_seconds: int = 60):
+ self.max_requests = max_requests
+ self.window_seconds = window_seconds
+ self.requests = defaultdict(list)
+
+ async def is_allowed(self, client_ip: str) -> bool:
+ now = datetime.now()
+ cutoff = now - timedelta(seconds=self.window_seconds)
+
+ # remove old request records
+ self.requests[client_ip] = [
+ req_time for req_time in self.requests[client_ip]
+ if req_time > cutoff
+ ]
+
+ # check current request count
+ if len(self.requests[client_ip]) >= self.max_requests:
+ return False
+
+ # add current request record
+ self.requests[client_ip].append(now)
+ return True
+
+# global rate limiter instance
+rate_limiter = AsyncRateLimiter()
+
+@app.middleware("http")
+async def rate_limit_middleware(request: Request, call_next):
+ client_ip = request.client.host
+
+    if not await rate_limiter.is_allowed(client_ip):
+        # An HTTPException raised inside middleware bypasses the app's exception
+        # handlers, so return the 429 response directly instead
+        return JSONResponse(
+            status_code=429,
+            content={"detail": "Too many requests"}
+        )
+
+ response = await call_next(request)
+ return response
+```
+
+### Implementing Asynchronous Caching
+
+```python
+import asyncio
+from typing import Optional, Any
+from datetime import datetime, timedelta
+
+class AsyncCache:
+ def __init__(self):
+ self._cache = {}
+ self._expiry = {}
+
+ async def get(self, key: str) -> Optional[Any]:
+ # remove expired items
+ if key in self._expiry and datetime.now() > self._expiry[key]:
+ del self._cache[key]
+ del self._expiry[key]
+ return None
+
+ return self._cache.get(key)
+
+ async def set(self, key: str, value: Any, ttl_seconds: int = 300):
+ self._cache[key] = value
+ self._expiry[key] = datetime.now() + timedelta(seconds=ttl_seconds)
+
+ async def delete(self, key: str):
+ self._cache.pop(key, None)
+ self._expiry.pop(key, None)
+
+# global cache instance
+cache = AsyncCache()
+
+# modify CRUD methods to use the cache (add this method to AsyncItemCRUD)
+async def get_items_cached(self) -> List[Item]:
+    """Retrieve items using the cache"""
+    cache_key = "all_items"
+    cached_items = await cache.get(cache_key)
+
+    if cached_items is not None:  # an empty list is a valid cache hit too
+        return cached_items
+
+    # cache miss: read from the file and populate the cache
+    items = await self.get_items()
+    await cache.set(cache_key, items, ttl_seconds=60)  # 1-minute cache
+
+    return items
+```
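+
+One thing to watch: caching reads without invalidating on writes serves stale lists for up to the TTL. A minimal sketch of write-through invalidation, again assuming the method lives on `AsyncItemCRUD`:
+
+```python
+async def create_item_cached(self, item: ItemCreate) -> Item:
+    """Create an item and invalidate the cached list"""
+    new_item = await self.create_item(item)
+    await cache.delete("all_items")  # the next read repopulates the cache
+    return new_item
+```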
+
+## Step 8: Production Considerations
+
+### Managing Connection Pools
+
+```python
+# add to src/core/config.py
+class Settings(BaseSettings):
+ # ... existing settings ...
+
+ # asynchronous processing related settings
+ MAX_CONCURRENT_REQUESTS: int = 100
+ REQUEST_TIMEOUT: int = 30
+ CONNECTION_POOL_SIZE: int = 20
+
+settings = Settings()
+```
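+
+These settings are plain values; nothing enforces them by itself. One way to apply `MAX_CONCURRENT_REQUESTS` and `REQUEST_TIMEOUT` is a semaphore in a middleware, sketched below (an illustration, not template code; `app` is the FastAPI instance from `src/main.py`):
+
+```python
+import asyncio
+from fastapi import Request
+from fastapi.responses import JSONResponse
+
+from src.core.config import settings
+
+# allow at most MAX_CONCURRENT_REQUESTS requests in flight at once
+semaphore = asyncio.Semaphore(settings.MAX_CONCURRENT_REQUESTS)
+
+@app.middleware("http")
+async def limit_concurrency(request: Request, call_next):
+    async with semaphore:
+        try:
+            # cancel requests that exceed the configured timeout
+            return await asyncio.wait_for(call_next(request), timeout=settings.REQUEST_TIMEOUT)
+        except asyncio.TimeoutError:
+            return JSONResponse(status_code=504, content={"detail": "Request timeout"})
+```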
+
+### Improving Error Handling
+
+```python
+import asyncio
+import logging
+from typing import Any
+
+from fastapi import HTTPException
+
+logger = logging.getLogger(__name__)
+
+async def safe_async_operation(operation, *args, **kwargs) -> Any:
+ """Execute safe asynchronous operation"""
+ try:
+ return await operation(*args, **kwargs)
+ except asyncio.TimeoutError:
+ logger.error(f"Timeout in {operation.__name__}")
+ raise HTTPException(status_code=504, detail="Request timeout")
+ except Exception as e:
+ logger.error(f"Error in {operation.__name__}: {str(e)}")
+ raise HTTPException(status_code=500, detail="Internal server error")
+
+# usage example
+@router.get("/safe/{item_id}")
+async def read_item_safe(item_id: int):
+ return await safe_async_operation(crud.get_item, item_id)
+```
+
+## Next Steps
+
+You've completed building an asynchronous CRUD API! Next things to try:
+
+1. **[Database Integration](database-integration.md)** - Using PostgreSQL with asynchronous SQLAlchemy
+2. **[Docker Containerization](docker-deployment.md)** - Containerizing asynchronous applications
+3. **[Custom Response Handling](custom-response-handling.md)** - Advanced response formats and error handling
+
+
+
+## Summary
+
+In this tutorial, we used asynchronous FastAPI to:
+
+- ✅ Implement asynchronous CRUD operations
+- ✅ Optimize file I/O with aiofiles
+- ✅ Handle concurrent requests and run performance tests
+- ✅ Write and execute asynchronous tests
+- ✅ Implement batch processing and advanced asynchronous patterns
+- ✅ Address production considerations (caching, error handling, connection management)
+
+Mastering asynchronous processing enables you to build high-performance API servers!
diff --git a/docs/tutorial/basic-api-server.md b/docs/tutorial/basic-api-server.md
new file mode 100644
index 0000000..d81e509
--- /dev/null
+++ b/docs/tutorial/basic-api-server.md
@@ -0,0 +1,398 @@
+# Building a Basic API Server
+
+Learn how to quickly build a simple REST API server using FastAPI-fastkit. This tutorial is suitable for FastAPI beginners and covers creating basic CRUD APIs.
+
+## What You'll Learn in This Tutorial
+
+- Creating a basic API server with the `fastkit startdemo` command
+- Understanding FastAPI project structure
+- Using basic CRUD endpoints
+- API testing and documentation
+- Project expansion methods
+
+## Prerequisites
+
+- Python 3.12 or higher installed
+- FastAPI-fastkit installed (`pip install fastapi-fastkit`)
+- Basic Python knowledge
+
+## Step 1: Creating a Basic API Project
+
+Let's create a basic API server using the `fastapi-default` template.
+
+
+
+```console
+$ fastkit startdemo fastapi-default
+Enter the project name: my-first-api
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: My first FastAPI server
+Deploying FastAPI project using 'fastapi-default' template
+
+ Project Information
+┌──────────────┬────────────────────────────┐
+│ Project Name │ my-first-api │
+│ Author │ Developer Kim │
+│ Author Email │ developer@example.com │
+│ Description │ My first FastAPI server │
+└──────────────┴────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬───────────────────┐
+│ Dependency 1 │ fastapi │
+│ Dependency 2 │ uvicorn │
+│ Dependency 3 │ pydantic │
+│ Dependency 4 │ pydantic-settings │
+│ Dependency 5 │ python-dotenv │
+└──────────────┴───────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'my-first-api' from 'fastapi-default' has been created successfully!
+```
+
+
+
+## Step 2: Understanding the Generated Project Structure
+
+Let's examine the generated project structure:
+
+```
+my-first-api/
+├── README.md # Project documentation
+├── requirements.txt # Dependency package list
+├── setup.py # Package configuration
+├── scripts/
+│ └── run-server.sh # Server execution script
+├── src/ # Main source code
+│ ├── main.py # FastAPI application entry point
+│ ├── core/
+│ │ └── config.py # Configuration management
+│ ├── api/
+│ │ ├── api.py # API router collection
+│ │ └── routes/
+│ │ └── items.py # Item-related endpoints
+│ ├── schemas/
+│ │ └── items.py # Data model definitions
+│ ├── crud/
+│ │ └── items.py # Data processing logic
+│ └── mocks/
+│ └── mock_items.json # Test data
+└── tests/ # Test code
+ ├── __init__.py
+ ├── conftest.py
+ └── test_items.py
+```
+
+### Key File Descriptions
+
+- **`src/main.py`**: FastAPI application entry point
+- **`src/api/routes/items.py`**: Item-related API endpoint definitions
+- **`src/schemas/items.py`**: Request/response data structure definitions
+- **`src/crud/items.py`**: Database operation logic
+- **`src/mocks/mock_items.json`**: Sample data for development
+
+## Step 3: Running the Server
+
+Let's navigate to the generated project directory and run the server.
+
+
+
+```console
+$ cd my-first-api
+$ fastkit runserver
+Starting FastAPI server at 127.0.0.1:8000...
+
+INFO: Will watch for changes in these directories: ['/path/to/my-first-api']
+INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
+INFO: Started reloader process [12345] using WatchFiles
+INFO: Started server process [12346]
+INFO: Waiting for application startup.
+INFO: Application startup complete.
+```
+
+
+
+Once the server is successfully running, you can access the following URLs in your browser:
+
+- **API Server**: http://127.0.0.1:8000
+- **Swagger UI Documentation**: http://127.0.0.1:8000/docs
+- **ReDoc Documentation**: http://127.0.0.1:8000/redoc
+
+## Step 4: Exploring API Endpoints
+
+The generated API provides the following endpoints by default:
+
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| GET | `/items/` | Retrieve all items |
+| GET | `/items/{item_id}` | Retrieve specific item |
+| POST | `/items/` | Create new item |
+| PUT | `/items/{item_id}` | Update item |
+| DELETE | `/items/{item_id}` | Delete item |
+
+### Testing the API
+
+**1. Retrieving All Items**
+
+
+
+```console
+$ curl -X GET "http://127.0.0.1:8000/items/"
+[
+ {
+ "id": 1,
+ "name": "Laptop",
+ "description": "High-performance laptop",
+ "price": 999.99,
+ "tax": 99.99
+ },
+ {
+ "id": 2,
+ "name": "Mouse",
+ "description": "Wireless mouse",
+ "price": 29.99,
+ "tax": 2.99
+ }
+]
+```
+
+
+
+**2. Creating a New Item**
+
+
+
+```console
+$ curl -X POST "http://127.0.0.1:8000/items/" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "Keyboard",
+ "description": "Mechanical keyboard",
+ "price": 150.00,
+ "tax": 15.00
+ }'
+
+{
+ "id": 3,
+ "name": "Keyboard",
+ "description": "Mechanical keyboard",
+ "price": 150.0,
+ "tax": 15.0
+}
+```
+
+
+
+**3. Retrieving a Specific Item**
+
+
+
+```console
+$ curl -X GET "http://127.0.0.1:8000/items/1"
+{
+ "id": 1,
+ "name": "Laptop",
+ "description": "High-performance laptop",
+ "price": 999.99,
+ "tax": 99.99
+}
+```
+
+
+
+## Step 5: Testing API with Swagger UI
+
+Navigate to http://127.0.0.1:8000/docs in your browser to view the automatically generated API documentation.
+
+What you can do with Swagger UI:
+
+1. **View API Endpoints**: Visually see all available endpoints
+2. **Check Request/Response Schemas**: View input/output formats for each endpoint
+3. **Test APIs Directly**: Make actual API calls with the "Try it out" button
+4. **View Example Data**: See example request/response data for each endpoint
+
+### How to Use Swagger UI
+
+1. Click on the `/items/` GET endpoint
+2. Click the "Try it out" button
+3. Click the "Execute" button
+4. View the server response
+
+## Step 6: Understanding Code Structure
+
+### Main Application (`src/main.py`)
+
+```python
+from fastapi import FastAPI
+from src.api.api import api_router
+from src.core.config import settings
+
+app = FastAPI(
+ title=settings.PROJECT_NAME,
+ version=settings.VERSION,
+ description=settings.DESCRIPTION,
+)
+
+app.include_router(api_router)
+
+@app.get("/")
+def read_root():
+ return {"message": "Hello World"}
+```
+
+### Item Schema (`src/schemas/items.py`)
+
+```python
+from pydantic import BaseModel
+from typing import Optional
+
+class ItemBase(BaseModel):
+ name: str
+ description: Optional[str] = None
+ price: float
+ tax: Optional[float] = None
+
+class ItemCreate(ItemBase):
+ pass
+
+class ItemUpdate(ItemBase):
+ name: Optional[str] = None
+ price: Optional[float] = None
+
+class Item(ItemBase):
+ id: int
+
+ class Config:
+ from_attributes = True
+```
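+
+If you are on Pydantic v2 (which the `pydantic-settings` dependency implies), the inner `Config` class still works but is deprecated; the v2-native spelling would be the following sketch, reusing `ItemBase` from above:
+
+```python
+from pydantic import ConfigDict
+
+class Item(ItemBase):
+    id: int
+
+    model_config = ConfigDict(from_attributes=True)
+```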
+
+### CRUD Logic (`src/crud/items.py`)
+
+```python
+from typing import List, Optional
+from src.schemas.items import Item, ItemCreate, ItemUpdate
+
+class ItemCRUD:
+ def __init__(self):
+ self.items: List[Item] = []
+ self.next_id = 1
+
+ def create_item(self, item: ItemCreate) -> Item:
+ new_item = Item(id=self.next_id, **item.dict())
+ self.items.append(new_item)
+ self.next_id += 1
+ return new_item
+
+ def get_items(self) -> List[Item]:
+ return self.items
+
+ def get_item(self, item_id: int) -> Optional[Item]:
+ return next((item for item in self.items if item.id == item_id), None)
+```
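+
+The excerpt above shows only creation and reads; update and delete follow the same in-memory pattern. A sketch of how they can look (illustrative, not a verbatim copy of the template):
+
+```python
+# inside ItemCRUD (sketch)
+def update_item(self, item_id: int, item_update: ItemUpdate) -> Optional[Item]:
+    existing = self.get_item(item_id)
+    if existing is None:
+        return None
+    update_data = item_update.dict(exclude_unset=True)  # only fields the client sent
+    updated = existing.copy(update=update_data)
+    self.items = [updated if i.id == item_id else i for i in self.items]
+    return updated
+
+def delete_item(self, item_id: int) -> bool:
+    before = len(self.items)
+    self.items = [i for i in self.items if i.id != item_id]
+    return len(self.items) < before
+```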
+
+## Step 7: Expanding the Project
+
+### Adding New Routes
+
+You can add new endpoints using the `fastkit addroute` command:
+
+
+
+```console
+$ fastkit addroute user
+ Adding New Route
+┌──────────────────┬──────────────────────────────────────────┐
+│ Project │ my-first-api │
+│ Route Name │ user │
+│ Target Directory │ /path/to/my-first-api │
+└──────────────────┴──────────────────────────────────────────┘
+
+Do you want to add route 'user' to the current project? [Y/n]: y
+
+✨ Successfully added new route 'user' to the current project!
+```
+
+
+
+This command creates the following files:
+
+- `src/api/routes/user.py` - User-related endpoints
+- `src/schemas/user.py` - User data models
+- `src/crud/user.py` - User data processing logic
+
+### Customizing Environment Configuration
+
+You can modify the `src/core/config.py` file to change project settings:
+
+```python
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+ PROJECT_NAME: str = "My First API"
+ VERSION: str = "1.0.0"
+ DESCRIPTION: str = "My first FastAPI server"
+ API_V1_STR: str = "/api/v1"
+
+ class Config:
+ env_file = ".env"
+
+settings = Settings()
+```
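+
+Because `env_file = ".env"` is set, any of these fields can be overridden from a `.env` file in the project root, for example (hypothetical values):
+
+```
+PROJECT_NAME="My First API"
+VERSION="1.1.0"
+DESCRIPTION="My first FastAPI server"
+```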
+
+## Step 8: Running Tests
+
+The project includes basic tests:
+
+
+
+```console
+$ pytest tests/ -v
+======================== test session starts ========================
+collected 4 items
+
+tests/test_items.py::test_create_item PASSED [ 25%]
+tests/test_items.py::test_read_items PASSED [ 50%]
+tests/test_items.py::test_read_item PASSED [ 75%]
+tests/test_items.py::test_update_item PASSED [100%]
+
+======================== 4 passed in 0.15s ========================
+```
+
+
+
+## Next Steps
+
+You've completed building a basic API server! Next things to try:
+
+1. **[Building Asynchronous CRUD APIs](async-crud-api.md)** - Learn more complex asynchronous processing
+2. **[Database Integration](database-integration.md)** - Using PostgreSQL and SQLAlchemy
+3. **[Docker Containerization](docker-deployment.md)** - Preparing for production deployment
+4. **[Custom Response Handling](custom-response-handling.md)** - Advanced response format configuration
+
+## Troubleshooting
+
+### Common Issues
+
+**Q: The server won't start**
+A: Check that your virtual environment is activated and dependencies are properly installed.
+
+**Q: Cannot access API endpoints**
+A: Verify that the server is running normally and the port number (default: 8000) is correct.
+
+**Q: APIs don't appear in Swagger UI**
+A: Check that the router is properly included in `src/main.py`.
+
+## Summary
+
+In this tutorial, we used FastAPI-fastkit to:
+
+- ✅ Create a basic FastAPI project
+- ✅ Understand project structure
+- ✅ Use CRUD API endpoints
+- ✅ Explore API documentation and run tests
+- ✅ Expand the project with new routes
+
+Now that you've learned the basics of FastAPI, try taking on more complex projects!
diff --git a/docs/tutorial/custom-response-handling.md b/docs/tutorial/custom-response-handling.md
new file mode 100644
index 0000000..7a5e008
--- /dev/null
+++ b/docs/tutorial/custom-response-handling.md
@@ -0,0 +1,1393 @@
+# Custom Response Handling and Advanced API Design
+
+Learn how to implement consistent response formats, error handling, pagination, and custom OpenAPI documentation using FastAPI's advanced features. We'll implement enterprise-grade API design patterns using the `fastapi-custom-response` template.
+
+## What You'll Learn in This Tutorial
+
+- Designing standardized API response formats
+- Global exception handling and custom error responses
+- Implementing pagination systems
+- Filtering and sorting functionality
+- Customizing OpenAPI documentation
+- API version management
+- Response caching and optimization
+
+## Prerequisites
+
+- Completed the [Docker Containerization Tutorial](docker-deployment.md)
+- Understanding of REST API design principles
+- Knowledge of HTTP status codes
+- Basic concepts of OpenAPI/Swagger
+
+## The Importance of Standardized API Responses
+
+### Inconsistent vs Standardized Responses
+
+**Problematic response format:**
+```json
+// Success
+{"id": 1, "name": "item"}
+
+// Error
+{"detail": "Not found"}
+
+// List retrieval
+[{"id": 1}, {"id": 2}]
+```
+
+**Standardized response format:**
+```json
+// Success
+{
+ "success": true,
+ "data": {"id": 1, "name": "item"},
+ "message": "Item retrieved successfully",
+ "timestamp": "2024-01-01T12:00:00Z"
+}
+
+// Error
+{
+ "success": false,
+ "error": {
+ "code": "ITEM_NOT_FOUND",
+ "message": "Item not found",
+ "details": {"item_id": 123}
+ },
+ "timestamp": "2024-01-01T12:00:00Z"
+}
+```
+
+## Step 1: Creating a Custom Response Project
+
+Create a project using the `fastapi-custom-response` template:
+
+
+
+```console
+$ fastkit startdemo fastapi-custom-response
+Enter the project name: advanced-api-server
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: API server with advanced response handling
+Deploying FastAPI project using 'fastapi-custom-response' template
+
+ Project Information
+┌──────────────┬─────────────────────────────────────────────┐
+│ Project Name │ advanced-api-server │
+│ Author │ Developer Kim │
+│ Author Email │ developer@example.com │
+│ Description │ API server with advanced response handling │
+└──────────────┴─────────────────────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬───────────────────┐
+│ Dependency 1 │ fastapi │
+│ Dependency 2 │ uvicorn │
+│ Dependency 3 │ pydantic │
+│ Dependency 4 │ pydantic-settings │
+│ Dependency 5 │ aiofiles │
+│ Dependency 6 │ python-multipart │
+└──────────────┴───────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'advanced-api-server' from 'fastapi-custom-response' has been created successfully!
+```
+
+
+
+## Step 2: Analyzing Project Structure
+
+Let's examine the advanced features of the generated project:
+
+```
+advanced-api-server/
+├── src/
+│ ├── main.py # FastAPI application
+│ ├── schemas/
+│ │ ├── base.py # Base response schemas
+│ │ ├── items.py # Item schemas
+│ │ └── responses.py # Response format definitions
+│ ├── helper/
+│ │ ├── exceptions.py # Custom exception classes
+│ │ └── pagination.py # Pagination helpers
+│ ├── utils/
+│ │ ├── responses.py # Response utilities
+│ │ └── documents.py # OpenAPI documentation customization
+│ ├── api/
+│ │ └── routes/
+│ │ └── items.py # Advanced API endpoints
+│ ├── crud/
+│ │ └── items.py # CRUD logic
+│ └── core/
+│ └── config.py # Configuration
+└── tests/
+ └── test_responses.py # Response format tests
+```
+
+## Step 3: Implementing Standardized Response Schemas
+
+### Base Response Schema (`src/schemas/base.py`)
+
+```python
+from typing import Generic, TypeVar, Optional, Any, Dict, List
+from pydantic import BaseModel, Field
+from datetime import datetime
+from enum import Enum
+
+T = TypeVar('T')
+
+class ResponseStatus(str, Enum):
+ """Response status"""
+ SUCCESS = "success"
+ ERROR = "error"
+ WARNING = "warning"
+
+class ErrorDetail(BaseModel):
+ """Error detail information"""
+ code: str = Field(..., description="Error code")
+ message: str = Field(..., description="Error message")
+ field: Optional[str] = Field(None, description="Field where error occurred")
+ details: Optional[Dict[str, Any]] = Field(None, description="Additional error information")
+
+class BaseResponse(BaseModel, Generic[T]):
+ """Base response format"""
+ success: bool = Field(..., description="Request success status")
+ status: ResponseStatus = Field(..., description="Response status")
+ data: Optional[T] = Field(None, description="Response data")
+ message: Optional[str] = Field(None, description="Response message")
+ timestamp: datetime = Field(default_factory=datetime.utcnow, description="Response timestamp")
+ request_id: Optional[str] = Field(None, description="Request tracking ID")
+
+class ErrorResponse(BaseModel):
+ """Error response format"""
+ success: bool = Field(False, description="Request success status")
+ status: ResponseStatus = Field(ResponseStatus.ERROR, description="Response status")
+ error: ErrorDetail = Field(..., description="Error information")
+ timestamp: datetime = Field(default_factory=datetime.utcnow, description="Response timestamp")
+ request_id: Optional[str] = Field(None, description="Request tracking ID")
+
+class PaginationMeta(BaseModel):
+ """Pagination metadata"""
+ page: int = Field(..., ge=1, description="Current page")
+ size: int = Field(..., ge=1, le=100, description="Page size")
+ total: int = Field(..., ge=0, description="Total number of items")
+ pages: int = Field(..., ge=0, description="Total number of pages")
+ has_next: bool = Field(..., description="Whether next page exists")
+ has_prev: bool = Field(..., description="Whether previous page exists")
+
+class PaginatedResponse(BaseModel, Generic[T]):
+ """Paginated response"""
+ success: bool = Field(True, description="Request success status")
+ status: ResponseStatus = Field(ResponseStatus.SUCCESS, description="Response status")
+ data: List[T] = Field(..., description="Data list")
+ meta: PaginationMeta = Field(..., description="Pagination information")
+ message: Optional[str] = Field(None, description="Response message")
+ timestamp: datetime = Field(default_factory=datetime.utcnow, description="Response time")
+ request_id: Optional[str] = Field(None, description="Request tracking ID")
+
+class ValidationErrorDetail(BaseModel):
+ """Validation error detail"""
+ field: str = Field(..., description="Validation failed field")
+ message: str = Field(..., description="Error message")
+ invalid_value: Any = Field(..., description="Invalid value")
+
+class ValidationErrorResponse(BaseModel):
+ """Validation error response"""
+ success: bool = Field(False, description="Request success status")
+ status: ResponseStatus = Field(ResponseStatus.ERROR, description="Response status")
+ error: ErrorDetail = Field(..., description="Error information")
+ validation_errors: List[ValidationErrorDetail] = Field(..., description="Validation error list")
+ timestamp: datetime = Field(default_factory=datetime.utcnow, description="Response time")
+ request_id: Optional[str] = Field(None, description="Request tracking ID")
+```
+
+### Response Utility Functions (`src/utils/responses.py`)
+
+```python
+from typing import Any, Optional, List, TypeVar
+from fastapi import Request
+from fastapi.encoders import jsonable_encoder
+from fastapi.responses import JSONResponse
+import uuid
+
+from src.schemas.base import (
+ BaseResponse, ErrorResponse, PaginatedResponse,
+ ResponseStatus, ErrorDetail, PaginationMeta
+)
+
+T = TypeVar('T')
+
+def generate_request_id() -> str:
+ """Generate request tracking ID"""
+ return str(uuid.uuid4())
+
+def success_response(
+ data: Any = None,
+ message: Optional[str] = None,
+ request_id: Optional[str] = None,
+ status_code: int = 200
+) -> JSONResponse:
+ """Generate success response"""
+ response_data = BaseResponse[Any](
+ success=True,
+ status=ResponseStatus.SUCCESS,
+ data=data,
+ message=message or "Request processed successfully",
+ request_id=request_id or generate_request_id()
+ )
+
+    return JSONResponse(
+        status_code=status_code,
+        # encode first so datetime fields serialize to ISO strings
+        content=jsonable_encoder(response_data, exclude_none=True)
+    )
+
+def error_response(
+ error_code: str,
+ error_message: str,
+ details: Optional[dict] = None,
+ status_code: int = 400,
+ request_id: Optional[str] = None
+) -> JSONResponse:
+ """Generate error response"""
+ error_detail = ErrorDetail(
+ code=error_code,
+ message=error_message,
+ details=details
+ )
+
+ response_data = ErrorResponse(
+ error=error_detail,
+ request_id=request_id or generate_request_id()
+ )
+
+ return JSONResponse(
+ status_code=status_code,
+        content=jsonable_encoder(response_data, exclude_none=True)
+ )
+
+def paginated_response(
+ data: List[T],
+ page: int,
+ size: int,
+ total: int,
+ message: Optional[str] = None,
+ request_id: Optional[str] = None
+) -> JSONResponse:
+ """Generate paginated response"""
+ pages = (total + size - 1) // size # round up calculation
+ has_next = page < pages
+ has_prev = page > 1
+
+ meta = PaginationMeta(
+ page=page,
+ size=size,
+ total=total,
+ pages=pages,
+ has_next=has_next,
+ has_prev=has_prev
+ )
+
+ response_data = PaginatedResponse[T](
+ data=data,
+ meta=meta,
+ message=message or f"Page {page}/{pages} data retrieved",
+ request_id=request_id or generate_request_id()
+ )
+
+ return JSONResponse(
+ status_code=200,
+        content=jsonable_encoder(response_data, exclude_none=True)
+ )
+
+class ResponseHelper:
+ """Response helper class"""
+
+ @staticmethod
+ def created(data: Any, message: str = "Resource created successfully") -> JSONResponse:
+ return success_response(data=data, message=message, status_code=201)
+
+ @staticmethod
+ def updated(data: Any, message: str = "Resource updated successfully") -> JSONResponse:
+ return success_response(data=data, message=message, status_code=200)
+
+    @staticmethod
+    def deleted(message: str = "Resource deleted successfully") -> JSONResponse:
+        # 204 responses must not carry a body, so return 200 with the standard envelope
+        return success_response(data=None, message=message, status_code=200)
+
+ @staticmethod
+ def not_found(resource: str = "Resource") -> JSONResponse:
+ return error_response(
+ error_code="RESOURCE_NOT_FOUND",
+ error_message=f"{resource} not found",
+ status_code=404
+ )
+
+ @staticmethod
+ def bad_request(message: str = "Bad request") -> JSONResponse:
+ return error_response(
+ error_code="BAD_REQUEST",
+ error_message=message,
+ status_code=400
+ )
+
+ @staticmethod
+ def unauthorized(message: str = "Authentication required") -> JSONResponse:
+ return error_response(
+ error_code="UNAUTHORIZED",
+ error_message=message,
+ status_code=401
+ )
+
+ @staticmethod
+ def forbidden(message: str = "Permission denied") -> JSONResponse:
+ return error_response(
+ error_code="FORBIDDEN",
+ error_message=message,
+ status_code=403
+ )
+
+ @staticmethod
+    def server_error(message: str = "Internal server error occurred") -> JSONResponse:
+ return error_response(
+ error_code="INTERNAL_SERVER_ERROR",
+ error_message=message,
+ status_code=500
+ )
+```
+
+## Step 4: Custom Exception Handling System
+
+### Custom Exception Class (`src/helper/exceptions.py`)
+
+```python
+from typing import Optional, Dict, Any
+from fastapi import HTTPException
+
+class BaseAPIException(HTTPException):
+ """Base API exception class"""
+
+ def __init__(
+ self,
+ error_code: str,
+ message: str,
+ status_code: int = 400,
+ details: Optional[Dict[str, Any]] = None
+ ):
+ self.error_code = error_code
+ self.message = message
+ self.details = details or {}
+ super().__init__(status_code=status_code, detail=message)
+
+class ValidationException(BaseAPIException):
+ """Validation exception"""
+
+ def __init__(self, message: str, field: Optional[str] = None, details: Optional[Dict] = None):
+ super().__init__(
+ error_code="VALIDATION_ERROR",
+ message=message,
+ status_code=422,
+ details=details or {"field": field} if field else None
+ )
+
+class ResourceNotFoundException(BaseAPIException):
+ """Resource not found exception"""
+
+ def __init__(self, resource: str, resource_id: Any):
+ super().__init__(
+ error_code="RESOURCE_NOT_FOUND",
+ message=f"{resource}(ID: {resource_id}) not found",
+ status_code=404,
+ details={"resource": resource, "id": resource_id}
+ )
+
+class DuplicateResourceException(BaseAPIException):
+ """Duplicate resource exception"""
+
+ def __init__(self, resource: str, field: str, value: Any):
+ super().__init__(
+ error_code="DUPLICATE_RESOURCE",
+ message=f"{resource} {field} '{value}' already exists",
+ status_code=409,
+ details={"resource": resource, "field": field, "value": value}
+ )
+
+class BusinessLogicException(BaseAPIException):
+ """Business logic exception"""
+
+ def __init__(self, message: str, error_code: str = "BUSINESS_LOGIC_ERROR"):
+ super().__init__(
+ error_code=error_code,
+ message=message,
+ status_code=422
+ )
+
+class RateLimitException(BaseAPIException):
+    """Rate limit exception"""
+
+    def __init__(self, retry_after: int = 60):
+        super().__init__(
+            error_code="RATE_LIMIT_EXCEEDED",
+            message="Rate limit exceeded. Please try again later.",
+            status_code=429,
+            details={"retry_after": retry_after}
+        )
+
+class AuthenticationException(BaseAPIException):
+ """Authentication exception"""
+
+ def __init__(self, message: str = "Authentication required"):
+ super().__init__(
+ error_code="AUTHENTICATION_REQUIRED",
+ message=message,
+ status_code=401
+ )
+
+class AuthorizationException(BaseAPIException):
+ """Authorization exception"""
+
+ def __init__(self, message: str = "Permission denied"):
+ super().__init__(
+ error_code="INSUFFICIENT_PERMISSIONS",
+ message=message,
+ status_code=403
+ )
+```
+
+### Global Exception Handler (`src/main.py`)
+
+```python
+from fastapi import FastAPI, Request, status
+from fastapi.exceptions import RequestValidationError, HTTPException
+from fastapi.encoders import jsonable_encoder
+from fastapi.responses import JSONResponse
+from pydantic import ValidationError
+import logging
+import traceback
+
+from src.helper.exceptions import BaseAPIException
+from src.utils.responses import error_response, generate_request_id
+from src.schemas.base import ValidationErrorDetail, ValidationErrorResponse
+
+logger = logging.getLogger(__name__)
+
+app = FastAPI(
+ title="Advanced API Server",
+ description="API server with advanced response handling",
+ version="1.0.0"
+)
+
+@app.exception_handler(BaseAPIException)
+async def custom_api_exception_handler(request: Request, exc: BaseAPIException):
+ """Custom API exception handler"""
+ request_id = generate_request_id()
+
+ logger.error(
+ f"API Exception: {exc.error_code} - {exc.message}",
+ extra={
+ "request_id": request_id,
+ "path": request.url.path,
+ "method": request.method,
+ "details": exc.details
+ }
+ )
+
+ return error_response(
+ error_code=exc.error_code,
+ error_message=exc.message,
+ details=exc.details,
+ status_code=exc.status_code,
+ request_id=request_id
+ )
+
+@app.exception_handler(RequestValidationError)
+async def validation_exception_handler(request: Request, exc: RequestValidationError):
+ """Pydantic validation exception handler"""
+ request_id = generate_request_id()
+
+ validation_errors = []
+ for error in exc.errors():
+ field = ".".join(str(loc) for loc in error["loc"])
+ validation_errors.append(
+ ValidationErrorDetail(
+ field=field,
+ message=error["msg"],
+ invalid_value=error.get("input", "")
+ )
+ )
+
+ error_response_data = ValidationErrorResponse(
+ error={
+ "code": "VALIDATION_ERROR",
+ "message": "Input data validation failed",
+ "details": {"error_count": len(validation_errors)}
+ },
+ validation_errors=validation_errors,
+ request_id=request_id
+ )
+
+ logger.warning(
+ f"Validation Error: {len(validation_errors)} validation errors",
+ extra={
+ "request_id": request_id,
+ "path": request.url.path,
+ "method": request.method,
+ "errors": [err.dict() for err in validation_errors]
+ }
+ )
+
+ return JSONResponse(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+        content=jsonable_encoder(error_response_data, exclude_none=True)
+ )
+
+@app.exception_handler(HTTPException)
+async def http_exception_handler(request: Request, exc: HTTPException):
+ """HTTP exception handler"""
+ request_id = generate_request_id()
+
+ error_code_map = {
+ 400: "BAD_REQUEST",
+ 401: "UNAUTHORIZED",
+ 403: "FORBIDDEN",
+ 404: "NOT_FOUND",
+ 405: "METHOD_NOT_ALLOWED",
+ 500: "INTERNAL_SERVER_ERROR"
+ }
+
+ error_code = error_code_map.get(exc.status_code, "HTTP_ERROR")
+
+ return error_response(
+ error_code=error_code,
+ error_message=exc.detail,
+ status_code=exc.status_code,
+ request_id=request_id
+ )
+
+@app.exception_handler(Exception)
+async def general_exception_handler(request: Request, exc: Exception):
+ """General exception handler"""
+ request_id = generate_request_id()
+
+ logger.error(
+ f"Unhandled Exception: {type(exc).__name__} - {str(exc)}",
+ extra={
+ "request_id": request_id,
+ "path": request.url.path,
+ "method": request.method,
+ "traceback": traceback.format_exc()
+ }
+ )
+
+ return error_response(
+ error_code="INTERNAL_SERVER_ERROR",
+ error_message="Unexpected error occurred",
+ status_code=500,
+ request_id=request_id
+ )
+```
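+
+With these handlers registered, raising `ResourceNotFoundException("Item", 123)` from any route produces a 404 with a body along these lines (timestamp and request_id are illustrative):
+
+```json
+{
+  "success": false,
+  "status": "error",
+  "error": {
+    "code": "RESOURCE_NOT_FOUND",
+    "message": "Item (ID: 123) not found",
+    "details": {"resource": "Item", "id": 123}
+  },
+  "timestamp": "2024-01-01T12:00:00",
+  "request_id": "550e8400-e29b-41d4-a716-446655440000"
+}
+```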
+
+## Step 5: Advanced Pagination System
+
+### Pagination Helper (`src/helper/pagination.py`)
+
+```python
+from typing import List, Optional, Any, Dict, Callable
+from pydantic import BaseModel, Field
+from fastapi import Query
+from enum import Enum
+
+class SortOrder(str, Enum):
+ """Sort order"""
+ ASC = "asc"
+ DESC = "desc"
+
+class PaginationParams(BaseModel):
+ """Pagination parameters"""
+ page: int = Field(1, ge=1, description="Page number")
+ size: int = Field(20, ge=1, le=100, description="Page size")
+ sort_by: Optional[str] = Field(None, description="Sort field")
+ sort_order: SortOrder = Field(SortOrder.ASC, description="Sort order")
+
+class FilterParams(BaseModel):
+ """Filtering parameters"""
+ search: Optional[str] = Field(None, description="Search term")
+ category: Optional[str] = Field(None, description="Category")
+ status: Optional[str] = Field(None, description="Status")
+ date_from: Optional[str] = Field(None, description="Start date (YYYY-MM-DD)")
+ date_to: Optional[str] = Field(None, description="End date (YYYY-MM-DD)")
+
+def pagination_params(
+ page: int = Query(1, ge=1, description="Page number"),
+ size: int = Query(20, ge=1, le=100, description="Page size"),
+ sort_by: Optional[str] = Query(None, description="Sort field"),
+ sort_order: SortOrder = Query(SortOrder.ASC, description="Sort order")
+) -> PaginationParams:
+ """Pagination parameters dependency"""
+ return PaginationParams(
+ page=page,
+ size=size,
+ sort_by=sort_by,
+ sort_order=sort_order
+ )
+
+def filter_params(
+ search: Optional[str] = Query(None, description="Search term"),
+ category: Optional[str] = Query(None, description="Category"),
+ status: Optional[str] = Query(None, description="Status"),
+ date_from: Optional[str] = Query(None, description="Start date (YYYY-MM-DD)"),
+ date_to: Optional[str] = Query(None, description="End date (YYYY-MM-DD)")
+) -> FilterParams:
+ """Filtering parameters dependency"""
+ return FilterParams(
+ search=search,
+ category=category,
+ status=status,
+ date_from=date_from,
+ date_to=date_to
+ )
+
+class AdvancedPaginator:
+ """Advanced pagination class"""
+
+ def __init__(self, data: List[Any], pagination: PaginationParams, filters: FilterParams):
+ self.data = data
+ self.pagination = pagination
+ self.filters = filters
+ self.filtered_data = self._apply_filters()
+ self.sorted_data = self._apply_sorting()
+
+ def _apply_filters(self) -> List[Any]:
+ """Apply filters"""
+ filtered = self.data
+
+ if self.filters.search:
+ # Filter by search term (example: search in name or description fields)
+ search_term = self.filters.search.lower()
+ filtered = [
+ item for item in filtered
+ if (hasattr(item, 'name') and search_term in item.name.lower()) or
+ (hasattr(item, 'description') and item.description and search_term in item.description.lower())
+ ]
+
+ if self.filters.category:
+ filtered = [item for item in filtered if hasattr(item, 'category') and item.category == self.filters.category]
+
+ if self.filters.status:
+ filtered = [item for item in filtered if hasattr(item, 'status') and item.status == self.filters.status]
+
+ # Implement date filtering (if date field exists)
+ if self.filters.date_from or self.filters.date_to:
+ from datetime import datetime
+ filtered = self._apply_date_filter(filtered)
+
+ return filtered
+
+ def _apply_date_filter(self, data: List[Any]) -> List[Any]:
+ """Apply date filter"""
+ from datetime import datetime
+
+ if not self.filters.date_from and not self.filters.date_to:
+ return data
+
+ filtered = []
+ for item in data:
+ if not hasattr(item, 'created_at'):
+ continue
+
+ item_date = item.created_at.date() if hasattr(item.created_at, 'date') else item.created_at
+
+ if self.filters.date_from:
+ start_date = datetime.strptime(self.filters.date_from, "%Y-%m-%d").date()
+ if item_date < start_date:
+ continue
+
+ if self.filters.date_to:
+ end_date = datetime.strptime(self.filters.date_to, "%Y-%m-%d").date()
+ if item_date > end_date:
+ continue
+
+ filtered.append(item)
+
+ return filtered
+
+ def _apply_sorting(self) -> List[Any]:
+ """Apply sorting"""
+ if not self.pagination.sort_by:
+ return self.filtered_data
+
+ reverse = self.pagination.sort_order == SortOrder.DESC
+
+ try:
+ return sorted(
+ self.filtered_data,
+ key=lambda x: getattr(x, self.pagination.sort_by, 0),
+ reverse=reverse
+ )
+ except (AttributeError, TypeError):
+ # Return original data if sort field is not found or cannot be sorted
+ return self.filtered_data
+
+ def get_page(self) -> tuple[List[Any], int]:
+ """Return current page data and total count"""
+ total = len(self.sorted_data)
+ start = (self.pagination.page - 1) * self.pagination.size
+ end = start + self.pagination.size
+
+ page_data = self.sorted_data[start:end]
+ return page_data, total
+
+ def get_metadata(self) -> Dict[str, Any]:
+ """Return pagination metadata"""
+ total = len(self.sorted_data)
+ pages = (total + self.pagination.size - 1) // self.pagination.size
+
+ return {
+ "page": self.pagination.page,
+ "size": self.pagination.size,
+ "total": total,
+ "pages": pages,
+ "has_next": self.pagination.page < pages,
+ "has_prev": self.pagination.page > 1,
+ "filters_applied": {
+ "search": self.filters.search,
+ "category": self.filters.category,
+ "status": self.filters.status,
+ "date_range": f"{self.filters.date_from} ~ {self.filters.date_to}" if self.filters.date_from or self.filters.date_to else None
+ },
+ "sorting": {
+ "field": self.pagination.sort_by,
+ "order": self.pagination.sort_order
+ } if self.pagination.sort_by else None
+ }
+```
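+
+Once wired into an endpoint (Step 6 below does this for the item list), the pagination, sorting, and filter parameters combine directly in the query string, for example:
+
+```console
+$ curl "http://127.0.0.1:8000/items/?page=2&size=10&sort_by=price&sort_order=desc&search=laptop"
+```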
+
+## Step 6: Implementing Advanced API Endpoints
+
+### Item API Router (`src/api/routes/items.py`)
+
+```python
+from typing import List, Optional
+from fastapi import APIRouter, Depends, HTTPException, Query, Path, BackgroundTasks
+from fastapi.responses import JSONResponse
+
+from src.schemas.items import Item, ItemCreate, ItemUpdate, ItemResponse
+from src.helper.pagination import pagination_params, filter_params, PaginationParams, FilterParams, AdvancedPaginator
+from src.helper.exceptions import ResourceNotFoundException, DuplicateResourceException, ValidationException
+from src.utils.responses import success_response, paginated_response, ResponseHelper
+from src.crud.items import ItemCRUD
+
+router = APIRouter(prefix="/items", tags=["items"])
+crud = ItemCRUD()
+
+@router.post("/", response_model=dict, status_code=201)
+async def create_item(
+ item_create: ItemCreate,
+ background_tasks: BackgroundTasks
+) -> JSONResponse:
+ """
+ Create a new item
+
+ - **name**: Item name (required)
+ - **description**: Item description (optional)
+ - **price**: Price (required, 0 or greater)
+ - **category**: Category (optional)
+ """
+ # Check for duplicates
+ existing_item = await crud.get_by_name(item_create.name)
+ if existing_item:
+ raise DuplicateResourceException("Item", "name", item_create.name)
+
+ # Business logic validation
+ if item_create.price < 0:
+ raise ValidationException("Price must be 0 or greater", "price")
+
+ # Create item
+ created_item = await crud.create(item_create)
+
+ # Background tasks (e.g. sending notifications, logging, etc.)
+ background_tasks.add_task(send_creation_notification, created_item.id)
+
+ return ResponseHelper.created(
+ data=created_item.dict(),
+ message=f"Item '{created_item.name}' created successfully"
+ )
+
+@router.get("/", response_model=dict)
+async def list_items(
+ pagination: PaginationParams = Depends(pagination_params),
+ filters: FilterParams = Depends(filter_params)
+) -> JSONResponse:
+ """
+ Get item list (pagination, filtering, sorting supported)
+
+ **Pagination:**
+ - page: Page number (default: 1)
+ - size: Page size (default: 20, maximum: 100)
+
+ **Sorting:**
+ - sort_by: Sort field (name, price, created_at, etc.)
+ - sort_order: Sort order (asc, desc)
+
+ **Filtering:**
+ - search: Search term (search in name or description fields)
+ - category: Category filter
+ - status: Status filter
+ - date_from: Start date (YYYY-MM-DD)
+ - date_to: End date (YYYY-MM-DD)
+ """
+ # Get all items
+ all_items = await crud.get_all()
+
+ # Apply advanced pagination
+ paginator = AdvancedPaginator(all_items, pagination, filters)
+ page_data, total = paginator.get_page()
+
+ # Include additional metadata in response
+ metadata = paginator.get_metadata()
+
+ # Create custom message
+ message = f"Total {total} items, {len(page_data)} items retrieved"
+ if filters.search:
+ message += f" (Search term: '{filters.search}')"
+
+ return paginated_response(
+ data=[item.dict() for item in page_data],
+ page=pagination.page,
+ size=pagination.size,
+ total=total,
+ message=message
+ )
+
+@router.get("/search/advanced", response_model=dict)
+async def advanced_search(
+ q: str = Query(..., min_length=1, description="Search term"),
+ fields: List[str] = Query(["name", "description"], description="Search fields"),
+ exact_match: bool = Query(False, description="Exact match"),
+ case_sensitive: bool = Query(False, description="Case sensitive"),
+ pagination: PaginationParams = Depends(pagination_params)
+) -> JSONResponse:
+ """
+ Advanced search functionality
+
+ - **q**: Search term (required)
+ - **fields**: Search fields list
+ - **exact_match**: Exact match
+ - **case_sensitive**: Case sensitive
+ """
+ results = await crud.advanced_search(
+ query=q,
+ fields=fields,
+ exact_match=exact_match,
+ case_sensitive=case_sensitive
+ )
+
+ # Apply pagination
+ total = len(results)
+ start = (pagination.page - 1) * pagination.size
+ end = start + pagination.size
+ page_data = results[start:end]
+
+ return paginated_response(
+ data=[item.dict() for item in page_data],
+ page=pagination.page,
+ size=pagination.size,
+ total=total,
+ message=f"'{q}' search results: {total} items"
+ )
+
+@router.get("/{item_id}", response_model=dict)
+async def get_item(
+ item_id: int = Path(..., gt=0, description="Item ID")
+) -> JSONResponse:
+ """Get specific item"""
+ item = await crud.get_by_id(item_id)
+ if not item:
+ raise ResourceNotFoundException("Item", item_id)
+
+ return success_response(
+ data=item.dict(),
+ message=f"Item '{item.name}' retrieved successfully"
+ )
+
+@router.put("/{item_id}", response_model=dict)
+async def update_item(
+    item_update: ItemUpdate,
+    item_id: int = Path(..., gt=0, description="Item ID")
+) -> JSONResponse:
+ """Update item"""
+ existing_item = await crud.get_by_id(item_id)
+ if not existing_item:
+ raise ResourceNotFoundException("Item", item_id)
+
+ # Check for duplicate name (with other items)
+ if item_update.name and item_update.name != existing_item.name:
+ duplicate = await crud.get_by_name(item_update.name)
+ if duplicate:
+ raise DuplicateResourceException("Item", "name", item_update.name)
+
+ updated_item = await crud.update(item_id, item_update)
+
+ return ResponseHelper.updated(
+ data=updated_item.dict(),
+ message=f"Item '{updated_item.name}' updated successfully"
+ )
+
+@router.delete("/{item_id}", response_model=dict, status_code=204)
+async def delete_item(
+ item_id: int = Path(..., gt=0, description="Item ID"),
+ force: bool = Query(False, description="Force delete")
+) -> JSONResponse:
+ """Delete item"""
+ item = await crud.get_by_id(item_id)
+ if not item:
+ raise ResourceNotFoundException("Item", item_id)
+
+ # Validation before deletion (e.g. related orders)
+ if not force and await crud.has_related_orders(item_id):
+ raise ValidationException(
+ "Related orders exist, cannot be deleted. Use force=true to force delete"
+ )
+
+ await crud.delete(item_id)
+
+ return ResponseHelper.deleted(
+ message=f"Item '{item.name}' deleted successfully"
+ )
+
+@router.post("/bulk", response_model=dict)
+async def bulk_create_items(
+ items: List[ItemCreate],
+ skip_duplicates: bool = Query(False, description="Skip duplicates")
+) -> JSONResponse:
+ """Bulk create items"""
+ if len(items) > 100:
+ raise ValidationException("Maximum 100 items can be created at once")
+
+ created_items = []
+ skipped_items = []
+ errors = []
+
+ for i, item_create in enumerate(items):
+ try:
+ # Check for duplicates
+ existing = await crud.get_by_name(item_create.name)
+ if existing:
+ if skip_duplicates:
+ skipped_items.append({"index": i, "name": item_create.name, "reason": "Duplicate name"})
+ continue
+ else:
+ errors.append({"index": i, "name": item_create.name, "error": "Duplicate name"})
+ continue
+
+ created_item = await crud.create(item_create)
+ created_items.append(created_item)
+
+ except Exception as e:
+ errors.append({"index": i, "name": item_create.name, "error": str(e)})
+
+ result = {
+ "created_count": len(created_items),
+ "skipped_count": len(skipped_items),
+ "error_count": len(errors),
+ "created_items": [item.dict() for item in created_items],
+ "skipped_items": skipped_items,
+ "errors": errors
+ }
+
+ message = f"{len(created_items)} items created"
+ if skipped_items:
+ message += f", {len(skipped_items)} skipped"
+ if errors:
+ message += f", {len(errors)} errors"
+
+ return success_response(data=result, message=message)
+
+async def send_creation_notification(item_id: int):
+ """Item creation notification (background task)"""
+ # In actual implementation, send notification via email, Slack, etc.
+ import asyncio
+ await asyncio.sleep(1) # Simulation
+ print(f"Item {item_id} creation notification sent")
+```
+
+## Step 7: OpenAPI documentation customization
+
+### OpenAPI documentation customization (`src/utils/documents.py`)
+
+```python
+from fastapi import FastAPI
+from fastapi.openapi.utils import get_openapi
+from typing import Dict, Any
+
+def custom_openapi(app: FastAPI) -> Dict[str, Any]:
+ """Create custom OpenAPI schema"""
+ if app.openapi_schema:
+ return app.openapi_schema
+
+ openapi_schema = get_openapi(
+ title=app.title,
+ version=app.version,
+ description=app.description,
+ routes=app.routes,
+ )
+
+ # Add custom information
+ openapi_schema["info"].update({
+ "contact": {
+ "name": "API Support",
+ "url": "https://example.com/support",
+ "email": "support@example.com"
+ },
+ "license": {
+ "name": "MIT",
+ "url": "https://opensource.org/licenses/MIT"
+ },
+ "termsOfService": "https://example.com/terms"
+ })
+
+ # Add server information
+ openapi_schema["servers"] = [
+ {
+ "url": "https://api.example.com",
+ "description": "Production server"
+ },
+ {
+ "url": "https://staging-api.example.com",
+ "description": "Staging server"
+ },
+ {
+ "url": "http://localhost:8000",
+ "description": "Development server"
+ }
+ ]
+
+ # Add common response schema
+ openapi_schema["components"]["schemas"].update({
+ "SuccessResponse": {
+ "type": "object",
+ "properties": {
+ "success": {"type": "boolean", "example": True},
+ "status": {"type": "string", "example": "success"},
+ "data": {"type": "object"},
+ "message": {"type": "string", "example": "Request processed successfully"},
+ "timestamp": {"type": "string", "format": "date-time"},
+ "request_id": {"type": "string", "example": "123e4567-e89b-12d3-a456-426614174000"}
+ }
+ },
+ "ErrorResponse": {
+ "type": "object",
+ "properties": {
+ "success": {"type": "boolean", "example": False},
+ "status": {"type": "string", "example": "error"},
+ "error": {
+ "type": "object",
+ "properties": {
+ "code": {"type": "string", "example": "RESOURCE_NOT_FOUND"},
+ "message": {"type": "string", "example": "Resource not found"},
+ "details": {"type": "object"}
+ }
+ },
+ "timestamp": {"type": "string", "format": "date-time"},
+ "request_id": {"type": "string", "example": "123e4567-e89b-12d3-a456-426614174000"}
+ }
+ }
+ })
+
+ # Add tag grouping and description
+ openapi_schema["tags"] = [
+ {
+ "name": "items",
+ "description": "Item management API",
+ "externalDocs": {
+ "description": "More information",
+ "url": "https://example.com/docs/items"
+ }
+ },
+ {
+ "name": "health",
+ "description": "System status check API"
+ }
+ ]
+
+ # Add security schema
+ openapi_schema["components"]["securitySchemes"] = {
+ "BearerAuth": {
+ "type": "http",
+ "scheme": "bearer",
+ "bearerFormat": "JWT"
+ },
+ "ApiKeyAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": "X-API-Key"
+ }
+ }
+
+ app.openapi_schema = openapi_schema
+ return app.openapi_schema
+
+def setup_docs(app: FastAPI):
+    """Set up documentation"""
+    # Overriding app.openapi makes /docs, /redoc, and /openapi.json all
+    # serve the customized schema; FastAPI already exposes those routes,
+    # so no extra endpoint is needed.
+    app.openapi = lambda: custom_openapi(app)
+
+    # Note: docs_url and redoc_url only take effect when passed to the
+    # FastAPI() constructor, e.g. FastAPI(docs_url="/docs", redoc_url="/redoc");
+    # assigning them after creation has no effect.
+```
+
+### Apply to main application (`src/main.py` addition)
+
+```python
+from fastapi import Request
+
+from src.utils.documents import setup_docs
+from src.api.routes import items
+
+# Include router
+app.include_router(items.router, prefix="/api/v1")
+
+# Apply documentation setup
+setup_docs(app)
+
+# Add request ID middleware
+@app.middleware("http")
+async def add_request_id(request: Request, call_next):
+    request_id = generate_request_id()  # helper from the response utilities defined earlier in this tutorial
+ request.state.request_id = request_id
+
+ response = await call_next(request)
+ response.headers["X-Request-ID"] = request_id
+
+ return response
+```
+
+## Step 8: Implementing caching system
+
+### Response caching (`src/utils/cache.py`)
+
+```python
+from typing import Optional, Any, Dict
+from functools import wraps
+import asyncio
+import json
+import hashlib
+from datetime import datetime, timedelta
+
+class MemoryCache:
+ """Memory-based cache"""
+
+ def __init__(self):
+ self._cache: Dict[str, Dict[str, Any]] = {}
+
+ async def get(self, key: str) -> Optional[Any]:
+ """Get value from cache"""
+ if key not in self._cache:
+ return None
+
+ item = self._cache[key]
+ if datetime.utcnow() > item["expires_at"]:
+ del self._cache[key]
+ return None
+
+ return item["value"]
+
+ async def set(self, key: str, value: Any, ttl_seconds: int = 300):
+ """Save value to cache"""
+ self._cache[key] = {
+ "value": value,
+ "expires_at": datetime.utcnow() + timedelta(seconds=ttl_seconds),
+ "created_at": datetime.utcnow()
+ }
+
+ async def delete(self, key: str):
+ """Delete value from cache"""
+ self._cache.pop(key, None)
+
+ async def clear(self):
+ """Delete all cache"""
+ self._cache.clear()
+
+ def get_stats(self) -> Dict[str, Any]:
+ """Cache statistics"""
+ now = datetime.utcnow()
+ valid_items = [
+ item for item in self._cache.values()
+ if now <= item["expires_at"]
+ ]
+
+ return {
+ "total_items": len(self._cache),
+ "valid_items": len(valid_items),
+ "expired_items": len(self._cache) - len(valid_items),
+ "memory_usage_mb": len(str(self._cache)) / 1024 / 1024
+ }
+
+# Global cache instance
+cache = MemoryCache()
+
+def cache_response(ttl_seconds: int = 300, key_prefix: str = ""):
+ """Response caching decorator"""
+ def decorator(func):
+ @wraps(func)
+ async def wrapper(*args, **kwargs):
+ # Generate cache key
+ cache_key = generate_cache_key(func.__name__, args, kwargs, key_prefix)
+
+            # Get from cache (compare against None so falsy values can be cached too)
+            cached_response = await cache.get(cache_key)
+            if cached_response is not None:
+                return cached_response
+
+ # Execute function
+ response = await func(*args, **kwargs)
+
+ # Cache response
+ await cache.set(cache_key, response, ttl_seconds)
+
+ return response
+ return wrapper
+ return decorator
+
+def generate_cache_key(func_name: str, args: tuple, kwargs: dict, prefix: str = "") -> str:
+ """Generate cache key"""
+ # Generate unique key based on function name and arguments
+ key_data = {
+ "function": func_name,
+ "args": str(args),
+ "kwargs": sorted(kwargs.items())
+ }
+
+ key_string = json.dumps(key_data, sort_keys=True)
+ key_hash = hashlib.md5(key_string.encode()).hexdigest()
+
+ return f"{prefix}:{func_name}:{key_hash}" if prefix else f"{func_name}:{key_hash}"
+
+# Cache management endpoints. This module has no `app` object, so expose a
+# router and include it from src/main.py: app.include_router(cache_router)
+from fastapi import APIRouter
+
+from src.utils.responses import success_response
+
+cache_router = APIRouter(prefix="/admin/cache", tags=["cache"])
+
+@cache_router.get("/stats")
+async def get_cache_stats():
+    """Get cache statistics"""
+    stats = cache.get_stats()
+    return success_response(data=stats, message="Cache statistics retrieved")
+
+@cache_router.delete("/clear")
+async def clear_cache():
+    """Clear the entire cache"""
+    await cache.clear()
+    return success_response(message="Cache cleared successfully")
+```
+
+### Caching example
+
+```python
+# Apply caching to src/api/routes/items.py
+
+from src.utils.cache import cache_response
+
+@router.get("/", response_model=dict)
+@cache_response(ttl_seconds=60, key_prefix="items_list") # 1 minute caching
+async def list_items(
+ pagination: PaginationParams = Depends(pagination_params),
+ filters: FilterParams = Depends(filter_params)
+) -> JSONResponse:
+ # ... existing code ...
+
+@router.get("/{item_id}", response_model=dict)
+@cache_response(ttl_seconds=300, key_prefix="item_detail") # 5 minute caching
+async def get_item(item_id: int = Path(..., gt=0)) -> JSONResponse:
+ # ... existing code ...
+```
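+
+Cached reads go stale as soon as the data changes, so write paths should invalidate the affected entries. The sketch below adds a prefix-based invalidation helper; `delete_by_prefix` is an assumed addition, not part of the template code above:
+
+```python
+# Assumed addition to the MemoryCache class in src/utils/cache.py
+class MemoryCache:
+    # ... existing methods ...
+
+    async def delete_by_prefix(self, prefix: str) -> None:
+        """Delete every cache entry whose key starts with the given prefix"""
+        stale_keys = [key for key in self._cache if key.startswith(prefix)]
+        for key in stale_keys:
+            del self._cache[key]
+
+# In write endpoints such as create_item or update_item, after a successful write:
+# await cache.delete_by_prefix("items_list")   # drop cached list pages
+# await cache.delete_by_prefix("item_detail")  # drop cached detail pages
+```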
+
+## Step 9: API test
+
+### Run server and basic test
+
+
+
+```console
+$ cd advanced-api-server
+$ fastkit runserver
+Starting FastAPI server at 127.0.0.1:8000...
+
+# Custom response format test
+$ curl -X POST "http://localhost:8000/api/v1/items/" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "Advanced notebook",
+ "description": "Notebook with latest technology",
+ "price": 2500000,
+ "category": "electronics"
+ }'
+
+{
+ "success": true,
+ "status": "success",
+ "data": {
+ "id": 1,
+ "name": "Advanced notebook",
+ "description": "Notebook with latest technology",
+ "price": 2500000,
+ "category": "electronics",
+ "created_at": "2024-01-01T12:00:00Z"
+ },
+ "message": "Item 'Advanced notebook' created successfully",
+ "timestamp": "2024-01-01T12:00:00.123456Z",
+ "request_id": "123e4567-e89b-12d3-a456-426614174000"
+}
+
+# Pagination and filtering test
+$ curl "http://localhost:8000/api/v1/items/?page=1&size=10&search=notebook&sort_by=price&sort_order=desc"
+
+# Advanced search test
+$ curl "http://localhost:8000/api/v1/items/search/advanced?q=notebook&fields=name&fields=description&exact_match=false"
+
+# Error response test
+$ curl "http://localhost:8000/api/v1/items/999"
+
+{
+ "success": false,
+ "status": "error",
+ "error": {
+ "code": "RESOURCE_NOT_FOUND",
+ "message": "Item (ID: 999) not found",
+ "details": {
+ "resource": "Item",
+ "id": 999
+ }
+ },
+ "timestamp": "2024-01-01T12:00:00.123456Z",
+ "request_id": "123e4567-e89b-12d3-a456-426614174000"
+}
+```
+
+
+
+### OpenAPI document check
+
+Browse to http://localhost:8000/docs to view the customized API documentation.
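+
+You can also fetch the raw schema to confirm the custom fields (contact, servers, tags, security schemes) were applied:
+
+```console
+$ curl -s http://localhost:8000/openapi.json | python -m json.tool | head -20
+```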
+
+## Next Steps
+
+You've completed the custom response handling system! Next things to try:
+
+1. **[MCP Integration](mcp-integration.md)** - Implementing Model Context Protocol
+
+## Summary
+
+In this tutorial, we implemented an advanced response handling system:
+
+- ✅ Designed standardized API response formats
+- ✅ Global exception handling and custom error responses
+- ✅ Advanced pagination and filtering systems
+- ✅ OpenAPI documentation customization
+- ✅ Response caching and performance optimization
+- ✅ Request tracking system
+- ✅ Background task processing
+- ✅ Batch operation APIs
+
+Now you can implement all the core features of enterprise-grade API servers!
diff --git a/docs/tutorial/database-integration.md b/docs/tutorial/database-integration.md
new file mode 100644
index 0000000..9afb0bb
--- /dev/null
+++ b/docs/tutorial/database-integration.md
@@ -0,0 +1,1027 @@
+# Database Integration (PostgreSQL + SQLAlchemy)
+
+Build a FastAPI application using PostgreSQL database and SQLAlchemy ORM that can be used in a real production environment. In this tutorial, we'll implement a complete database integration system using the `fastapi-psql-orm` template.
+
+## What You'll Learn in This Tutorial
+
+- PostgreSQL database setup and integration
+- Data modeling with SQLAlchemy ORM
+- Database migrations using Alembic
+- Development environment setup with Docker Compose
+- Database connection pool management
+- Transaction processing and data integrity
+
+## Prerequisites
+
+- Completed the [Asynchronous CRUD API Tutorial](async-crud-api.md)
+- Docker and Docker Compose installed
+- Basic PostgreSQL knowledge
+- Understanding of SQLAlchemy ORM basic concepts
+
+## Why PostgreSQL and SQLAlchemy?
+
+### JSON Files vs PostgreSQL Comparison
+
+| Category | JSON Files | PostgreSQL |
+|----------|------------|------------|
+| **Performance** | Limited | High-performance indexing |
+| **Concurrency** | File locking issues | Transaction support |
+| **Scalability** | Memory limited | Large-scale data processing |
+| **Integrity** | Not guaranteed | ACID properties guaranteed |
+| **Queries** | Need to load all data | Complex query support |
+| **Backup** | File copying | Complete backup/recovery |
+
+## Step 1: Creating PostgreSQL + ORM Project
+
+Create a project using the `fastapi-psql-orm` template:
+
+
+
+```console
+$ fastkit startdemo fastapi-psql-orm
+Enter the project name: todo-postgres-api
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: Todo management API using PostgreSQL
+Deploying FastAPI project using 'fastapi-psql-orm' template
+
+ Project Information
+┌──────────────┬─────────────────────────────────────────┐
+│ Project Name │ todo-postgres-api │
+│ Author │ Developer Kim │
+│ Author Email │ developer@example.com │
+│ Description │ Todo management API using PostgreSQL │
+└──────────────┴─────────────────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬────────────────┐
+│ Dependency 1 │ fastapi │
+│ Dependency 2 │ uvicorn │
+│ Dependency 3 │ sqlalchemy │
+│ Dependency 4 │ alembic │
+│ Dependency 5 │ psycopg2 │
+│ Dependency 6 │ asyncpg │
+│ Dependency 7 │ sqlmodel │
+└──────────────┴────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'todo-postgres-api' from 'fastapi-psql-orm' has been created successfully!
+```
+
+
+
+## Step 2: Analyzing Project Structure
+
+The generated project provides a complete database integration environment:
+
+```
+todo-postgres-api/
+├── docker-compose.yml # PostgreSQL container configuration
+├── Dockerfile # Application container
+├── alembic.ini # Alembic configuration
+├── template-config.yml # Template configuration
+├── scripts/
+│ ├── pre-start.sh # Pre-start initialization
+│ └── test.sh # Test execution script
+├── src/
+│ ├── main.py # FastAPI application
+│ ├── core/
+│ │ ├── config.py # Environment configuration
+│ │ └── db.py # Database connection setup
+│ ├── api/
+│ │ ├── deps.py # Dependency injection
+│ │ └── routes/
+│ │ └── items.py # API endpoints
+│ ├── crud/
+│ │ └── items.py # Database operations
+│ ├── schemas/
+│ │ └── items.py # Pydantic models
+│ ├── utils/
+│ │ ├── backend_pre_start.py # Backend initialization
+│ │ ├── init_data.py # Initial data loading
+│ │ └── tests_pre_start.py # Test preparation
+│ └── alembic/
+│ ├── env.py # Alembic environment configuration
+│ └── versions/ # Migration files
+└── tests/
+ ├── conftest.py # Test configuration
+ └── test_items.py # API tests
+```
+
+### Core Components
+
+1. **SQLModel**: SQLAlchemy + Pydantic integration
+2. **Alembic**: Database schema migration
+3. **asyncpg**: Asynchronous PostgreSQL driver
+4. **Docker Compose**: Development environment containerization
+
+## Step 3: Understanding Database Configuration
+
+### Database Connection Setup (`src/core/db.py`)
+
+```python
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
+from sqlalchemy.orm import sessionmaker
+from sqlmodel import SQLModel
+
+from src.core.config import settings
+
+# Create asynchronous PostgreSQL engine
+engine = create_async_engine(
+ settings.DATABASE_URL,
+ echo=settings.DEBUG, # Output SQL logs
+ pool_size=20, # Connection pool size
+ max_overflow=0, # Number of additional connections allowed
+ pool_pre_ping=True, # Check connection status
+)
+
+# Asynchronous session factory
+AsyncSessionLocal = sessionmaker(
+ autocommit=False,
+ autoflush=False,
+ bind=engine,
+ class_=AsyncSession,
+ expire_on_commit=False,
+)
+
+async def create_tables():
+ """Create database tables"""
+ async with engine.begin() as conn:
+ await conn.run_sync(SQLModel.metadata.create_all)
+
+async def get_session() -> AsyncSession:
+ """Provide database session (for dependency injection)"""
+ async with AsyncSessionLocal() as session:
+ try:
+ yield session
+ finally:
+ await session.close()
+```
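+
+The template manages schema changes with Alembic (see Step 8), but for quick local experiments you can call `create_tables()` at application startup. A minimal sketch, assuming it is wired into `src/main.py` through a lifespan handler:
+
+```python
+# src/main.py (development-only shortcut; prefer Alembic migrations in production)
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+
+from src.core.db import create_tables
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    await create_tables()  # create any missing tables before serving traffic
+    yield
+
+app = FastAPI(lifespan=lifespan)
+```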
+
+### Environment Configuration (`src/core/config.py`)
+
+```python
+from pydantic_settings import BaseSettings
+from typing import Optional
+
+class Settings(BaseSettings):
+ PROJECT_NAME: str = "Todo PostgreSQL API"
+ VERSION: str = "1.0.0"
+ DESCRIPTION: str = "Todo management API using PostgreSQL"
+
+ # Database configuration
+ POSTGRES_SERVER: str = "localhost"
+ POSTGRES_USER: str = "postgres"
+ POSTGRES_PASSWORD: str = "password"
+ POSTGRES_DB: str = "todoapp"
+ POSTGRES_PORT: int = 5432
+
+ # Test database
+ TEST_DATABASE_URL: Optional[str] = None
+
+ # Debug mode
+ DEBUG: bool = False
+
+ @property
+ def DATABASE_URL(self) -> str:
+ """Generate PostgreSQL connection URL"""
+ return (
+ f"postgresql+asyncpg://{self.POSTGRES_USER}:"
+ f"{self.POSTGRES_PASSWORD}@{self.POSTGRES_SERVER}:"
+ f"{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
+ )
+
+ class Config:
+ env_file = ".env"
+
+settings = Settings()
+```
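+
+Because `Settings` reads from a `.env` file, each value can be overridden per environment without code changes. An example `.env` (values are illustrative):
+
+```
+POSTGRES_SERVER=localhost
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=change-me
+POSTGRES_DB=todoapp
+POSTGRES_PORT=5432
+DEBUG=true
+```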
+
+## Step 4: Define data model
+
+### Data model using SQLModel (`src/schemas/items.py`)
+
+```python
+from sqlmodel import SQLModel, Field
+from typing import Optional
+from datetime import datetime
+
+# Define common fields
+class ItemBase(SQLModel):
+ name: str = Field(index=True, max_length=100)
+ description: Optional[str] = Field(default=None, max_length=500)
+ price: float = Field(gt=0, description="Price must be greater than 0")
+ tax: Optional[float] = Field(default=None, ge=0)
+ is_active: bool = Field(default=True)
+
+# Database table model
+class Item(ItemBase, table=True):
+ __tablename__ = "items"
+
+ id: Optional[int] = Field(default=None, primary_key=True)
+ created_at: datetime = Field(default_factory=datetime.utcnow)
+ updated_at: Optional[datetime] = Field(default=None)
+
+    # Example payload shown in the OpenAPI docs
+ class Config:
+ schema_extra = {
+ "example": {
+ "name": "notebook",
+ "description": "High-performance gaming notebook",
+ "price": 1500000.0,
+ "tax": 150000.0,
+ "is_active": True
+ }
+ }
+
+# API request/response model
+class ItemCreate(ItemBase):
+ pass
+
+class ItemUpdate(SQLModel):
+ name: Optional[str] = Field(default=None, max_length=100)
+ description: Optional[str] = Field(default=None, max_length=500)
+ price: Optional[float] = Field(default=None, gt=0)
+ tax: Optional[float] = Field(default=None, ge=0)
+ is_active: Optional[bool] = Field(default=None)
+
+class ItemResponse(ItemBase):
+ id: int
+ created_at: datetime
+ updated_at: Optional[datetime]
+```
+
+## Step 5: Implement CRUD operations
+
+### Database CRUD logic (`src/crud/items.py`)
+
+```python
+from typing import List, Optional
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, update, delete
+from sqlalchemy.orm import selectinload
+from datetime import datetime
+
+from src.schemas.items import Item, ItemCreate, ItemUpdate
+
+class ItemCRUD:
+ def __init__(self, db: AsyncSession):
+ self.db = db
+
+ async def create(self, item_create: ItemCreate) -> Item:
+ """Create new item"""
+ db_item = Item(**item_create.dict())
+
+ self.db.add(db_item)
+ await self.db.commit()
+ await self.db.refresh(db_item)
+
+ return db_item
+
+ async def get_by_id(self, item_id: int) -> Optional[Item]:
+ """Get item by ID"""
+ statement = select(Item).where(Item.id == item_id)
+ result = await self.db.execute(statement)
+ return result.scalar_one_or_none()
+
+ async def get_many(
+ self,
+ skip: int = 0,
+ limit: int = 100,
+ active_only: bool = True
+ ) -> List[Item]:
+ """Get multiple items (pagination supported)"""
+ statement = select(Item)
+
+ if active_only:
+ statement = statement.where(Item.is_active == True)
+
+ statement = statement.offset(skip).limit(limit)
+ result = await self.db.execute(statement)
+ return result.scalars().all()
+
+ async def update(self, item_id: int, item_update: ItemUpdate) -> Optional[Item]:
+ """Update item"""
+        # Prepare update data; an empty .values() call would raise, so
+        # return the item unchanged when no fields were provided
+        update_data = item_update.dict(exclude_unset=True)
+        if not update_data:
+            return await self.get_by_id(item_id)
+        update_data["updated_at"] = datetime.utcnow()
+
+ # Execute update
+ statement = (
+ update(Item)
+ .where(Item.id == item_id)
+ .values(**update_data)
+ .returning(Item)
+ )
+
+ result = await self.db.execute(statement)
+ await self.db.commit()
+
+ return result.scalar_one_or_none()
+
+ async def delete(self, item_id: int) -> bool:
+ """Delete item (soft delete)"""
+ statement = (
+ update(Item)
+ .where(Item.id == item_id)
+ .values(is_active=False, updated_at=datetime.utcnow())
+ )
+
+ result = await self.db.execute(statement)
+ await self.db.commit()
+
+ return result.rowcount > 0
+
+ async def hard_delete(self, item_id: int) -> bool:
+ """Delete item completely"""
+ statement = delete(Item).where(Item.id == item_id)
+ result = await self.db.execute(statement)
+ await self.db.commit()
+
+ return result.rowcount > 0
+
+ async def search(self, query: str) -> List[Item]:
+ """Search item (name, description)"""
+ statement = select(Item).where(
+ (Item.name.ilike(f"%{query}%")) |
+ (Item.description.ilike(f"%{query}%"))
+ ).where(Item.is_active == True)
+
+ result = await self.db.execute(statement)
+ return result.scalars().all()
+
+ async def get_total_count(self, active_only: bool = True) -> int:
+ """Get total item count"""
+ from sqlalchemy import func
+
+ statement = select(func.count(Item.id))
+ if active_only:
+ statement = statement.where(Item.is_active == True)
+
+ result = await self.db.execute(statement)
+ return result.scalar()
+```
+
+## Step 6: Implement API endpoints
+
+### Dependency injection setup (`src/api/deps.py`)
+
+```python
+from typing import AsyncGenerator
+from fastapi import Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from src.core.db import get_session
+from src.crud.items import ItemCRUD
+
+async def get_db(
+    session: AsyncSession = Depends(get_session),
+) -> AsyncGenerator[AsyncSession, None]:
+    """Database session dependency (delegates to get_session so that
+    dependency_overrides on get_session, as used in the tests, also apply here)"""
+    yield session
+
+def get_item_crud(db: AsyncSession = Depends(get_db)) -> ItemCRUD:
+ """Item CRUD dependency"""
+ return ItemCRUD(db)
+```
+
+### API router implementation (`src/api/routes/items.py`)
+
+```python
+from typing import List
+from fastapi import APIRouter, Depends, HTTPException, Query, status
+
+from src.api.deps import get_item_crud
+from src.crud.items import ItemCRUD
+from src.schemas.items import Item, ItemCreate, ItemUpdate, ItemResponse
+
+router = APIRouter()
+
+@router.post("/", response_model=ItemResponse, status_code=status.HTTP_201_CREATED)
+async def create_item(
+ item_create: ItemCreate,
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Create new item"""
+ return await crud.create(item_create)
+
+@router.get("/", response_model=List[ItemResponse])
+async def read_items(
+ skip: int = Query(0, ge=0, description="Skip items"),
+ limit: int = Query(100, ge=1, le=1000, description="Maximum items to retrieve"),
+ active_only: bool = Query(True, description="Only active items"),
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Get item list (pagination supported)"""
+ return await crud.get_many(skip=skip, limit=limit, active_only=active_only)
+
+@router.get("/search", response_model=List[ItemResponse])
+async def search_items(
+ q: str = Query(..., min_length=1, description="Search term"),
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Search item"""
+ return await crud.search(q)
+
+@router.get("/count")
+async def get_items_count(
+ active_only: bool = Query(True, description="Only active items"),
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Get total item count"""
+ count = await crud.get_total_count(active_only)
+ return {"total": count}
+
+@router.get("/{item_id}", response_model=ItemResponse)
+async def read_item(
+ item_id: int,
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Get specific item"""
+ item = await crud.get_by_id(item_id)
+ if not item:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item ID {item_id} not found"
+ )
+ return item
+
+@router.put("/{item_id}", response_model=ItemResponse)
+async def update_item(
+ item_id: int,
+ item_update: ItemUpdate,
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Update item"""
+ updated_item = await crud.update(item_id, item_update)
+ if not updated_item:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item ID {item_id} not found"
+ )
+ return updated_item
+
+@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
+async def delete_item(
+ item_id: int,
+ hard_delete: bool = Query(False, description="Complete delete"),
+ crud: ItemCRUD = Depends(get_item_crud)
+):
+ """Delete item"""
+ if hard_delete:
+ deleted = await crud.hard_delete(item_id)
+ else:
+ deleted = await crud.delete(item_id)
+
+ if not deleted:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Item ID {item_id} not found"
+ )
+```
+
+## Step 7: Run Docker container
+
+### Check Docker Compose setup (`docker-compose.yml`)
+
+```yaml
+version: '3.8'
+
+services:
+ db:
+ image: postgres:15
+ restart: always
+ environment:
+ POSTGRES_DB: todoapp
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: password
+ ports:
+ - "5432:5432"
+ volumes:
+ - postgres_data:/var/lib/postgresql/data
+
+ app:
+ build: .
+ restart: always
+ ports:
+ - "8000:8000"
+ environment:
+ POSTGRES_SERVER: db
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: todoapp
+ depends_on:
+ - db
+ volumes:
+ - ./src:/app/src
+
+volumes:
+ postgres_data:
+```
+
+### Run container
+
+
+
+```console
+$ cd todo-postgres-api
+
+# Start service in background
+$ docker-compose up -d
+Creating network "todo-postgres-api_default" with the default driver
+Creating volume "todo-postgres-api_postgres_data" with default driver
+Pulling db (postgres:15)...
+Creating todo-postgres-api_db_1 ... done
+Building app
+Creating todo-postgres-api_app_1 ... done
+
+# Check service status
+$ docker-compose ps
+ Name Command State Ports
+-------------------------------------------------------------------------------------
+todo-postgres-api_app_1 uvicorn src.main:app --host=0.0.0.0 --port=8000 Up 0.0.0.0:8000->8000/tcp
+todo-postgres-api_db_1 docker-entrypoint.sh postgres Up 0.0.0.0:5432->5432/tcp
+
+# Check log
+$ docker-compose logs app
+```
+
+
+
+## Step 8: Database migration
+
+### Create initial migration using Alembic
+
+
+
+```console
+# Run migration inside container
+$ docker-compose exec app alembic revision --autogenerate -m "Create items table"
+INFO [alembic.runtime.migration] Context impl PostgresqlImpl.
+INFO [alembic.runtime.migration] Will assume transactional DDL.
+INFO [alembic.autogenerate.compare] Detected added table 'items'
+Generating migration script /app/src/alembic/versions/001_create_items_table.py ... done
+
+# Apply migration
+$ docker-compose exec app alembic upgrade head
+INFO [alembic.runtime.migration] Context impl PostgresqlImpl.
+INFO [alembic.runtime.migration] Will assume transactional DDL.
+INFO [alembic.runtime.migration] Running upgrade -> 001, Create items table
+```
+
+
+
+### Check migration file
+
+Check the created migration file:
+
+```python
+# src/alembic/versions/001_create_items_table.py
+"""Create items table
+
+Revision ID: 001
+Revises:
+Create Date: 2024-01-01 12:00:00.000000
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+# revision identifiers
+revision = '001'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('items',
+ sa.Column('name', sqlmodel.sql.sqltypes.AutoString(length=100), nullable=False),
+ sa.Column('description', sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True),
+ sa.Column('price', sa.Float(), nullable=False),
+ sa.Column('tax', sa.Float(), nullable=True),
+ sa.Column('is_active', sa.Boolean(), nullable=False),
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('created_at', sa.DateTime(), nullable=False),
+ sa.Column('updated_at', sa.DateTime(), nullable=True),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_items_name'), 'items', ['name'], unique=False)
+ # ### end Alembic commands ###
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_index(op.f('ix_items_name'), table_name='items')
+ op.drop_table('items')
+ # ### end Alembic commands ###
+```
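+
+To verify which revision the database is currently on, you can query Alembic from inside the container:
+
+```console
+$ docker-compose exec app alembic current
+001 (head)
+```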
+
+## Step 9: API test
+
+### Basic CRUD test
+
+
+
+```console
+# Create new item
+$ curl -X POST "http://localhost:8000/items/" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "MacBook Pro",
+ "description": "M2 chipset-equipped high-performance notebook",
+ "price": 2500000,
+ "tax": 250000
+ }'
+
+{
+ "id": 1,
+ "name": "MacBook Pro",
+ "description": "M2 chipset-equipped high-performance notebook",
+ "price": 2500000.0,
+ "tax": 250000.0,
+ "is_active": true,
+ "created_at": "2024-01-01T12:00:00.123456",
+ "updated_at": null
+}
+
+# Get item list
+$ curl "http://localhost:8000/items/"
+
+# Get item list with pagination
+$ curl "http://localhost:8000/items/?skip=0&limit=10"
+
+# Search item
+$ curl "http://localhost:8000/items/search?q=MacBook"
+
+# Get item count
+$ curl "http://localhost:8000/items/count"
+{"total": 1}
+```
+
+
+
+### Advanced query feature test
+
+
+
+```console
+# Get item list with inactive items
+$ curl "http://localhost:8000/items/?active_only=false"
+
+# Update item
+$ curl -X PUT "http://localhost:8000/items/1" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "price": 2300000,
+ "tax": 230000
+ }'
+
+# Soft delete item
+$ curl -X DELETE "http://localhost:8000/items/1"
+
+# Hard delete item
+$ curl -X DELETE "http://localhost:8000/items/1?hard_delete=true"
+```
+
+
+
+## Step 10: Advanced database features
+
+### Transaction processing
+
+```python
+# Add to src/crud/items.py
+
+from sqlalchemy.exc import SQLAlchemyError
+
+async def create_items_batch(self, items_create: List[ItemCreate]) -> List[Item]:
+ """Create multiple items in a transaction"""
+ created_items = []
+
+ try:
+ for item_create in items_create:
+ db_item = Item(**item_create.dict())
+ self.db.add(db_item)
+ created_items.append(db_item)
+
+ await self.db.commit()
+
+ # Refresh all items
+ for item in created_items:
+ await self.db.refresh(item)
+
+ return created_items
+
+ except SQLAlchemyError:
+ await self.db.rollback()
+ raise
+```
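+
+A route exposing the batch operation could look like the sketch below; the `/batch` path is illustrative and not part of the template:
+
+```python
+# Illustrative addition to src/api/routes/items.py
+@router.post("/batch", response_model=List[ItemResponse], status_code=status.HTTP_201_CREATED)
+async def create_items_batch_endpoint(
+    items_create: List[ItemCreate],
+    crud: ItemCRUD = Depends(get_item_crud)
+):
+    """Create multiple items atomically; every item rolls back if any insert fails"""
+    return await crud.create_items_batch(items_create)
+```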
+
+### Relational data modeling
+
+```python
+# Add to src/schemas/items.py
+
+from sqlmodel import Relationship
+
+class Category(SQLModel, table=True):
+ __tablename__ = "categories"
+
+ id: Optional[int] = Field(default=None, primary_key=True)
+ name: str = Field(max_length=50, unique=True)
+ description: Optional[str] = None
+
+ # Set relationship
+ items: List["Item"] = Relationship(back_populates="category")
+
+class Item(ItemBase, table=True):
+ __tablename__ = "items"
+
+ id: Optional[int] = Field(default=None, primary_key=True)
+ created_at: datetime = Field(default_factory=datetime.utcnow)
+ updated_at: Optional[datetime] = Field(default=None)
+
+ # Add foreign key
+ category_id: Optional[int] = Field(foreign_key="categories.id")
+
+ # Set relationship
+ category: Optional[Category] = Relationship(back_populates="items")
+```
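+
+With the relationship defined, related rows can be eager-loaded in a single query using `selectinload` (already imported in `src/crud/items.py`), avoiding the N+1 query problem. A minimal sketch:
+
+```python
+# Sketch of an eager-loading method for ItemCRUD in src/crud/items.py
+async def get_many_with_category(self, skip: int = 0, limit: int = 100) -> List[Item]:
+    """Get items with their category loaded up front (avoids N+1 queries)"""
+    statement = (
+        select(Item)
+        .options(selectinload(Item.category))
+        .offset(skip)
+        .limit(limit)
+    )
+    result = await self.db.execute(statement)
+    return result.scalars().all()
+```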
+
+### Index optimization
+
+```python
+# Add to src/schemas/items.py
+
+from sqlalchemy import Index
+
+class Item(ItemBase, table=True):
+ __tablename__ = "items"
+
+ # ... existing fields ...
+
+ # Set composite index
+ __table_args__ = (
+ Index('ix_items_price_active', 'price', 'is_active'),
+ Index('ix_items_created_at', 'created_at'),
+ Index('ix_items_name_description', 'name', 'description'), # For full text search
+ )
+```
+
+## Step 11: Write tests
+
+### Database test setup (`tests/conftest.py`)
+
+```python
+import pytest
+import asyncio
+from httpx import AsyncClient
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
+from sqlalchemy.orm import sessionmaker
+from sqlmodel import SQLModel
+
+from src.main import app
+from src.core.db import get_session
+from src.core.config import settings
+
+# Test database engine
+test_engine = create_async_engine(
+ settings.TEST_DATABASE_URL or "sqlite+aiosqlite:///./test.db",
+ echo=False,
+)
+
+TestSessionLocal = sessionmaker(
+ autocommit=False,
+ autoflush=False,
+ bind=test_engine,
+ class_=AsyncSession,
+ expire_on_commit=False,
+)
+
+@pytest.fixture(scope="session")
+def event_loop():
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+@pytest.fixture(scope="function")
+async def db_session():
+ # Create test table
+ async with test_engine.begin() as conn:
+ await conn.run_sync(SQLModel.metadata.create_all)
+
+ # Provide session
+ async with TestSessionLocal() as session:
+ yield session
+
+ # Delete table after test
+ async with test_engine.begin() as conn:
+ await conn.run_sync(SQLModel.metadata.drop_all)
+
+@pytest.fixture
+async def client(db_session: AsyncSession):
+ # Override dependency
+ async def override_get_session():
+ yield db_session
+
+ app.dependency_overrides[get_session] = override_get_session
+
+ async with AsyncClient(app=app, base_url="http://test") as client:
+ yield client
+
+ app.dependency_overrides.clear()
+```
+
+### Integration test (`tests/test_items.py`)
+
+```python
+import pytest
+from httpx import AsyncClient
+
+@pytest.mark.asyncio
+async def test_create_and_read_item(client: AsyncClient):
+ """Integration test for creating and reading item"""
+ # Create item
+ item_data = {
+ "name": "Test Item",
+ "description": "Database test",
+ "price": 50000,
+ "tax": 5000
+ }
+
+ response = await client.post("/items/", json=item_data)
+ assert response.status_code == 201
+
+ created_item = response.json()
+ assert created_item["name"] == item_data["name"]
+ assert "id" in created_item
+ assert "created_at" in created_item
+
+ # Get created item
+ item_id = created_item["id"]
+ response = await client.get(f"/items/{item_id}")
+ assert response.status_code == 200
+
+ retrieved_item = response.json()
+ assert retrieved_item["id"] == item_id
+ assert retrieved_item["name"] == item_data["name"]
+
+@pytest.mark.asyncio
+async def test_item_pagination(client: AsyncClient):
+ """Test pagination feature"""
+ # Create multiple items
+ for i in range(15):
+ item_data = {
+ "name": f"Item {i}",
+ "description": f"Description {i}",
+ "price": i * 1000,
+ "tax": i * 100
+ }
+ await client.post("/items/", json=item_data)
+
+ # Get first page
+ response = await client.get("/items/?skip=0&limit=10")
+ assert response.status_code == 200
+
+ items = response.json()
+ assert len(items) == 10
+
+ # Get second page
+ response = await client.get("/items/?skip=10&limit=10")
+ assert response.status_code == 200
+
+ items = response.json()
+ assert len(items) == 5
+
+@pytest.mark.asyncio
+async def test_item_search(client: AsyncClient):
+ """Test search feature"""
+ # Create test items
+ items = [
+ {"name": "iPhone 15", "description": "Latest smartphone", "price": 1200000, "tax": 120000},
+ {"name": "Galaxy S24", "description": "Samsung flagship", "price": 1100000, "tax": 110000},
+ {"name": "MacBook Air", "description": "Apple notebook", "price": 1500000, "tax": 150000},
+ ]
+
+ for item in items:
+ await client.post("/items/", json=item)
+
+ # Search "iPhone"
+ response = await client.get("/items/search?q=iPhone")
+ assert response.status_code == 200
+
+ results = response.json()
+ assert len(results) == 1
+ assert results[0]["name"] == "iPhone 15"
+
+ # Search "smartphone" (description)
+ response = await client.get("/items/search?q=smartphone")
+ assert response.status_code == 200
+
+ results = response.json()
+ assert len(results) == 1
+ assert results[0]["description"] == "Latest smartphone"
+```
+
+### Run tests
+
+
+
+```console
+# Run tests inside container
+$ docker-compose exec app python -m pytest tests/ -v
+======================== test session starts ========================
+collected 12 items
+
+tests/test_items.py::test_create_and_read_item PASSED [ 8%]
+tests/test_items.py::test_item_pagination PASSED [16%]
+tests/test_items.py::test_item_search PASSED [25%]
+tests/test_items.py::test_update_item PASSED [33%]
+tests/test_items.py::test_delete_item PASSED [41%]
+tests/test_items.py::test_soft_delete PASSED [50%]
+tests/test_items.py::test_item_not_found PASSED [58%]
+tests/test_items.py::test_invalid_item_data PASSED [66%]
+tests/test_items.py::test_database_transaction PASSED [75%]
+tests/test_items.py::test_concurrent_operations PASSED [83%]
+tests/test_items.py::test_item_count PASSED [91%]
+tests/test_items.py::test_batch_operations PASSED [100%]
+
+======================== 12 passed in 2.34s ========================
+```
+
+
+
+## Step 12: Considerations for production deployment
+
+### Optimize connection pool
+
+```python
+# Add to src/core/config.py
+
+class Settings(BaseSettings):
+ # ... existing settings ...
+
+ # Database connection pool settings
+ DB_POOL_SIZE: int = 20
+ DB_MAX_OVERFLOW: int = 0
+ DB_POOL_PRE_PING: bool = True
+ DB_POOL_RECYCLE: int = 300 # 5 minutes
+
+ # Query timeout
+ DB_QUERY_TIMEOUT: int = 30
+
+ # Connection retry settings
+ DB_RETRY_ATTEMPTS: int = 3
+ DB_RETRY_DELAY: int = 1
+```
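+
+These settings only take effect once they are passed to the engine. A sketch of the corresponding change in `src/core/db.py`:
+
+```python
+# src/core/db.py: wire the pool settings into the engine (sketch)
+engine = create_async_engine(
+    settings.DATABASE_URL,
+    echo=settings.DEBUG,
+    pool_size=settings.DB_POOL_SIZE,
+    max_overflow=settings.DB_MAX_OVERFLOW,
+    pool_pre_ping=settings.DB_POOL_PRE_PING,
+    pool_recycle=settings.DB_POOL_RECYCLE,  # replace connections older than this many seconds
+)
+```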
+
+### Database monitoring
+
+```python
+# Add to src/core/db.py
+
+import logging
+from sqlalchemy import event
+from sqlalchemy.engine import Engine
+
+logger = logging.getLogger(__name__)
+
+@event.listens_for(Engine, "before_cursor_execute")
+def receive_before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
+ """Log before query execution"""
+ context._query_start_time = time.time()
+
+@event.listens_for(Engine, "after_cursor_execute")
+def receive_after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
+ """Log after query execution"""
+ total = time.time() - context._query_start_time
+ if total > 1.0: # Log slow queries (1 second or more)
+ logger.warning(f"Slow query: {total:.2f}s - {statement[:100]}...")
+```
+
+## Next Steps
+
+You've completed PostgreSQL database integration! Next things to try:
+
+1. **[Docker Containerization](docker-deployment.md)** - Building production deployment environment
+2. **[Custom Response Handling](custom-response-handling.md)** - Advanced API response formats
+
+## Summary
+
+In this tutorial, we used PostgreSQL and SQLAlchemy to:
+
+- ✅ Integrate PostgreSQL database
+- ✅ Implement ORM using SQLModel
+- ✅ Set up Alembic migration system
+- ✅ Advanced CRUD operations and query optimization
+- ✅ Transaction processing and data integrity
+- ✅ Pagination, search, and sorting features
+- ✅ Integration tests and database testing
+- ✅ Production deployment considerations
+
+Now you can build robust database-driven APIs that can be used in real production environments!
diff --git a/docs/tutorial/docker-deployment.md b/docs/tutorial/docker-deployment.md
new file mode 100644
index 0000000..04d872c
--- /dev/null
+++ b/docs/tutorial/docker-deployment.md
@@ -0,0 +1,1177 @@
+# Docker Containerization and Deployment
+
+Learn how to containerize FastAPI applications with Docker to build consistent development environments and prepare for production deployment. We'll set up a complete Docker-based deployment environment using the `fastapi-dockerized` template.
+
+## What You'll Learn in This Tutorial
+
+- Containerizing FastAPI applications with Docker
+- Creating optimized Docker images with multi-stage builds
+- Setting up development environments with Docker Compose
+- Docker configuration for production deployment
+- Container monitoring and log management
+- Building CI/CD pipelines
+
+## Prerequisites
+
+- Completed the [Database Integration Tutorial](database-integration.md)
+- Docker and Docker Compose installed
+- Understanding of basic Docker commands
+- Basic knowledge of container concepts
+
+## Advantages of Docker Containerization
+
+### Traditional vs Docker Approach
+
+| Category | Traditional Approach | Docker Approach |
+|----------|---------------------|-----------------|
+| **Environment Consistency** | Differences between environments | Same environment everywhere |
+| **Dependency Management** | Manual installation required | All dependencies included in image |
+| **Deployment Speed** | Slow | Fast deployment possible |
+| **Scalability** | Limited | Easy scaling |
+| **Rollback** | Complex | Immediate rollback to previous version |
+| **Resource Usage** | Heavy | Lightweight containers |
+
+## Step 1: Creating Docker-based Project
+
+Create a project using the `fastapi-dockerized` template:
+
+
+
+```console
+$ fastkit startdemo fastapi-dockerized
+Enter the project name: dockerized-todo-api
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: Dockerized todo management API
+Deploying FastAPI project using 'fastapi-dockerized' template
+
+ Project Information
+┌──────────────┬─────────────────────────────────────────────┐
+│ Project Name │ dockerized-todo-api │
+│ Author │ Developer Kim │
+│ Author Email │ developer@example.com │
+│ Description │ Dockerized todo management API │
+└──────────────┴─────────────────────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬───────────────────┐
+│ Dependency 1 │ fastapi │
+│ Dependency 2 │ uvicorn │
+│ Dependency 3 │ pydantic │
+│ Dependency 4 │ pydantic-settings │
+│ Dependency 5 │ python-dotenv │
+└──────────────┴───────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'dockerized-todo-api' from 'fastapi-dockerized' has been created successfully!
+```
+
+
+
+## Step 2: Analyzing Docker Configuration Files
+
+Let's examine the Docker-related files in the generated project:
+
+```
+dockerized-todo-api/
+├── Dockerfile # Docker image build configuration
+├── docker-compose.yml # Development environment container setup
+├── docker-compose.prod.yml # Production environment configuration
+├── .dockerignore # Files to exclude during Docker build
+├── scripts/
+│ ├── start.sh # Container startup script
+│ ├── prestart.sh # Pre-start initialization script
+│ └── gunicorn.conf.py # Gunicorn configuration
+├── src/
+│ ├── main.py # FastAPI application
+│ └── ... # Other source code
+└── requirements.txt # Python dependencies
+```
+
+### Dockerfile Analysis
+
+```dockerfile
+# Optimized Dockerfile using multi-stage build
+
+# ============================================
+# Stage 1: Build stage
+# ============================================
+FROM python:3.12-slim as builder
+
+# Install build tools
+RUN apt-get update && apt-get install -y \
+ build-essential \
+ curl \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy dependency file and install
+COPY requirements.txt .
+RUN pip install --user --no-cache-dir -r requirements.txt
+
+# ============================================
+# Stage 2: Runtime stage
+# ============================================
+FROM python:3.12-slim
+
+# System update and essential package installation
+RUN apt-get update && apt-get install -y \
+ curl \
+ && rm -rf /var/lib/apt/lists/* \
+ && apt-get clean
+
+# Create non-root user (security enhancement)
+RUN groupadd -r appuser && useradd -r -g appuser appuser
+
+# Create application directory
+WORKDIR /app
+
+# Copy Python packages from build stage
+COPY --from=builder /root/.local /home/appuser/.local
+
+# Copy application code
+COPY . .
+
+# Set file permissions
+RUN chown -R appuser:appuser /app
+RUN chmod +x scripts/start.sh scripts/prestart.sh
+
+# Add Python package path to PATH
+ENV PATH=/home/appuser/.local/bin:$PATH
+
+# Switch to non-root user
+USER appuser
+
+# Configure health check
+HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
+ CMD curl -f http://localhost:8000/health || exit 1
+
+# Expose port
+EXPOSE 8000
+
+# Execute startup script
+CMD ["./scripts/start.sh"]
+```
+
+### Docker Compose development environment (`docker-compose.yml`)
+
+```yaml
+version: '3.8'
+
+services:
+ app:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ container_name: dockerized-todo-api
+ restart: unless-stopped
+ ports:
+ - "8000:8000"
+ environment:
+ - ENVIRONMENT=development
+ - DEBUG=true
+ - RELOAD=true
+ volumes:
+ # Mount volume for development (auto-reload on code changes)
+ - ./src:/app/src:ro
+ - ./scripts:/app/scripts:ro
+ networks:
+ - app-network
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 40s
+
+ # Redis (for caching and session store)
+ redis:
+ image: redis:7-alpine
+ container_name: dockerized-todo-redis
+ restart: unless-stopped
+ ports:
+ - "6379:6379"
+ volumes:
+ - redis_data:/data
+ networks:
+ - app-network
+ healthcheck:
+ test: ["CMD", "redis-cli", "ping"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+
+ # Nginx (reverse proxy)
+ nginx:
+ image: nginx:alpine
+ container_name: dockerized-todo-nginx
+ restart: unless-stopped
+ ports:
+ - "80:80"
+ - "443:443"
+ volumes:
+ - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
+ - ./nginx/ssl:/etc/nginx/ssl:ro
+ depends_on:
+ - app
+ networks:
+ - app-network
+ healthcheck:
+ test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://localhost/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+
+volumes:
+ redis_data:
+
+networks:
+ app-network:
+ driver: bridge
+```
+
+### Docker Compose production environment (`docker-compose.prod.yml`)
+
+```yaml
+version: '3.8'
+
+services:
+ app:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ restart: always
+ environment:
+ - ENVIRONMENT=production
+ - DEBUG=false
+ - WORKERS=4
+ - MAX_WORKERS=8
+ volumes:
+ - app_logs:/app/logs
+ networks:
+ - app-network
+ deploy:
+ replicas: 2
+ resources:
+ limits:
+ cpus: '1.0'
+ memory: 1G
+ reservations:
+ cpus: '0.5'
+ memory: 512M
+ restart_policy:
+ condition: on-failure
+ delay: 5s
+ max_attempts: 3
+
+ redis:
+ image: redis:7-alpine
+ restart: always
+ command: redis-server --appendonly yes --requirepass ${REDIS_PASSWORD}
+ volumes:
+ - redis_data:/data
+ networks:
+ - app-network
+ deploy:
+ resources:
+ limits:
+ cpus: '0.5'
+ memory: 512M
+
+ nginx:
+ image: nginx:alpine
+ restart: always
+ ports:
+ - "80:80"
+ - "443:443"
+ volumes:
+ - ./nginx/nginx.prod.conf:/etc/nginx/nginx.conf:ro
+ - ./nginx/ssl:/etc/nginx/ssl:ro
+ - nginx_logs:/var/log/nginx
+ depends_on:
+ - app
+ networks:
+ - app-network
+ deploy:
+ resources:
+ limits:
+ cpus: '0.5'
+ memory: 256M
+
+volumes:
+ redis_data:
+ app_logs:
+ nginx_logs:
+
+networks:
+ app-network:
+ driver: overlay
+ attachable: true
+```
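+
+Note that the `deploy:` keys (replicas, resource limits, restart policy) are honored by Docker Swarm via `docker stack deploy`; plain `docker-compose up` ignores most of them. Launch sketches for both (the stack name `todo-api` is illustrative):
+
+```console
+# Plain Compose (deploy: keys largely ignored)
+$ docker-compose -f docker-compose.prod.yml up -d --build
+
+# Docker Swarm (honors replicas and resource limits)
+$ docker swarm init
+$ docker stack deploy -c docker-compose.prod.yml todo-api
+```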
+
+## Step 3: Configure startup scripts
+
+### Main startup script (`scripts/start.sh`)
+
+```bash
+#!/bin/bash
+
+set -e
+
+# Set environment variables
+export PYTHONPATH=/app:$PYTHONPATH
+
+# Run pre-start script
+echo "Running pre-start script..."
+./scripts/prestart.sh
+
+# Determine execution mode based on environment
+if [[ "$ENVIRONMENT" == "production" ]]; then
+ echo "Starting production server with Gunicorn..."
+ exec gunicorn src.main:app \
+ --config scripts/gunicorn.conf.py \
+ --bind 0.0.0.0:8000 \
+ --workers ${WORKERS:-4} \
+ --worker-class uvicorn.workers.UvicornWorker \
+ --max-requests 1000 \
+ --max-requests-jitter 100 \
+ --preload \
+ --access-logfile - \
+ --error-logfile -
+else
+ echo "Starting development server with Uvicorn..."
+ if [[ "$RELOAD" == "true" ]]; then
+ exec uvicorn src.main:app \
+ --host 0.0.0.0 \
+ --port 8000 \
+ --reload \
+ --reload-dir src \
+ --log-level debug
+ else
+ exec uvicorn src.main:app \
+ --host 0.0.0.0 \
+ --port 8000 \
+ --log-level info
+ fi
+fi
+```
+
+### Pre-start script (`scripts/prestart.sh`)
+
+```bash
+#!/bin/bash
+
+set -e
+
+echo "Running pre-start checks..."
+
+# Check Python modules and dependencies
+echo "Checking Python dependencies..."
+python -c "import fastapi, uvicorn, pydantic; print('✓ Core dependencies OK')"
+
+# Check environment variables
+if [[ -z "$ENVIRONMENT" ]]; then
+ export ENVIRONMENT="development"
+ echo "ℹ ENVIRONMENT not set, defaulting to development"
+fi
+
+# Create log directory
+mkdir -p /app/logs
+touch /app/logs/app.log
+
+# Check if health endpoint is present
+echo "Checking health endpoint..."
+python -c "
+from src.main import app
+routes = [route.path for route in app.routes]
+if '/health' not in routes:
+ print('⚠ Warning: /health endpoint not found')
+else:
+ print('✓ Health endpoint OK')
+"
+
+echo "Pre-start checks completed successfully!"
+```
+
+### Gunicorn configuration (`scripts/gunicorn.conf.py`)
+
+```python
+import multiprocessing
+import os
+
+# Server socket
+bind = "0.0.0.0:8000"
+backlog = 2048
+
+# Worker process
+workers = int(os.getenv("WORKERS", multiprocessing.cpu_count() * 2 + 1))
+worker_class = "uvicorn.workers.UvicornWorker"
+worker_connections = 1000
+max_requests = 1000
+max_requests_jitter = 100
+
+# Worker restart settings
+preload_app = True
+timeout = 120
+keepalive = 2
+
+# Logging
+accesslog = "-"
+errorlog = "-"
+loglevel = "info"
+access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(D)s'
+
+# Process name
+proc_name = "dockerized-todo-api"
+
+# Security
+limit_request_line = 4094
+limit_request_fields = 100
+limit_request_field_size = 8190
+
+# Performance tuning
+def when_ready(server):
+ server.log.info("Server is ready. Spawning workers")
+
+def worker_int(worker):
+ worker.log.info("worker received INT or QUIT signal")
+
+def pre_fork(server, worker):
+    server.log.info("About to fork a new worker")
+
+def post_fork(server, worker):
+ server.log.info("Worker spawned (pid: %s)", worker.pid)
+
+def worker_abort(worker):
+ worker.log.info("worker received SIGABRT signal")
+```
+
+## Step 4: Implement health check and monitoring
+
+### Add health check endpoint (`src/main.py`)
+
+```python
+from fastapi import FastAPI, status, Depends
+from fastapi.responses import JSONResponse
+import psutil
+import time
+from datetime import datetime
+
+app = FastAPI(
+ title="Dockerized Todo API",
+ description="Dockerized todo management API",
+ version="1.0.0"
+)
+
+# Application start time
+start_time = time.time()
+
+@app.get("/health", status_code=status.HTTP_200_OK)
+async def health_check():
+ """
+ Container health check endpoint
+ """
+ current_time = time.time()
+ uptime = current_time - start_time
+
+ # System resource information
+ memory_info = psutil.virtual_memory()
+ cpu_percent = psutil.cpu_percent(interval=1)
+
+ health_data = {
+ "status": "healthy",
+ "timestamp": datetime.utcnow().isoformat(),
+ "uptime_seconds": round(uptime, 2),
+ "version": app.version,
+ "system": {
+ "memory_usage_percent": memory_info.percent,
+ "memory_available_mb": round(memory_info.available / 1024 / 1024, 2),
+ "cpu_usage_percent": cpu_percent,
+ },
+ "checks": {
+ "database": await check_database_connection(),
+ "redis": await check_redis_connection(),
+ "disk_space": check_disk_space(),
+ }
+ }
+
+ # Check if all checks passed
+ all_checks_passed = all(health_data["checks"].values())
+
+ if not all_checks_passed:
+ return JSONResponse(
+ status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+ content=health_data
+ )
+
+ return health_data
+
+async def check_database_connection() -> bool:
+ """Check database connection status"""
+ try:
+ # In actual implementation, test database connection
+ return True
+ except Exception:
+ return False
+
+async def check_redis_connection() -> bool:
+ """Check Redis connection status"""
+ try:
+ # In actual implementation, test Redis connection
+ return True
+ except Exception:
+ return False
+
+def check_disk_space() -> bool:
+ """Check disk space"""
+ disk_usage = psutil.disk_usage('/')
+ free_percentage = (disk_usage.free / disk_usage.total) * 100
+ return free_percentage > 10 # 10% or more free space needed
+
+@app.get("/health/ready", status_code=status.HTTP_200_OK)
+async def readiness_check():
+ """
+ Kubernetes readiness probe endpoint
+ """
+ # Check if application is ready to receive traffic
+ return {"status": "ready", "timestamp": datetime.utcnow().isoformat()}
+
+@app.get("/health/live", status_code=status.HTTP_200_OK)
+async def liveness_check():
+ """
+ Kubernetes liveness probe endpoint
+ """
+ return {"status": "alive", "timestamp": datetime.utcnow().isoformat()}
+```
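+
+This `/health` endpoint is what container orchestration reports against; a typical Dockerfile wiring is `HEALTHCHECK CMD curl -f http://localhost:8000/health || exit 1` (adjust port and path to your image), which produces the `Up (healthy)` status you'll see from `docker-compose ps` below.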
+
+## Step 5: Configure Nginx reverse proxy
+
+### Development environment Nginx configuration (`nginx/nginx.conf`)
+
+```nginx
+events {
+ worker_connections 1024;
+}
+
+http {
+ upstream fastapi_backend {
+ # Specify backend by container name
+ server app:8000;
+ }
+
+ # Define log format
+ log_format main '$remote_addr - $remote_user [$time_local] "$request" '
+ '$status $body_bytes_sent "$http_referer" '
+ '"$http_user_agent" "$http_x_forwarded_for" '
+ 'rt=$request_time uct="$upstream_connect_time" '
+ 'uht="$upstream_header_time" urt="$upstream_response_time"';
+
+ access_log /var/log/nginx/access.log main;
+ error_log /var/log/nginx/error.log warn;
+
+ # Default settings
+ sendfile on;
+ tcp_nopush on;
+ tcp_nodelay on;
+ keepalive_timeout 65;
+ types_hash_max_size 2048;
+ client_max_body_size 100M;
+
+ # Gzip compression
+ gzip on;
+ gzip_vary on;
+ gzip_min_length 1024;
+ gzip_types text/plain text/css text/xml text/javascript
+ application/json application/javascript application/xml+rss
+ application/atom+xml image/svg+xml;
+
+ server {
+ listen 80;
+ server_name localhost;
+
+ # Security headers
+ add_header X-Content-Type-Options nosniff;
+ add_header X-Frame-Options DENY;
+ add_header X-XSS-Protection "1; mode=block";
+
+ # Health check endpoint
+ location /health {
+ proxy_pass http://fastapi_backend;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ # Health check should respond quickly
+ proxy_connect_timeout 5s;
+ proxy_send_timeout 5s;
+ proxy_read_timeout 5s;
+ }
+
+ # API endpoint
+ location / {
+ proxy_pass http://fastapi_backend;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ # Timeout settings
+ proxy_connect_timeout 30s;
+ proxy_send_timeout 30s;
+ proxy_read_timeout 30s;
+
+ # Buffering settings
+ proxy_buffering on;
+ proxy_buffer_size 4k;
+ proxy_buffers 8 4k;
+ }
+
+ # Static file caching (future use)
+ location /static {
+ expires 1y;
+ add_header Cache-Control public;
+ add_header ETag "";
+ }
+ }
+}
+```
+
+### Production Nginx configuration (`nginx/nginx.prod.conf`)
+
+```nginx
+events {
+ worker_connections 2048;
+}
+
+http {
+ upstream fastapi_backend {
+ # Load balancing for multiple app instances
+ server app:8000 max_fails=3 fail_timeout=30s;
+ # server app2:8000 max_fails=3 fail_timeout=30s; # For scaling
+
+ # Keep-alive
+ keepalive 32;
+ }
+
+ # Security settings
+ server_tokens off;
+
+ # Rate limiting
+ limit_req_zone $binary_remote_addr zone=api:10m rate=10r/s;
+ limit_req_zone $binary_remote_addr zone=health:10m rate=100r/s;
+
+ # SSL settings
+ ssl_protocols TLSv1.2 TLSv1.3;
+ ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384;
+ ssl_prefer_server_ciphers off;
+ ssl_session_cache shared:SSL:10m;
+ ssl_session_timeout 10m;
+
+ server {
+ listen 80;
+ server_name your-domain.com;
+ return 301 https://$server_name$request_uri;
+ }
+
+ server {
+ listen 443 ssl http2;
+ server_name your-domain.com;
+
+ ssl_certificate /etc/nginx/ssl/cert.pem;
+ ssl_certificate_key /etc/nginx/ssl/key.pem;
+
+ # Security headers
+ add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
+ add_header X-Content-Type-Options nosniff always;
+ add_header X-Frame-Options DENY always;
+ add_header X-XSS-Protection "1; mode=block" always;
+ add_header Referrer-Policy "strict-origin-when-cross-origin" always;
+
+ # Health check (rate limit applied)
+ location /health {
+ limit_req zone=health burst=20 nodelay;
+ proxy_pass http://fastapi_backend;
+ include /etc/nginx/proxy_params;
+ }
+
+ # API endpoint (rate limit applied)
+ location / {
+ limit_req zone=api burst=20 nodelay;
+ proxy_pass http://fastapi_backend;
+ include /etc/nginx/proxy_params;
+ }
+ }
+}
+```
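+
+Note that `/etc/nginx/proxy_params` ships with Debian's nginx package but not with the official nginx Docker image; if it's missing in your container, create it with the same four `proxy_set_header` lines used in the development configuration above.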
+
+## Step 6: Build and run containers
+
+### Run in development environment
+
+
+
+```console
+$ cd dockerized-todo-api
+
+# Build Docker image
+$ docker-compose build
+Building app
+Step 1/15 : FROM python:3.12-slim as builder
+ ---> abc123def456
+Step 2/15 : RUN apt-get update && apt-get install -y build-essential curl
+ ---> Running in xyz789abc123
+...
+Successfully built def456ghi789
+Successfully tagged dockerized-todo-api_app:latest
+
+# Run container (background)
+$ docker-compose up -d
+Creating network "dockerized-todo-api_app-network" with driver "bridge"
+Creating volume "dockerized-todo-api_redis_data" with default driver
+Creating dockerized-todo-redis ... done
+Creating dockerized-todo-api ... done
+Creating dockerized-todo-nginx ... done
+
+# Check container status
+$ docker-compose ps
+ Name Command State Ports
+------------------------------------------------------------------------------------------------
+dockerized-todo-api ./scripts/start.sh Up (healthy) 8000/tcp
+dockerized-todo-nginx /docker-entrypoint.sh ngin ... Up 0.0.0.0:80->80/tcp, :::80->80/tcp
+dockerized-todo-redis docker-entrypoint.sh redis ... Up (healthy) 0.0.0.0:6379->6379/tcp, :::6379->6379/tcp
+```
+
+
+
+### Check logs
+
+
+
+```console
+# Check all service logs
+$ docker-compose logs
+
+# Check specific service logs
+$ docker-compose logs app
+$ docker-compose logs nginx
+$ docker-compose logs redis
+
+# Check real-time logs
+$ docker-compose logs -f app
+```
+
+
+
+### Health check test
+
+
+
+```console
+# Basic health check
+$ curl http://localhost/health
+{
+ "status": "healthy",
+ "timestamp": "2024-01-01T12:00:00.123456",
+ "uptime_seconds": 45.67,
+ "version": "1.0.0",
+ "system": {
+ "memory_usage_percent": 25.3,
+ "memory_available_mb": 3072.45,
+ "cpu_usage_percent": 5.2
+ },
+ "checks": {
+ "database": true,
+ "redis": true,
+ "disk_space": true
+ }
+}
+
+# Kubernetes probe test
+$ curl http://localhost/health/ready
+$ curl http://localhost/health/live
+```
+
+
+
+## Step 7: Production deployment
+
+### Set environment variables (`.env.prod`)
+
+```bash
+# Application settings
+ENVIRONMENT=production
+DEBUG=false
+SECRET_KEY=your-super-secret-key-here
+WORKERS=4
+
+# Database settings
+DATABASE_URL=postgresql://user:password@db:5432/todoapp
+REDIS_URL=redis://:password@redis:6379/0
+REDIS_PASSWORD=your-redis-password
+
+# Logging settings
+LOG_LEVEL=info
+LOG_FILE=/app/logs/app.log
+
+# Security settings
+ALLOWED_HOSTS=["your-domain.com"]
+CORS_ORIGINS=["https://your-frontend.com"]
+
+# Monitoring
+SENTRY_DSN=https://your-sentry-dsn@sentry.io/project-id
+```
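+
+At startup these variables can be loaded into typed settings. Below is a minimal, abridged sketch assuming pydantic-settings v2; the field names mirror the file above, but the `Settings` class is illustrative rather than the template's actual config module:
+
+```python
+# src/core/config.py (sketch) - typed access to .env.prod values
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+class Settings(BaseSettings):
+    model_config = SettingsConfigDict(env_file=".env.prod")
+
+    ENVIRONMENT: str = "development"
+    DEBUG: bool = False
+    SECRET_KEY: str
+    WORKERS: int = 4
+    DATABASE_URL: str
+    REDIS_URL: str
+    LOG_LEVEL: str = "info"
+    ALLOWED_HOSTS: list[str] = []  # JSON-style lists in .env are parsed automatically
+    CORS_ORIGINS: list[str] = []
+
+settings = Settings()
+```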
+
+### Production deployment command
+
+
+
+```console
+# Deploy in production environment
+$ docker-compose -f docker-compose.prod.yml --env-file .env.prod up -d
+
+# Scaling (app instance scaling)
+$ docker-compose -f docker-compose.prod.yml up -d --scale app=3
+
+# Rolling update
+$ docker-compose -f docker-compose.prod.yml build app
+$ docker-compose -f docker-compose.prod.yml up -d --no-deps app
+
+# Safe shutdown before backup
+$ docker-compose -f docker-compose.prod.yml down --timeout 30
+```
+
+
+
+## Step 8: Monitoring and logging
+
+### Docker container resource monitoring
+
+
+
+```console
+# Check real-time resource usage
+$ docker stats
+
+CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS
+abc123def456 dockerized-todo-api 2.34% 128.5MiB / 1GiB 12.55% 1.23MB / 456kB 12.3MB / 4.56MB 15
+def456ghi789 dockerized-todo-nginx 0.12% 12.5MiB / 256MiB 4.88% 456kB / 1.23MB 1.23MB / 456kB 3
+ghi789jkl012 dockerized-todo-redis 1.45% 32.1MiB / 512MiB 6.27% 789kB / 2.34MB 4.56MB / 1.23MB 4
+
+# Check specific container details
+$ docker inspect dockerized-todo-api
+
+# Check container internal processes
+$ docker-compose exec app ps aux
+```
+
+
+
+### Log aggregation and analysis
+
+```yaml
+# docker-compose.logging.yml
+version: '3.8'
+
+services:
+ # ELK Stack for log aggregation
+ elasticsearch:
+ image: docker.elastic.co/elasticsearch/elasticsearch:8.6.0
+ environment:
+ - discovery.type=single-node
+ - xpack.security.enabled=false
+ volumes:
+ - elasticsearch_data:/usr/share/elasticsearch/data
+ networks:
+ - logging
+
+ logstash:
+ image: docker.elastic.co/logstash/logstash:8.6.0
+ volumes:
+ - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
+ - ./logstash/config:/usr/share/logstash/config:ro
+ networks:
+ - logging
+ depends_on:
+ - elasticsearch
+
+ kibana:
+ image: docker.elastic.co/kibana/kibana:8.6.0
+ ports:
+ - "5601:5601"
+ environment:
+ - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
+ networks:
+ - logging
+ depends_on:
+ - elasticsearch
+
+ # Fluentd for log collection
+ fluentd:
+ image: fluent/fluentd:v1.16-debian-1
+ volumes:
+ - ./fluentd/conf:/fluentd/etc:ro
+ - /var/log:/var/log:ro
+ networks:
+ - logging
+ depends_on:
+ - elasticsearch
+
+volumes:
+ elasticsearch_data:
+
+networks:
+ logging:
+ driver: bridge
+```
+
+### Prometheus metric collection
+
+```python
+# src/monitoring.py
+from prometheus_client import Counter, Histogram, Gauge, generate_latest
+from fastapi import APIRouter, Request, Response
+import time
+
+# Define metrics
+REQUEST_COUNT = Counter(
+ 'http_requests_total',
+ 'Total HTTP requests',
+ ['method', 'endpoint', 'status_code']
+)
+
+REQUEST_DURATION = Histogram(
+ 'http_request_duration_seconds',
+ 'HTTP request duration in seconds',
+ ['method', 'endpoint']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+ 'active_connections',
+ 'Number of active connections'
+)
+
+async def metrics_middleware(request: Request, call_next):
+ """Prometheus metric collection middleware"""
+ start_time = time.time()
+ method = request.method
+ endpoint = request.url.path
+
+ ACTIVE_CONNECTIONS.inc()
+
+ try:
+ response = await call_next(request)
+ status_code = response.status_code
+    except Exception:
+        status_code = 500
+        raise
+ finally:
+ duration = time.time() - start_time
+ REQUEST_DURATION.labels(method=method, endpoint=endpoint).observe(duration)
+ REQUEST_COUNT.labels(method=method, endpoint=endpoint, status_code=status_code).inc()
+ ACTIVE_CONNECTIONS.dec()
+
+ return response
+
+@app.get("/metrics")
+async def get_metrics():
+ """Prometheus metric endpoint"""
+ return Response(generate_latest(), media_type="text/plain")
+```
+
+## Step 9: Build CI/CD pipeline
+
+### GitHub Actions workflow (`.github/workflows/deploy.yml`)
+
+```yaml
+name: Deploy to Production
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+env:
+ REGISTRY: ghcr.io
+ IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.12'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install -r requirements.txt
+ pip install pytest pytest-asyncio httpx
+
+ - name: Run tests
+ run: |
+ pytest tests/ -v --cov=src --cov-report=xml
+
+ - name: Upload coverage reports
+ uses: codecov/codecov-action@v3
+ with:
+ file: ./coverage.xml
+
+ build:
+ needs: test
+ runs-on: ubuntu-latest
+ if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Log in to Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ${{ env.REGISTRY }}
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+ tags: |
+ type=ref,event=branch
+ type=ref,event=pr
+ type=sha
+ type=raw,value=latest
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ file: ./Dockerfile
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
+
+ deploy:
+ needs: build
+ runs-on: ubuntu-latest
+ if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Deploy to production
+ uses: appleboy/ssh-action@v1.0.0
+ with:
+ host: ${{ secrets.PROD_HOST }}
+ username: ${{ secrets.PROD_USERNAME }}
+ key: ${{ secrets.PROD_SSH_KEY }}
+ script: |
+ cd /opt/dockerized-todo-api
+
+ # Pull new image
+ docker-compose -f docker-compose.prod.yml pull
+
+ # Rolling update
+ docker-compose -f docker-compose.prod.yml up -d --no-deps app
+
+ # Health check
+ sleep 30
+ curl -f http://localhost/health || exit 1
+
+ # Clean up previous image
+ docker image prune -f
+```
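+
+For the `pull` step in the deploy script to pick up the freshly pushed build, `docker-compose.prod.yml` must reference the registry image (for example `image: ghcr.io/<owner>/<repo>:latest`) rather than a local `build:` context.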
+
+## Step 10: Enhance security
+
+### Container security settings
+
+```dockerfile
+# Add security enhancement to Dockerfile
+
+# Run as non-root user
+USER appuser
+
+# Read-only root filesystem
+# docker run --read-only --tmpfs /tmp dockerized-todo-api
+
+# Limit permissions
+# docker run --cap-drop=ALL dockerized-todo-api
+
+# Network isolation
+# docker run --network=none dockerized-todo-api
+```
+
+### Docker Compose security settings
+
+```yaml
+# Add security settings to docker-compose.yml
+services:
+ app:
+ # ... existing settings ...
+ security_opt:
+ - no-new-privileges:true
+ cap_drop:
+ - ALL
+ cap_add:
+ - NET_BIND_SERVICE
+ read_only: true
+ tmpfs:
+ - /tmp
+ - /app/logs
+ user: "1000:1000"
+```
+
+### Secrets management
+
+```yaml
+# Add secrets settings to docker-compose.yml
+version: '3.8'
+
+services:
+ app:
+ secrets:
+ - db_password
+ - api_key
+ environment:
+ - DB_PASSWORD_FILE=/run/secrets/db_password
+ - API_KEY_FILE=/run/secrets/api_key
+
+secrets:
+ db_password:
+ file: ./secrets/db_password.txt
+ api_key:
+ external: true
+```
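+
+The application then resolves these `*_FILE` variables at startup instead of reading raw secrets from the environment. A minimal helper sketch (the `read_secret` name is illustrative, not part of the template):
+
+```python
+# Sketch: resolve values like DB_PASSWORD_FILE=/run/secrets/db_password
+import os
+
+def read_secret(name: str, default: str | None = None) -> str | None:
+    """Prefer NAME_FILE (a Docker secret path), then NAME, then the default."""
+    file_path = os.getenv(f"{name}_FILE")
+    if file_path and os.path.exists(file_path):
+        with open(file_path) as f:
+            return f.read().strip()
+    return os.getenv(name, default)
+
+DB_PASSWORD = read_secret("DB_PASSWORD")
+API_KEY = read_secret("API_KEY")
+```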
+
+## Next Steps
+
+You've completed Docker containerization! Here's what to try next:
+
+1. **[Custom Response Handling](custom-response-handling.md)** - Implementing advanced API response formats
+
+
+
+
+## Summary
+
+In this tutorial, we used Docker to:
+
+- ✅ Create optimized container images with multi-stage builds
+- ✅ Set up development/production environments with Docker Compose
+- ✅ Configure Nginx reverse proxy and load balancing
+- ✅ Build health check and monitoring systems
+- ✅ Implement automated deployment through CI/CD pipelines
+- ✅ Set up production-level security configurations
+- ✅ Implement logging and metrics collection systems
+
+Now you can safely and efficiently deploy FastAPI applications to production environments!
diff --git a/docs/tutorial/mcp-integration.md b/docs/tutorial/mcp-integration.md
new file mode 100644
index 0000000..66367b3
--- /dev/null
+++ b/docs/tutorial/mcp-integration.md
@@ -0,0 +1,1730 @@
+# MCP (Model Context Protocol) Integration
+
+Learn how to integrate the Model Context Protocol (MCP) with FastAPI to build a system where AI models can use API endpoints as tools. Using the `fastapi-mcp` template, we'll implement a complete AI-integrated API, including authentication, permission management, and an MCP server.
+
+## What You'll Learn in This Tutorial
+
+- Model Context Protocol (MCP) concepts and implementation
+- Building JWT-based authentication systems
+- Implementing Role-Based Access Control (RBAC)
+- Exposing and managing MCP tools
+- Secure API communication with AI models
+- User session and context management
+
+## Prerequisites
+
+- Completed the [Custom Response Handling Tutorial](custom-response-handling.md)
+- Understanding of basic JWT and OAuth2 concepts
+- Familiarity with how AI/LLM models communicate with APIs
+- Basic knowledge of the MCP protocol
+
+## What is Model Context Protocol (MCP)?
+
+MCP is a standardized protocol that allows AI models to interact with external systems.
+
+### Traditional vs MCP Approach
+
+**Traditional Approach (Direct API Calls):**
+```
+AI Model → HTTP Request → API Server → Response
+```
+
+**MCP Approach:**
+```
+AI Model → MCP Client → MCP Server (FastAPI) → Safe Tool Execution → Response
+```
+
+### Advantages of MCP
+
+- **Security**: Integrated authentication and permission management
+- **Standardization**: Provides a consistent interface across tools
+- **Context Management**: Maintains session-based state
+- **Tool Abstraction**: Exposes complex APIs as simple tools
+
+## Step 1: Creating MCP Integration Project
+
+Create a project using the `fastapi-mcp` template:
+
+
+
+```console
+$ fastkit startdemo fastapi-mcp
+Enter the project name: ai-integrated-api
+Enter the author name: Developer Kim
+Enter the author email: developer@example.com
+Enter the project description: MCP-based API server integrated with AI models
+Deploying FastAPI project using 'fastapi-mcp' template
+
+ Project Information
+┌──────────────┬────────────────────────────────────────────────┐
+│ Project Name │ ai-integrated-api                              │
+│ Author       │ Developer Kim                                  │
+│ Author Email │ developer@example.com                          │
+│ Description  │ MCP-based API server integrated with AI models │
+└──────────────┴────────────────────────────────────────────────┘
+
+ Template Dependencies
+┌──────────────┬──────────────────┐
+│ Dependency 1 │ fastapi          │
+│ Dependency 2 │ uvicorn          │
+│ Dependency 3 │ pydantic         │
+│ Dependency 4 │ python-jose      │
+│ Dependency 5 │ passlib          │
+│ Dependency 6 │ python-multipart │
+│ Dependency 7 │ mcp              │
+└──────────────┴──────────────────┘
+
+Select package manager (pip, uv, pdm, poetry) [uv]: uv
+Do you want to proceed with project creation? [y/N]: y
+
+✨ FastAPI project 'ai-integrated-api' from 'fastapi-mcp' has been created successfully!
+```
+
+
+
+## Step 2: Project Structure Analysis
+
+Let's examine the structure of the generated project:
+
+```
+ai-integrated-api/
+├── src/
+│ ├── main.py # FastAPI application
+│ ├── auth/
+│ │ ├── __init__.py
+│ │ ├── models.py # Authentication-related data models
+│ │ ├── jwt_handler.py # JWT token processing
+│ │ ├── dependencies.py # Authentication dependencies
+│ │ └── routes.py # Authentication router
+│ ├── mcp/
+│ │ ├── __init__.py
+│ │ ├── server.py # MCP server implementation
+│ │ ├── tools.py # MCP tool definitions
+│ │ └── client.py # MCP client (for testing)
+│ ├── api/
+│ │ ├── __init__.py
+│ │ ├── api.py # API router collection
+│ │ └── routes/
+│ │ ├── items.py # Item management API
+│ │ ├── users.py # User management API
+│ │ └── admin.py # Admin API
+│ ├── schemas/
+│ │ ├── __init__.py
+│ │ ├── auth.py # Authentication schemas
+│ │ ├── users.py # User schemas
+│ │ └── items.py # Item schemas
+│ └── core/
+│ ├── __init__.py
+│ ├── config.py # Configuration
+│ ├── database.py # Database (in-memory)
+│ └── security.py # Security configuration
+└── tests/
+ ├── test_auth.py # Authentication tests
+ ├── test_mcp.py # MCP tests
+ └── test_integration.py # Integration tests
+```
+
+## Step 3: Authentication System Implementation
+
+### JWT Token Processing (`src/auth/jwt_handler.py`)
+
+```python
+from datetime import datetime, timedelta
+from typing import Optional, Dict, Any
+from jose import JWTError, jwt
+from passlib.context import CryptContext
+
+from src.core.config import settings
+
+# Password hashing
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+ """Password verification"""
+ return pwd_context.verify(plain_password, hashed_password)
+
+def get_password_hash(password: str) -> str:
+ """Password hashing"""
+ return pwd_context.hash(password)
+
+def create_access_token(data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
+ """Access token generation"""
+ to_encode = data.copy()
+
+ if expires_delta:
+ expire = datetime.utcnow() + expires_delta
+ else:
+ expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+ to_encode.update({"exp": expire, "iat": datetime.utcnow()})
+
+ encoded_jwt = jwt.encode(
+ to_encode,
+ settings.SECRET_KEY,
+ algorithm=settings.ALGORITHM
+ )
+
+ return encoded_jwt
+
+def create_refresh_token(user_id: str) -> str:
+ """Refresh token generation"""
+ data = {"sub": user_id, "type": "refresh"}
+ expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
+
+ to_encode = data.copy()
+ to_encode.update({"exp": expire, "iat": datetime.utcnow()})
+
+ return jwt.encode(
+ to_encode,
+ settings.SECRET_KEY,
+ algorithm=settings.ALGORITHM
+ )
+
+def decode_token(token: str) -> Optional[Dict[str, Any]]:
+ """Token decoding"""
+ try:
+ payload = jwt.decode(
+ token,
+ settings.SECRET_KEY,
+ algorithms=[settings.ALGORITHM]
+ )
+ return payload
+ except JWTError:
+ return None
+
+def verify_token(token: str, token_type: str = "access") -> Optional[str]:
+ """Token verification and user ID return"""
+ payload = decode_token(token)
+
+ if not payload:
+ return None
+
+ # Token type verification
+ if token_type == "refresh" and payload.get("type") != "refresh":
+ return None
+
+ user_id = payload.get("sub")
+ if not user_id:
+ return None
+
+ return user_id
+
+class TokenManager:
+ """Token management class"""
+
+ def __init__(self):
+ self.blacklisted_tokens = set()
+
+ def blacklist_token(self, token: str):
+ """Add token to blacklist"""
+ self.blacklisted_tokens.add(token)
+
+ def is_blacklisted(self, token: str) -> bool:
+ """Check if token is blacklisted"""
+ return token in self.blacklisted_tokens
+
+ def create_token_pair(self, user_id: str, user_role: str) -> Dict[str, str]:
+ """Create access/refresh token pair"""
+ access_token_data = {
+ "sub": user_id,
+ "role": user_role,
+ "type": "access"
+ }
+
+ access_token = create_access_token(access_token_data)
+ refresh_token = create_refresh_token(user_id)
+
+ return {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "token_type": "bearer"
+ }
+
+# Global token manager
+token_manager = TokenManager()
+```
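+
+A quick round trip shows how these helpers fit together (assumes `settings.SECRET_KEY` and the token lifetimes are configured):
+
+```python
+# Illustrative usage of the helpers above
+tokens = token_manager.create_token_pair(user_id="user-123", user_role="user")
+
+assert verify_token(tokens["access_token"]) == "user-123"
+assert verify_token(tokens["refresh_token"], token_type="refresh") == "user-123"
+assert verify_token("not-a-valid-token") is None
+```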
+
+### User Model and Database (`src/auth/models.py`)
+
+```python
+from typing import List, Optional, Dict, Any
+from pydantic import BaseModel, EmailStr
+from enum import Enum
+from datetime import datetime
+
+class UserRole(str, Enum):
+ """User roles"""
+ ADMIN = "admin"
+ USER = "user"
+ AI_AGENT = "ai_agent"
+ READONLY = "readonly"
+
+class Permission(str, Enum):
+ """Permissions"""
+ READ_ITEMS = "read:items"
+ WRITE_ITEMS = "write:items"
+ DELETE_ITEMS = "delete:items"
+ MANAGE_USERS = "manage:users"
+ USE_MCP_TOOLS = "use:mcp_tools"
+ ADMIN_MCP = "admin:mcp"
+
+class User(BaseModel):
+ """User model"""
+ id: str
+ email: EmailStr
+ username: str
+ full_name: Optional[str] = None
+ role: UserRole
+ permissions: List[Permission]
+ is_active: bool = True
+ created_at: datetime
+ last_login: Optional[datetime] = None
+ api_key: Optional[str] = None # For MCP client
+
+class UserInDB(User):
+ """User model for database storage"""
+ hashed_password: str
+
+class UserCreate(BaseModel):
+ """User creation schema"""
+ email: EmailStr
+ username: str
+ password: str
+ full_name: Optional[str] = None
+ role: UserRole = UserRole.USER
+
+class UserUpdate(BaseModel):
+ """User update schema"""
+ email: Optional[EmailStr] = None
+ username: Optional[str] = None
+ full_name: Optional[str] = None
+ role: Optional[UserRole] = None
+ is_active: Optional[bool] = None
+
+class LoginRequest(BaseModel):
+ """Login request schema"""
+ username: str
+ password: str
+
+class TokenResponse(BaseModel):
+ """Token response schema"""
+ access_token: str
+ refresh_token: str
+ token_type: str = "bearer"
+ expires_in: int
+ user: User
+
+# Default permission mapping by role
+ROLE_PERMISSIONS = {
+ UserRole.ADMIN: [
+ Permission.READ_ITEMS,
+ Permission.WRITE_ITEMS,
+ Permission.DELETE_ITEMS,
+ Permission.MANAGE_USERS,
+ Permission.USE_MCP_TOOLS,
+ Permission.ADMIN_MCP
+ ],
+ UserRole.USER: [
+ Permission.READ_ITEMS,
+ Permission.WRITE_ITEMS,
+ Permission.USE_MCP_TOOLS
+ ],
+ UserRole.AI_AGENT: [
+ Permission.READ_ITEMS,
+ Permission.WRITE_ITEMS,
+ Permission.USE_MCP_TOOLS
+ ],
+ UserRole.READONLY: [
+ Permission.READ_ITEMS
+ ]
+}
+
+class UserDatabase:
+ """Memory-based user database"""
+
+ def __init__(self):
+ self.users: Dict[str, UserInDB] = {}
+ self._init_default_users()
+
+ def _init_default_users(self):
+ """Create default users"""
+ from src.auth.jwt_handler import get_password_hash
+ import uuid
+
+ # Admin account
+ admin_id = str(uuid.uuid4())
+ self.users[admin_id] = UserInDB(
+ id=admin_id,
+ email="admin@example.com",
+ username="admin",
+ full_name="System Administrator",
+ role=UserRole.ADMIN,
+ permissions=ROLE_PERMISSIONS[UserRole.ADMIN],
+ hashed_password=get_password_hash("admin123"),
+ created_at=datetime.utcnow(),
+ api_key=str(uuid.uuid4())
+ )
+
+ # AI agent account
+ ai_id = str(uuid.uuid4())
+ self.users[ai_id] = UserInDB(
+ id=ai_id,
+ email="ai@example.com",
+ username="ai_agent",
+ full_name="AI Assistant",
+ role=UserRole.AI_AGENT,
+ permissions=ROLE_PERMISSIONS[UserRole.AI_AGENT],
+ hashed_password=get_password_hash("ai123"),
+ created_at=datetime.utcnow(),
+ api_key=str(uuid.uuid4())
+ )
+
+ def get_user_by_username(self, username: str) -> Optional[UserInDB]:
+ """Get user by username"""
+ return next(
+ (user for user in self.users.values() if user.username == username),
+ None
+ )
+
+ def get_user_by_id(self, user_id: str) -> Optional[UserInDB]:
+ """Get user by ID"""
+ return self.users.get(user_id)
+
+ def get_user_by_api_key(self, api_key: str) -> Optional[UserInDB]:
+ """Get user by API key"""
+ return next(
+ (user for user in self.users.values() if user.api_key == api_key),
+ None
+ )
+
+ def create_user(self, user_create: UserCreate) -> UserInDB:
+ """Create user"""
+ import uuid
+ from src.auth.jwt_handler import get_password_hash
+
+ user_id = str(uuid.uuid4())
+ user = UserInDB(
+ id=user_id,
+ email=user_create.email,
+ username=user_create.username,
+ full_name=user_create.full_name,
+ role=user_create.role,
+ permissions=ROLE_PERMISSIONS[user_create.role],
+ hashed_password=get_password_hash(user_create.password),
+ created_at=datetime.utcnow(),
+ api_key=str(uuid.uuid4())
+ )
+
+ self.users[user_id] = user
+ return user
+
+ def update_user(self, user_id: str, user_update: UserUpdate) -> Optional[UserInDB]:
+ """Update user"""
+ if user_id not in self.users:
+ return None
+
+ user = self.users[user_id]
+ update_data = user_update.dict(exclude_unset=True)
+
+ for field, value in update_data.items():
+ setattr(user, field, value)
+
+ # Update permissions if role changed
+ if "role" in update_data:
+ user.permissions = ROLE_PERMISSIONS[user.role]
+
+ return user
+
+ def update_last_login(self, user_id: str):
+ """Update last login time"""
+ if user_id in self.users:
+ self.users[user_id].last_login = datetime.utcnow()
+
+# Global database instance
+user_db = UserDatabase()
+```
+
+## Step 4: Authentication Dependencies Implementation
+
+### Authentication Dependencies (`src/auth/dependencies.py`)
+
+```python
+from typing import Optional, List
+from fastapi import Depends, HTTPException, status, Security
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials, APIKeyHeader
+from jose import JWTError
+
+from src.auth.jwt_handler import decode_token, token_manager
+from src.auth.models import User, UserInDB, Permission, user_db
+
+# Security schema
+security = HTTPBearer()
+api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
+
+async def get_current_user(
+ credentials: HTTPAuthorizationCredentials = Security(security)
+) -> User:
+ """Get current authenticated user"""
+ credentials_exception = HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Could not validate credentials",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
+ try:
+ token = credentials.credentials
+
+ # Check blacklist
+ if token_manager.is_blacklisted(token):
+ raise credentials_exception
+
+ payload = decode_token(token)
+ if payload is None:
+ raise credentials_exception
+
+ user_id: str = payload.get("sub")
+ if user_id is None:
+ raise credentials_exception
+
+ except JWTError:
+ raise credentials_exception
+
+ user = user_db.get_user_by_id(user_id)
+ if user is None:
+ raise credentials_exception
+
+ if not user.is_active:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Inactive user"
+ )
+
+ return User(**user.dict())
+
+async def get_current_user_by_api_key(
+ api_key: Optional[str] = Security(api_key_header)
+) -> Optional[User]:
+ """Authenticate user by API key"""
+ if not api_key:
+ return None
+
+ user = user_db.get_user_by_api_key(api_key)
+ if not user or not user.is_active:
+ return None
+
+ return User(**user.dict())
+
+# Bearer scheme without auto_error, so missing credentials can fall back to API key auth
+optional_security = HTTPBearer(auto_error=False)
+
+async def get_optional_token_user(
+    credentials: Optional[HTTPAuthorizationCredentials] = Security(optional_security)
+) -> Optional[User]:
+    """Return the bearer-token user if present and valid, otherwise None"""
+    if credentials is None:
+        return None
+    try:
+        return await get_current_user(credentials)
+    except HTTPException:
+        return None
+
+async def get_current_user_flexible(
+    token_user: Optional[User] = Depends(get_optional_token_user),
+    api_key_user: Optional[User] = Depends(get_current_user_by_api_key)
+) -> User:
+    """Authenticate user by token or API key (flexible authentication)"""
+    user = token_user or api_key_user
+
+    if not user:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Authentication required"
+        )
+
+    return user
+
+def require_permissions(*required_permissions: Permission):
+ """Dependency requiring specific permissions"""
+ def permission_checker(current_user: User = Depends(get_current_user_flexible)) -> User:
+ for permission in required_permissions:
+ if permission not in current_user.permissions:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail=f"Permission '{permission}' required"
+ )
+ return current_user
+
+ return permission_checker
+
+def require_roles(*required_roles):
+ """Dependency requiring specific roles"""
+ def role_checker(current_user: User = Depends(get_current_user_flexible)) -> User:
+ if current_user.role not in required_roles:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail=f"Role must be one of: {', '.join(required_roles)}"
+ )
+ return current_user
+
+ return role_checker
+
+# Common permission dependencies
+RequireAdmin = require_roles("admin")
+RequireReadItems = require_permissions(Permission.READ_ITEMS)
+RequireWriteItems = require_permissions(Permission.WRITE_ITEMS)
+RequireDeleteItems = require_permissions(Permission.DELETE_ITEMS)
+RequireMCPTools = require_permissions(Permission.USE_MCP_TOOLS)
+RequireAdminMCP = require_permissions(Permission.ADMIN_MCP)
+```
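+
+These dependencies drop straight into route signatures. For example (the route itself is illustrative):
+
+```python
+from fastapi import APIRouter, Depends
+
+from src.auth.dependencies import RequireDeleteItems
+from src.auth.models import User
+
+router = APIRouter()
+
+@router.delete("/items/{item_id}")
+async def delete_item(
+    item_id: int,
+    current_user: User = Depends(RequireDeleteItems),  # 403 unless user has delete:items
+):
+    return {"deleted": item_id, "by": current_user.username}
+```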
+
+### Authentication Router (`src/auth/routes.py`)
+
+```python
+from datetime import timedelta
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordRequestForm
+
+from src.auth.models import (
+ User, UserCreate, UserUpdate, LoginRequest, TokenResponse,
+ user_db, UserRole
+)
+from src.auth.jwt_handler import (
+ verify_password, token_manager, verify_token, create_access_token
+)
+from src.auth.dependencies import get_current_user, RequireAdmin
+from src.core.config import settings
+
+router = APIRouter(prefix="/auth", tags=["authentication"])
+
+@router.post("/register", response_model=User)
+async def register_user(user_create: UserCreate):
+ """Register user"""
+ # Check duplicate username
+ if user_db.get_user_by_username(user_create.username):
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Username already registered"
+ )
+
+    # First user is promoted to admin when the database starts empty
+    # (note: with the seed users created at startup, this branch will not trigger)
+    if not user_db.users:
+        user_create.role = UserRole.ADMIN
+
+ user = user_db.create_user(user_create)
+ return User(**user.dict())
+
+@router.post("/login", response_model=TokenResponse)
+async def login_user(form_data: OAuth2PasswordRequestForm = Depends()):
+ """User login"""
+ user = user_db.get_user_by_username(form_data.username)
+
+ if not user or not verify_password(form_data.password, user.hashed_password):
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Incorrect username or password",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
+ if not user.is_active:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Inactive user"
+ )
+
+ # Create token
+ tokens = token_manager.create_token_pair(user.id, user.role)
+
+ # Update last login time
+ user_db.update_last_login(user.id)
+
+ return TokenResponse(
+ access_token=tokens["access_token"],
+ refresh_token=tokens["refresh_token"],
+ token_type=tokens["token_type"],
+ expires_in=settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60,
+ user=User(**user.dict())
+ )
+
+@router.post("/refresh", response_model=dict)
+async def refresh_token(refresh_token: str):
+ """Refresh token"""
+ user_id = verify_token(refresh_token, "refresh")
+
+ if not user_id:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid refresh token"
+ )
+
+ user = user_db.get_user_by_id(user_id)
+ if not user or not user.is_active:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="User not found or inactive"
+ )
+
+ # Create new token pair
+ tokens = token_manager.create_token_pair(user.id, user.role)
+
+ return {
+ "access_token": tokens["access_token"],
+ "refresh_token": tokens["refresh_token"],
+ "token_type": tokens["token_type"],
+ "expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
+ }
+
+@router.post("/logout")
+async def logout_user(current_user: User = Depends(get_current_user)):
+ """User logout"""
+ # In actual implementation, add token to blacklist
+ return {"message": "Successfully logged out"}
+
+@router.get("/me", response_model=User)
+async def get_current_user_info(current_user: User = Depends(get_current_user)):
+ """Get current user information"""
+ return current_user
+
+@router.put("/me", response_model=User)
+async def update_current_user(
+ user_update: UserUpdate,
+ current_user: User = Depends(get_current_user)
+):
+ """Update current user information"""
+ # Normal users cannot change role
+ if user_update.role and current_user.role != UserRole.ADMIN:
+ user_update.role = None
+
+ updated_user = user_db.update_user(current_user.id, user_update)
+ if not updated_user:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
+ )
+
+ return User(**updated_user.dict())
+
+@router.get("/users", response_model=list[User])
+async def list_users(admin_user: User = Depends(RequireAdmin)):
+ """Get user list (admin only)"""
+ return [User(**user.dict()) for user in user_db.users.values()]
+
+@router.post("/users/{user_id}/generate-api-key")
+async def generate_api_key(
+ user_id: str,
+ admin_user: User = Depends(RequireAdmin)
+):
+ """Create user API key (admin only)"""
+ import uuid
+
+ user = user_db.get_user_by_id(user_id)
+ if not user:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
+ )
+
+ # Create new API key
+ new_api_key = str(uuid.uuid4())
+ user.api_key = new_api_key
+
+ return {
+ "api_key": new_api_key,
+ "message": "API key generated successfully"
+ }
+```
+
+## Step 5: MCP Server Implementation
+
+### MCP Tool Definition (`src/mcp/tools.py`)
+
+```python
+from typing import Dict, Any, List, Optional
+from pydantic import BaseModel, Field
+from enum import Enum
+
+class ToolCategory(str, Enum):
+ """Tool category"""
+ DATA_MANAGEMENT = "data_management"
+ SEARCH = "search"
+ ANALYSIS = "analysis"
+ ADMIN = "admin"
+
+class MCPTool(BaseModel):
+ """MCP tool definition"""
+ name: str = Field(..., description="Tool name")
+ description: str = Field(..., description="Tool description")
+ category: ToolCategory = Field(..., description="Tool category")
+ parameters: Dict[str, Any] = Field(default_factory=dict, description="Parameter schema")
+ required_permissions: List[str] = Field(default_factory=list, description="Required permissions")
+ examples: List[Dict[str, Any]] = Field(default_factory=list, description="Usage examples")
+
+class ToolRegistry:
+ """Tool registry"""
+
+ def __init__(self):
+ self.tools: Dict[str, MCPTool] = {}
+ self._register_default_tools()
+
+ def _register_default_tools(self):
+ """Register default tools"""
+
+ # Create item tool
+ self.register_tool(MCPTool(
+ name="create_item",
+ description="Create a new item",
+ category=ToolCategory.DATA_MANAGEMENT,
+ parameters={
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Item name"
+ },
+ "description": {
+ "type": "string",
+ "description": "Item description"
+ },
+ "price": {
+ "type": "number",
+ "description": "Item price",
+ "minimum": 0
+ },
+ "category": {
+ "type": "string",
+ "description": "Item category"
+ }
+ },
+ "required": ["name", "price"]
+ },
+ required_permissions=["write:items"],
+ examples=[
+ {
+ "name": "Notebook",
+ "description": "High-performance gaming notebook",
+ "price": 1500000,
+ "category": "electronics"
+ }
+ ]
+ ))
+
+ # Search item tool
+ self.register_tool(MCPTool(
+ name="search_items",
+ description="Search for items",
+ category=ToolCategory.SEARCH,
+ parameters={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "Search query"
+ },
+ "category": {
+ "type": "string",
+ "description": "Category filter"
+ },
+ "min_price": {
+ "type": "number",
+ "description": "Minimum price"
+ },
+ "max_price": {
+ "type": "number",
+ "description": "Maximum price"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Result count limit",
+ "default": 10,
+ "maximum": 100
+ }
+ },
+ "required": ["query"]
+ },
+ required_permissions=["read:items"],
+ examples=[
+ {
+ "query": "Notebook",
+ "category": "electronics",
+ "max_price": 2000000,
+ "limit": 5
+ }
+ ]
+ ))
+
+ # Analyze item tool
+ self.register_tool(MCPTool(
+ name="analyze_items",
+ description="Analyze item data",
+ category=ToolCategory.ANALYSIS,
+ parameters={
+ "type": "object",
+ "properties": {
+ "analysis_type": {
+ "type": "string",
+ "enum": ["price_distribution", "category_breakdown", "trend_analysis"],
+ "description": "Analysis type"
+ },
+ "date_range": {
+ "type": "object",
+ "properties": {
+ "start_date": {"type": "string", "format": "date"},
+ "end_date": {"type": "string", "format": "date"}
+ },
+ "description": "Analysis period"
+ }
+ },
+ "required": ["analysis_type"]
+ },
+ required_permissions=["read:items"],
+ examples=[
+ {
+ "analysis_type": "price_distribution",
+ "date_range": {
+ "start_date": "2024-01-01",
+ "end_date": "2024-12-31"
+ }
+ }
+ ]
+ ))
+
+ # Manage user tool (admin only)
+ self.register_tool(MCPTool(
+ name="manage_users",
+ description="Manage users",
+ category=ToolCategory.ADMIN,
+ parameters={
+ "type": "object",
+ "properties": {
+ "action": {
+ "type": "string",
+ "enum": ["list", "create", "update", "deactivate"],
+ "description": "Action to perform"
+ },
+ "user_data": {
+ "type": "object",
+ "description": "User data (create/update)"
+ },
+ "user_id": {
+ "type": "string",
+ "description": "User ID (update/deactivate)"
+ }
+ },
+ "required": ["action"]
+ },
+ required_permissions=["manage:users"],
+ examples=[
+ {
+ "action": "list"
+ },
+ {
+ "action": "create",
+ "user_data": {
+ "username": "newuser",
+ "email": "newuser@example.com",
+ "role": "user"
+ }
+ }
+ ]
+ ))
+
+ def register_tool(self, tool: MCPTool):
+ """Register tool"""
+ self.tools[tool.name] = tool
+
+ def get_tool(self, tool_name: str) -> Optional[MCPTool]:
+ """Get tool"""
+ return self.tools.get(tool_name)
+
+ def list_tools(self, user_permissions: List[str] = None) -> List[MCPTool]:
+ """List tools by user permissions"""
+ if user_permissions is None:
+ return list(self.tools.values())
+
+ available_tools = []
+ for tool in self.tools.values():
+ # Check permissions
+ if all(perm in user_permissions for perm in tool.required_permissions):
+ available_tools.append(tool)
+
+ return available_tools
+
+ def get_tools_by_category(self, category: ToolCategory, user_permissions: List[str] = None) -> List[MCPTool]:
+ """List tools by category"""
+ tools = self.list_tools(user_permissions)
+ return [tool for tool in tools if tool.category == category]
+
+# Global tool registry
+tool_registry = ToolRegistry()
+```
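+
+Filtering by permission strings works as you'd expect; for example, a caller holding only `read:items` sees just the read-only tools:
+
+```python
+# Illustrative filtering against the registry above
+readonly_tools = tool_registry.list_tools(["read:items"])
+print([tool.name for tool in readonly_tools])
+# ['search_items', 'analyze_items']
+
+admin_tools = tool_registry.list_tools(
+    ["read:items", "write:items", "manage:users"]
+)
+print([tool.name for tool in admin_tools])
+# ['create_item', 'search_items', 'analyze_items', 'manage_users']
+```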
+
+### MCP Server Implementation (`src/mcp/server.py`)
+
+```python
+from datetime import datetime
+from typing import Dict, Any, List, Optional
+
+from fastapi import HTTPException, status
+
+from src.mcp.tools import tool_registry
+from src.auth.models import User, Permission, user_db
+from src.api.routes.items import ItemCRUD
+
+class MCPServer:
+ """Model Context Protocol server"""
+
+ def __init__(self):
+ self.item_crud = ItemCRUD()
+ self.active_sessions: Dict[str, Dict[str, Any]] = {}
+
+ async def create_session(self, user: User) -> str:
+ """Create MCP session"""
+ import uuid
+
+ session_id = str(uuid.uuid4())
+ self.active_sessions[session_id] = {
+ "user_id": user.id,
+ "user": user,
+ "created_at": datetime.utcnow(),
+ "context": {},
+ "tool_usage_count": 0,
+ "last_activity": datetime.utcnow()
+ }
+
+ return session_id
+
+ async def get_session(self, session_id: str) -> Optional[Dict[str, Any]]:
+ """Get session"""
+ session = self.active_sessions.get(session_id)
+ if session:
+ session["last_activity"] = datetime.utcnow()
+ return session
+
+ async def close_session(self, session_id: str):
+ """Close session"""
+ if session_id in self.active_sessions:
+ del self.active_sessions[session_id]
+
+ async def list_tools(self, user: User) -> List[Dict[str, Any]]:
+ """List tools available to user"""
+ user_permissions = [perm.value for perm in user.permissions]
+ tools = tool_registry.list_tools(user_permissions)
+
+ return [
+ {
+ "name": tool.name,
+ "description": tool.description,
+ "category": tool.category,
+ "parameters": tool.parameters,
+ "examples": tool.examples
+ }
+ for tool in tools
+ ]
+
+ async def execute_tool(
+ self,
+ tool_name: str,
+ parameters: Dict[str, Any],
+ user: User,
+ session_id: Optional[str] = None
+ ) -> Dict[str, Any]:
+ """Execute tool"""
+
+ # Check if tool exists
+ tool = tool_registry.get_tool(tool_name)
+ if not tool:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Tool '{tool_name}' not found"
+ )
+
+ # Check permissions
+ user_permissions = [perm.value for perm in user.permissions]
+ for required_perm in tool.required_permissions:
+ if required_perm not in user_permissions:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail=f"Permission '{required_perm}' required for tool '{tool_name}'"
+ )
+
+ # Update session
+ if session_id:
+ session = await self.get_session(session_id)
+ if session:
+ session["tool_usage_count"] += 1
+
+ # Execute tool
+ try:
+ result = await self._execute_tool_logic(tool_name, parameters, user)
+
+ return {
+ "success": True,
+ "tool": tool_name,
+ "result": result,
+ "timestamp": datetime.utcnow().isoformat()
+ }
+
+ except Exception as e:
+ return {
+ "success": False,
+ "tool": tool_name,
+ "error": str(e),
+ "timestamp": datetime.utcnow().isoformat()
+ }
+
+ async def _execute_tool_logic(
+ self,
+ tool_name: str,
+ parameters: Dict[str, Any],
+ user: User
+ ) -> Any:
+ """Execute tool logic"""
+
+ if tool_name == "create_item":
+ return await self._create_item(parameters)
+
+ elif tool_name == "search_items":
+ return await self._search_items(parameters)
+
+ elif tool_name == "analyze_items":
+ return await self._analyze_items(parameters)
+
+ elif tool_name == "manage_users":
+ return await self._manage_users(parameters, user)
+
+ else:
+ raise ValueError(f"Tool '{tool_name}' implementation not found")
+
+ async def _create_item(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ """Create item tool implementation"""
+ from src.schemas.items import ItemCreate
+
+ try:
+ item_create = ItemCreate(**parameters)
+ created_item = await self.item_crud.create(item_create)
+
+ return {
+ "action": "create_item",
+ "item": created_item.dict(),
+ "message": f"Item '{created_item.name}' created successfully"
+ }
+ except Exception as e:
+ raise ValueError(f"Failed to create item: {str(e)}")
+
+ async def _search_items(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ """Search item tool implementation"""
+ query = parameters.get("query", "")
+ category = parameters.get("category")
+ min_price = parameters.get("min_price")
+ max_price = parameters.get("max_price")
+ limit = parameters.get("limit", 10)
+
+ # Search logic implementation
+ all_items = await self.item_crud.get_all()
+ filtered_items = []
+
+ for item in all_items:
+ # Text search
+ if query.lower() not in item.name.lower() and query.lower() not in (item.description or "").lower():
+ continue
+
+ # Category filter
+ if category and getattr(item, 'category', None) != category:
+ continue
+
+ # Price filter
+ if min_price is not None and item.price < min_price:
+ continue
+ if max_price is not None and item.price > max_price:
+ continue
+
+ filtered_items.append(item)
+
+ # Result limit
+ result_items = filtered_items[:limit]
+
+ return {
+ "action": "search_items",
+ "query": query,
+ "total_found": len(filtered_items),
+ "returned_count": len(result_items),
+ "items": [item.dict() for item in result_items]
+ }
+
+ async def _analyze_items(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ """Analyze item tool implementation"""
+ analysis_type = parameters.get("analysis_type")
+ date_range = parameters.get("date_range", {})
+
+ all_items = await self.item_crud.get_all()
+
+ if analysis_type == "price_distribution":
+ prices = [item.price for item in all_items]
+ if not prices:
+ return {"analysis": "price_distribution", "result": "No items found"}
+
+ return {
+ "analysis": "price_distribution",
+ "result": {
+ "total_items": len(prices),
+ "min_price": min(prices),
+ "max_price": max(prices),
+ "average_price": sum(prices) / len(prices),
+ "price_ranges": {
+ "under_100k": len([p for p in prices if p < 100000]),
+ "100k_to_500k": len([p for p in prices if 100000 <= p < 500000]),
+ "500k_to_1m": len([p for p in prices if 500000 <= p < 1000000]),
+ "over_1m": len([p for p in prices if p >= 1000000])
+ }
+ }
+ }
+
+ elif analysis_type == "category_breakdown":
+ categories = {}
+ for item in all_items:
+ category = getattr(item, 'category', 'uncategorized')
+ categories[category] = categories.get(category, 0) + 1
+
+ return {
+ "analysis": "category_breakdown",
+ "result": {
+ "total_categories": len(categories),
+ "categories": categories
+ }
+ }
+
+ else:
+ raise ValueError(f"Unknown analysis type: {analysis_type}")
+
+ async def _manage_users(self, parameters: Dict[str, Any], requesting_user: User) -> Dict[str, Any]:
+ """Manage user tool implementation"""
+ action = parameters.get("action")
+
+ # Check admin permissions
+ if Permission.MANAGE_USERS not in requesting_user.permissions:
+ raise ValueError("Insufficient permissions for user management")
+
+ if action == "list":
+ users = [User(**user.dict()) for user in user_db.users.values()]
+ return {
+ "action": "list_users",
+ "total_users": len(users),
+ "users": [user.dict() for user in users]
+ }
+
+ elif action == "create":
+ user_data = parameters.get("user_data", {})
+ from src.auth.models import UserCreate
+
+ user_create = UserCreate(**user_data)
+ created_user = user_db.create_user(user_create)
+
+ return {
+ "action": "create_user",
+ "user": User(**created_user.dict()).dict(),
+ "message": f"User '{created_user.username}' created successfully"
+ }
+
+ else:
+ raise ValueError(f"Unknown user management action: {action}")
+
+# Global MCP server instance
+mcp_server = MCPServer()
+```
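+
+Note that the server records `last_activity` but never expires sessions, so a long-running deployment would accumulate them. A minimal reaper sketch (illustrative, not part of the template) could run as a background task:
+
+```python
+# Sketch: periodically close sessions idle longer than a TTL
+import asyncio
+from datetime import datetime, timedelta
+
+async def reap_stale_sessions(server: MCPServer, ttl_minutes: int = 30) -> None:
+    while True:
+        cutoff = datetime.utcnow() - timedelta(minutes=ttl_minutes)
+        stale = [
+            session_id
+            for session_id, session in server.active_sessions.items()
+            if session["last_activity"] < cutoff
+        ]
+        for session_id in stale:
+            await server.close_session(session_id)
+        await asyncio.sleep(60)
+```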
+
+## Step 6: MCP API Endpoint Implementation
+
+### MCP API Router (`src/api/routes/mcp.py`)
+
+```python
+from typing import Dict, Any, Optional
+from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks
+from pydantic import BaseModel
+
+from src.auth.dependencies import get_current_user_flexible, RequireMCPTools
+from src.auth.models import User
+from src.mcp.server import mcp_server
+from src.mcp.tools import ToolCategory
+
+router = APIRouter(prefix="/mcp", tags=["MCP"])
+
+class ToolExecuteRequest(BaseModel):
+ """Tool execution request"""
+ tool_name: str
+ parameters: Dict[str, Any]
+ session_id: Optional[str] = None
+
+class SessionCreateResponse(BaseModel):
+ """Session creation response"""
+ session_id: str
+ message: str
+
+@router.post("/session", response_model=SessionCreateResponse)
+async def create_mcp_session(
+ current_user: User = Depends(RequireMCPTools)
+):
+ """Create MCP session"""
+ session_id = await mcp_server.create_session(current_user)
+
+ return SessionCreateResponse(
+ session_id=session_id,
+ message=f"MCP session created (User: {current_user.username})"
+ )
+
+@router.delete("/session/{session_id}")
+async def close_mcp_session(
+ session_id: str,
+ current_user: User = Depends(RequireMCPTools)
+):
+ """Close MCP session"""
+ session = await mcp_server.get_session(session_id)
+
+ if not session:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Session not found"
+ )
+
+ # Check session owner
+ if session["user_id"] != current_user.id:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Cannot close another user's session"
+ )
+
+ await mcp_server.close_session(session_id)
+
+ return {"message": "Session closed successfully"}
+
+@router.get("/tools")
+async def list_mcp_tools(
+ category: Optional[ToolCategory] = None,
+ current_user: User = Depends(RequireMCPTools)
+):
+ """List available MCP tools"""
+ tools = await mcp_server.list_tools(current_user)
+
+ if category:
+ tools = [tool for tool in tools if tool["category"] == category]
+
+ return {
+ "user": current_user.username,
+ "total_tools": len(tools),
+ "tools": tools
+ }
+
+@router.post("/execute")
+async def execute_mcp_tool(
+ request: ToolExecuteRequest,
+ background_tasks: BackgroundTasks,
+ current_user: User = Depends(RequireMCPTools)
+):
+ """Execute MCP tool"""
+
+ # Check session (optional)
+ if request.session_id:
+ session = await mcp_server.get_session(request.session_id)
+ if not session:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Session not found"
+ )
+
+ if session["user_id"] != current_user.id:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Cannot use another user's session"
+ )
+
+ # Execute tool
+ result = await mcp_server.execute_tool(
+ tool_name=request.tool_name,
+ parameters=request.parameters,
+ user=current_user,
+ session_id=request.session_id
+ )
+
+ # Log tool usage in background
+ background_tasks.add_task(
+ log_tool_usage,
+ current_user.id,
+ request.tool_name,
+ result["success"]
+ )
+
+ return result
+
+@router.get("/sessions")
+async def list_user_sessions(
+ current_user: User = Depends(RequireMCPTools)
+):
+ """List active user sessions"""
+ user_sessions = []
+
+ for session_id, session_data in mcp_server.active_sessions.items():
+ if session_data["user_id"] == current_user.id:
+ user_sessions.append({
+ "session_id": session_id,
+ "created_at": session_data["created_at"],
+ "tool_usage_count": session_data["tool_usage_count"],
+ "last_activity": session_data["last_activity"]
+ })
+
+ return {
+ "user": current_user.username,
+ "active_sessions": len(user_sessions),
+ "sessions": user_sessions
+ }
+
+@router.get("/stats")
+async def get_mcp_stats(
+ current_user: User = Depends(RequireMCPTools)
+):
+ """MCP usage statistics"""
+ total_sessions = len(mcp_server.active_sessions)
+ user_sessions = len([
+ s for s in mcp_server.active_sessions.values()
+ if s["user_id"] == current_user.id
+ ])
+
+ return {
+ "user_stats": {
+ "username": current_user.username,
+ "active_sessions": user_sessions,
+ "permissions": [perm.value for perm in current_user.permissions]
+ },
+ "server_stats": {
+ "total_active_sessions": total_sessions,
+ "available_tools": len(await mcp_server.list_tools(current_user))
+ }
+ }
+
+async def log_tool_usage(user_id: str, tool_name: str, success: bool):
+ """Log tool usage (background job)"""
+ import logging
+
+ logger = logging.getLogger("mcp.usage")
+ logger.info(
+ f"Tool usage - User: {user_id}, Tool: {tool_name}, Success: {success}"
+ )
+```
+
+## Step 7: Application Integration and Testing
+
+### Main Application (`src/main.py`)
+
+```python
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from src.auth.routes import router as auth_router
+from src.api.routes.items import router as items_router
+from src.api.routes.mcp import router as mcp_router
+from src.core.config import settings
+
+app = FastAPI(
+ title="AI Integrated API",
+ description="AI model integrated MCP-based API server",
+ version="1.0.0"
+)
+
+# CORS settings
+app.add_middleware(
+ CORSMiddleware,
+ allow_origins=settings.ALLOWED_HOSTS,
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+)
+
+# Include routers
+app.include_router(auth_router)
+app.include_router(items_router, prefix="/api/v1")
+app.include_router(mcp_router, prefix="/api/v1")
+
+@app.get("/")
+async def root():
+ return {
+ "message": "AI Integrated API with MCP Support",
+ "version": "1.0.0",
+ "endpoints": {
+ "authentication": "/auth",
+ "items": "/api/v1/items",
+ "mcp": "/api/v1/mcp",
+ "docs": "/docs"
+ }
+ }
+
+@app.get("/health")
+async def health_check():
+ """Health check endpoint"""
+ return {
+ "status": "healthy",
+ "version": "1.0.0",
+ "services": {
+ "auth": "operational",
+ "mcp": "operational",
+ "database": "operational"
+ }
+ }
+```
+
+### Run Server and Test
+
+
+
+```console
+$ cd ai-integrated-api
+$ fastkit runserver
+Starting FastAPI server at 127.0.0.1:8000...
+
+# User login
+$ curl -X POST "http://localhost:8000/auth/login" \
+ -H "Content-Type: application/x-www-form-urlencoded" \
+ -d "username=admin&password=admin123"
+
+{
+ "access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
+ "refresh_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
+ "token_type": "bearer",
+ "expires_in": 1800,
+ "user": {
+ "id": "123e4567-e89b-12d3-a456-426614174000",
+ "email": "admin@example.com",
+ "username": "admin",
+ "role": "admin",
+ "permissions": ["read:items", "write:items", ...]
+ }
+}
+
+# Create MCP session
+$ curl -X POST "http://localhost:8000/api/v1/mcp/session" \
+ -H "Authorization: Bearer YOUR_ACCESS_TOKEN"
+
+{
+ "session_id": "abc123-def456-ghi789",
+ "message": "MCP session created (User: admin)"
+}
+
+# List available tools
+$ curl "http://localhost:8000/api/v1/mcp/tools" \
+ -H "Authorization: Bearer YOUR_ACCESS_TOKEN"
+
+{
+ "user": "admin",
+ "total_tools": 4,
+ "tools": [
+ {
+ "name": "create_item",
+ "description": "Create a new item",
+ "category": "data_management",
+ "parameters": {...},
+ "examples": [...]
+ },
+ ...
+ ]
+}
+
+# Execute MCP tool (create item)
+$ curl -X POST "http://localhost:8000/api/v1/mcp/execute" \
+ -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "tool_name": "create_item",
+ "parameters": {
+ "name": "AI generated item",
+ "description": "MCP through AI generated item",
+ "price": 500000,
+ "category": "ai_generated"
+ },
+ "session_id": "abc123-def456-ghi789"
+ }'
+
+{
+ "success": true,
+ "tool": "create_item",
+ "result": {
+ "action": "create_item",
+ "item": {
+ "id": 1,
+ "name": "AI generated item",
+ "description": "MCP through AI generated item",
+ "price": 500000,
+ "category": "ai_generated",
+ "created_at": "2024-01-01T12:00:00Z"
+ },
+ "message": "Item 'AI generated item' created successfully"
+ },
+ "timestamp": "2024-01-01T12:00:00.123456Z"
+}
+
+# Execute MCP tool (search item)
+$ curl -X POST "http://localhost:8000/api/v1/mcp/execute" \
+ -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "tool_name": "search_items",
+ "parameters": {
+ "query": "AI",
+ "limit": 5
+ }
+ }'
+```
+
+
+
+## Step 8: AI Client Example
+
+### Python MCP Client Example
+
+```python
+# client_example.py
+import asyncio
+import aiohttp
+from typing import Dict, Any, List
+
+class MCPClient:
+ """MCP client example"""
+
+ def __init__(self, base_url: str, api_key: str):
+ self.base_url = base_url
+ self.api_key = api_key
+ self.session_id = None
+ self.session = None
+
+ async def __aenter__(self):
+ self.session = aiohttp.ClientSession(
+ headers={"X-API-Key": self.api_key}
+ )
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ if self.session_id:
+ await self.close_session()
+ if self.session:
+ await self.session.close()
+
+    async def create_session(self) -> str:
+        """Create MCP session"""
+        # Close any previous session first so repeated workflow calls don't leak sessions
+        if self.session_id:
+            await self.close_session()
+        async with self.session.post(f"{self.base_url}/api/v1/mcp/session") as resp:
+            data = await resp.json()
+            self.session_id = data["session_id"]
+            return self.session_id
+
+ async def close_session(self):
+ """Close MCP session"""
+ if self.session_id:
+ async with self.session.delete(f"{self.base_url}/api/v1/mcp/session/{self.session_id}"):
+ pass
+ self.session_id = None
+
+ async def list_tools(self) -> List[Dict[str, Any]]:
+ """List available tools"""
+ async with self.session.get(f"{self.base_url}/api/v1/mcp/tools") as resp:
+ data = await resp.json()
+ return data["tools"]
+
+ async def execute_tool(self, tool_name: str, parameters: Dict[str, Any]) -> Dict[str, Any]:
+ """Execute tool"""
+ payload = {
+ "tool_name": tool_name,
+ "parameters": parameters,
+ "session_id": self.session_id
+ }
+
+ async with self.session.post(
+ f"{self.base_url}/api/v1/mcp/execute",
+ json=payload
+ ) as resp:
+ return await resp.json()
+
+ async def ai_assistant_workflow(self, user_request: str) -> str:
+ """AI assistant workflow simulation"""
+
+ # 1. Create session
+ await self.create_session()
+ print(f"Session created: {self.session_id}")
+
+        # 2. Pick a tool via simple keyword matching (a real assistant would use NLP)
+        if "create" in user_request.lower():
+ # Create item request
+ result = await self.execute_tool("create_item", {
+ "name": "AI recommended item",
+ "description": "AI generated item based on user request",
+ "price": 100000,
+ "category": "ai_recommended"
+ })
+
+ if result["success"]:
+ item_name = result["result"]["item"]["name"]
+                return f"✅ Item '{item_name}' created successfully!"
+ else:
+ return f"❌ Item creation failed: {result.get('error', 'Unknown error')}"
+
+        elif "search" in user_request.lower() or "find" in user_request.lower():
+ # Search request
+            search_query = "Item"  # placeholder; a real assistant would extract this from the user request
+ result = await self.execute_tool("search_items", {
+ "query": search_query,
+ "limit": 5
+ })
+
+ if result["success"]:
+ items = result["result"]["items"]
+ item_list = "\n".join([f"- {item['name']} (₩{item['price']:,})" for item in items])
+ return f"🔍 Search results ({len(items)} items):\n{item_list}"
+ else:
+ return f"❌ Search failed: {result.get('error', 'Unknown error')}"
+
+        elif "analyze" in user_request.lower():
+ # Analyze request
+ result = await self.execute_tool("analyze_items", {
+ "analysis_type": "price_distribution"
+ })
+
+ if result["success"]:
+ analysis = result["result"]["result"]
+ return f"📊 Price analysis:\nAverage price: ₩{analysis['average_price']:,.0f}\nMinimum: ₩{analysis['min_price']:,} - Maximum: ₩{analysis['max_price']:,}"
+ else:
+ return f"❌ Analysis failed: {result.get('error', 'Unknown error')}"
+
+ else:
+ return "Sorry, I couldn't find a tool to handle that request."
+
+async def main():
+ """Client test"""
+ async with MCPClient("http://localhost:8000", "your-api-key-here") as client:
+
+ # List available tools
+ tools = await client.list_tools()
+ print(f"Available tools: {len(tools)}")
+ for tool in tools:
+ print(f"- {tool['name']}: {tool['description']}")
+
+ print("\n" + "="*50 + "\n")
+
+ # AI assistant simulation
+ test_requests = [
+ "Create a new item",
+ "Search for items",
+ "Analyze price distribution"
+ ]
+
+ for request in test_requests:
+ print(f"User request: {request}")
+ response = await client.ai_assistant_workflow(request)
+ print(f"AI response: {response}")
+ print("-" * 30)
+
+if __name__ == "__main__":
+ asyncio.run(main())
+```
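+
+Note that this client authenticates with an `X-API-Key` header, while the curl examples above use a JWT Bearer token. If your server only exposes the JWT flow, you can fetch a token first and pass it in the `Authorization` header instead. A minimal sketch, assuming the `/auth/login` endpoint from this tutorial (the `login` helper itself is hypothetical):
+
+```python
+# jwt_client_example.py - obtain a JWT and use it instead of an API key
+import aiohttp
+
+async def login(base_url: str, username: str, password: str) -> str:
+    """Fetch an access token from the tutorial's /auth/login endpoint."""
+    async with aiohttp.ClientSession() as session:
+        # /auth/login expects form-encoded credentials (see the curl example above)
+        async with session.post(
+            f"{base_url}/auth/login",
+            data={"username": username, "password": password},
+        ) as resp:
+            data = await resp.json()
+            return data["access_token"]
+
+# The client session would then be constructed with:
+#   aiohttp.ClientSession(headers={"Authorization": f"Bearer {token}"})
+```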
+
+## Summary
+
+In this tutorial, we implemented MCP (Model Context Protocol) integration with:
+
+- ✅ A JWT-based authentication system
+- ✅ Role-Based Access Control (RBAC)
+- ✅ An MCP server and tool system
+- ✅ Session-based context management
+- ✅ Secure API communication with AI models
+- ✅ Tool permission management and usage tracking
+- ✅ A working AI client example
+
+You can now build a complete MCP-based system in which AI models safely and efficiently use your API's functionality!
diff --git a/mkdocs.yml b/mkdocs.yml
index 899bad1..c88c9bf 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -56,6 +56,12 @@ nav:
- Tutorial:
- Getting Started: tutorial/getting-started.md
- Your First Project: tutorial/first-project.md
+ - Implementing Basic API Server: tutorial/basic-api-server.md
+ - Implementing Asynchronous CRUD API: tutorial/async-crud-api.md
+ - Database Integration: tutorial/database-integration.md
+ - Docker Deployment: tutorial/docker-deployment.md
+ - Implementing Custom Response Handling: tutorial/custom-response-handling.md
+ - Implementing MCP Server: tutorial/mcp-integration.md
- Contributing:
- Development Setup: contributing/development-setup.md
- Code Guidelines: contributing/code-guidelines.md
diff --git a/src/fastapi_fastkit/backend/inspector.py b/src/fastapi_fastkit/backend/inspector.py
index be1cfc7..222dde1 100644
--- a/src/fastapi_fastkit/backend/inspector.py
+++ b/src/fastapi_fastkit/backend/inspector.py
@@ -54,13 +54,27 @@ def __init__(self, template_path: str):
self.template_path = Path(template_path)
self.errors: List[str] = []
self.warnings: List[str] = []
- self.temp_dir = os.path.join(os.path.dirname(__file__), "temp")
+ # Create unique temp directory for each template to avoid conflicts
+ template_name = Path(template_path).name
+ self.temp_dir = os.path.join(os.path.dirname(__file__), f"temp_{template_name}")
self._cleanup_needed = False
self.template_config: Optional[Dict[str, Any]] = None
def __enter__(self) -> "TemplateInspector":
"""Enter context manager - create temp directory and copy template."""
try:
+ # Clean up any existing temp directory for this template
+ if os.path.exists(self.temp_dir):
+ debug_log(
+ f"Cleaning up existing temp directory: {self.temp_dir}", "info"
+ )
+ try:
+ shutil.rmtree(self.temp_dir)
+ except OSError as e:
+ debug_log(
+ f"Failed to cleanup existing temp directory: {e}", "warning"
+ )
+
os.makedirs(self.temp_dir, exist_ok=True)
copy_and_convert_template(str(self.template_path), self.temp_dir)
@@ -84,11 +98,37 @@ def _cleanup(self) -> None:
"""Cleanup temp directory if it exists and cleanup is needed."""
if self._cleanup_needed and os.path.exists(self.temp_dir):
try:
- shutil.rmtree(self.temp_dir)
- debug_log(f"Cleaned up temp directory {self.temp_dir}", "debug")
- except OSError as e:
+ # First, try to cleanup any Docker services that might be running
+ self._cleanup_docker_services()
+
+ # Wait a moment for Docker cleanup to complete
+ import time
+
+ time.sleep(2)
+
+ # Remove temp directory with retry mechanism
+ max_retries = 3
+ for attempt in range(max_retries):
+ try:
+ shutil.rmtree(self.temp_dir)
+ debug_log(f"Cleaned up temp directory {self.temp_dir}", "debug")
+ break
+ except OSError as e:
+ if attempt < max_retries - 1:
+ debug_log(
+ f"Failed to cleanup temp directory {self.temp_dir} (attempt {attempt + 1}): {e}. Retrying...",
+ "warning",
+ )
+ time.sleep(1)
+ else:
+ debug_log(
+ f"Failed to cleanup temp directory {self.temp_dir} after {max_retries} attempts: {e}",
+ "warning",
+ )
+ except Exception as e:
debug_log(
- f"Failed to cleanup temp directory {self.temp_dir}: {e}", "warning"
+ f"Unexpected error during cleanup of {self.temp_dir}: {e}",
+ "warning",
)
finally:
self._cleanup_needed = False
@@ -1036,12 +1076,23 @@ def _cleanup_docker_services(self) -> None:
try:
debug_log("Cleaning up Docker services", "info")
subprocess.run(
- ["docker-compose", "down", "-v"],
+ ["docker-compose", "down", "-v", "--remove-orphans"],
cwd=self.temp_dir,
capture_output=True,
text=True,
timeout=60,
)
+
+ try:
+ subprocess.run(
+ ["docker", "system", "prune", "-f"],
+ capture_output=True,
+ text=True,
+ timeout=30,
+ )
+ except Exception:
+ pass
+
except Exception as e:
debug_log(f"Failed to cleanup Docker services: {e}", "warning")
diff --git a/tests/test_backends/test_inspector.py b/tests/test_backends/test_inspector.py
index 644526d..bd1838c 100644
--- a/tests/test_backends/test_inspector.py
+++ b/tests/test_backends/test_inspector.py
@@ -5,7 +5,9 @@
# --------------------------------------------------------------------------
import json
import os
+import subprocess
import tempfile
+import unittest.mock
from pathlib import Path
from unittest.mock import MagicMock, mock_open, patch
@@ -80,7 +82,10 @@ def test_init(self) -> None:
assert inspector.template_path == self.template_path
assert inspector.errors == []
assert inspector.warnings == []
- assert inspector.temp_dir.endswith("temp")
+ # Check that temp_dir uses template name for uniqueness
+ template_name = Path(self.template_path).name
+ expected_temp_dir = f"temp_{template_name}"
+ assert inspector.temp_dir.endswith(expected_temp_dir)
def test_check_file_structure_valid(self) -> None:
"""Test _check_file_structure with valid structure."""
@@ -1409,3 +1414,203 @@ def test_run_docker_exec_tests_failure(self, mock_run: MagicMock) -> None:
# then
assert result is False
+
+ def test_context_manager_cleanup_existing_temp_dir(self) -> None:
+ """Test context manager cleans up existing temp directory before creating new one."""
+ # given
+ self.create_valid_template_structure()
+
+ # First, create an inspector to get the temp_dir path
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ temp_dir_path = inspector.temp_dir
+
+ # Create the temp directory manually to simulate existing directory
+ os.makedirs(temp_dir_path, exist_ok=True)
+
+ # Create a file in the temp directory to verify it gets cleaned up
+ test_file = os.path.join(temp_dir_path, "test_file.txt")
+ with open(test_file, "w") as f:
+ f.write("test content")
+
+ assert os.path.exists(test_file)
+
+ # when
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ with TemplateInspector(str(self.template_path)) as inspector:
+ # Should be properly initialized
+ assert inspector.template_path == self.template_path
+ assert inspector._cleanup_needed is True
+ assert os.path.exists(inspector.temp_dir)
+
+ # The temp directory should be clean (no previous test file)
+ test_file = os.path.join(inspector.temp_dir, "test_file.txt")
+ assert not os.path.exists(test_file)
+
+        # then
+        # The test verifies that the pre-existing temp directory was removed
+        # and a fresh one was created in its place
+
+ @patch(
+ "fastapi_fastkit.backend.inspector.TemplateInspector._cleanup_docker_services"
+ )
+ @patch("shutil.rmtree")
+ @patch("time.sleep")
+ def test_cleanup_method_with_retry_mechanism(
+ self,
+ mock_sleep: MagicMock,
+ mock_rmtree: MagicMock,
+ mock_docker_cleanup: MagicMock,
+ ) -> None:
+ """Test _cleanup method with retry mechanism for directory removal."""
+ # given
+ self.create_valid_template_structure()
+
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ inspector._cleanup_needed = True
+
+        # Create mock temp directory (tempfile is imported at module level)
+        mock_temp_dir = tempfile.mkdtemp()
+ inspector.temp_dir = mock_temp_dir
+
+        # Mock shutil.rmtree to fail twice, then succeed
+        mock_rmtree.side_effect = [
+            OSError("Permission denied"),  # First attempt fails
+            OSError("Device busy"),  # Second attempt fails
+            None,  # Third attempt succeeds
+        ]
+
+ # when
+ inspector._cleanup()
+
+ # then
+ assert inspector._cleanup_needed is False
+ assert mock_docker_cleanup.called
+ assert (
+ mock_sleep.call_count == 3
+ ) # Should sleep twice (between retries) + once after Docker cleanup
+ assert mock_rmtree.call_count == 3 # Should try 3 times
+
+        # Manual cleanup for test - shutil.rmtree is mocked here, so remove
+        # the (still empty) directory with os.rmdir instead
+        os.rmdir(mock_temp_dir)
+
+ @patch(
+ "fastapi_fastkit.backend.inspector.TemplateInspector._cleanup_docker_services"
+ )
+ @patch("shutil.rmtree")
+ @patch("time.sleep")
+ def test_cleanup_method_max_retries_exceeded(
+ self,
+ mock_sleep: MagicMock,
+ mock_rmtree: MagicMock,
+ mock_docker_cleanup: MagicMock,
+ ) -> None:
+ """Test _cleanup method when max retries are exceeded."""
+ # given
+ self.create_valid_template_structure()
+
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ inspector._cleanup_needed = True
+
+        # Create mock temp directory (tempfile is imported at module level)
+        mock_temp_dir = tempfile.mkdtemp()
+ inspector.temp_dir = mock_temp_dir
+
+        # Mock shutil.rmtree to always fail
+        mock_rmtree.side_effect = OSError("Permission denied")
+
+ # when
+ inspector._cleanup()
+
+ # then
+ assert inspector._cleanup_needed is False
+ assert mock_docker_cleanup.called
+ assert (
+ mock_sleep.call_count == 3
+ ) # Should sleep twice (between retries) + once after Docker cleanup
+ assert mock_rmtree.call_count == 3 # Should try 3 times and give up
+
+        # Manual cleanup for test - shutil.rmtree is mocked here, so remove
+        # the (still empty) directory with os.rmdir instead
+        os.rmdir(mock_temp_dir)
+
+ @patch("subprocess.run")
+ def test_cleanup_docker_services_success(self, mock_run: MagicMock) -> None:
+ """Test _cleanup_docker_services with successful cleanup."""
+ # given
+ self.create_valid_template_structure()
+
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ inspector.temp_dir = str(self.template_path)
+
+ # Mock successful Docker cleanup commands
+ mock_run.return_value = MagicMock(returncode=0)
+
+ # when
+ inspector._cleanup_docker_services()
+
+ # then
+ # Should call docker-compose down with proper arguments
+ expected_calls = [
+ unittest.mock.call(
+ ["docker-compose", "down", "-v", "--remove-orphans"],
+ cwd=str(self.template_path),
+ capture_output=True,
+ text=True,
+ timeout=60,
+ ),
+ unittest.mock.call(
+ ["docker", "system", "prune", "-f"],
+ capture_output=True,
+ text=True,
+ timeout=30,
+ ),
+ ]
+ mock_run.assert_has_calls(expected_calls)
+
+ @patch("subprocess.run")
+ def test_cleanup_docker_services_with_exception(self, mock_run: MagicMock) -> None:
+ """Test _cleanup_docker_services when Docker commands fail."""
+ # given
+ self.create_valid_template_structure()
+
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ inspector.temp_dir = str(self.template_path)
+
+ # Mock Docker cleanup to raise exception
+ mock_run.side_effect = subprocess.CalledProcessError(1, "docker-compose")
+
+ # when & then (should not raise exception)
+ inspector._cleanup_docker_services()
+
+ # Should have attempted to run the command
+ mock_run.assert_called()
+
+ @patch("subprocess.run")
+ def test_cleanup_docker_services_system_prune_fails(
+ self, mock_run: MagicMock
+ ) -> None:
+ """Test _cleanup_docker_services when system prune fails but main cleanup succeeds."""
+ # given
+ self.create_valid_template_structure()
+
+ with patch("fastapi_fastkit.backend.transducer.copy_and_convert_template"):
+ inspector = TemplateInspector(str(self.template_path))
+ inspector.temp_dir = str(self.template_path)
+
+ # Mock first call succeeds, second call (system prune) fails
+ mock_run.side_effect = [
+ MagicMock(returncode=0), # docker-compose down succeeds
+ subprocess.CalledProcessError(1, "docker"), # system prune fails
+ ]
+
+ # when & then (should not raise exception)
+ inspector._cleanup_docker_services()
+
+ # Should have attempted both commands
+ assert mock_run.call_count == 2