diff --git a/.codacy/cli.sh b/.codacy/cli.sh new file mode 100755 index 0000000..7057e3b --- /dev/null +++ b/.codacy/cli.sh @@ -0,0 +1,149 @@ +#!/usr/bin/env bash + + +set -e +o pipefail + +# Set up paths first +bin_name="codacy-cli-v2" + +# Determine OS-specific paths +os_name=$(uname) +arch=$(uname -m) + +case "$arch" in +"x86_64") + arch="amd64" + ;; +"x86") + arch="386" + ;; +"aarch64"|"arm64") + arch="arm64" + ;; +esac + +if [ -z "$CODACY_CLI_V2_TMP_FOLDER" ]; then + if [ "$(uname)" = "Linux" ]; then + CODACY_CLI_V2_TMP_FOLDER="$HOME/.cache/codacy/codacy-cli-v2" + elif [ "$(uname)" = "Darwin" ]; then + CODACY_CLI_V2_TMP_FOLDER="$HOME/Library/Caches/Codacy/codacy-cli-v2" + else + CODACY_CLI_V2_TMP_FOLDER=".codacy-cli-v2" + fi +fi + +version_file="$CODACY_CLI_V2_TMP_FOLDER/version.yaml" + +fatal() { echo "$1" >&2; exit 1; } +get_version_from_yaml() { + if [ -f "$version_file" ]; then + local version=$(grep -o 'version: *"[^"]*"' "$version_file" | cut -d'"' -f2) + if [ -n "$version" ]; then + echo "$version" + return 0 + fi + fi + return 1 +} + +get_latest_version() { + local response + if [ -n "$GH_TOKEN" ]; then + response=$(curl -Lq --header "Authorization: Bearer $GH_TOKEN" "https://api.github.com/repos/codacy/codacy-cli-v2/releases/latest" 2>/dev/null) + else + response=$(curl -Lq "https://api.github.com/repos/codacy/codacy-cli-v2/releases/latest" 2>/dev/null) + fi + + handle_rate_limit "$response" + local version=$(echo "$response" | grep -m 1 tag_name | cut -d'"' -f4) + echo "$version" +} + +handle_rate_limit() { + local response="$1" + if echo "$response" | grep -q "API rate limit exceeded"; then + fatal "Error: GitHub API rate limit exceeded. Please try again later" + fi +} + +download_file() { + local url="$1" + + echo "Downloading from URL: ${url}" + if command -v curl > /dev/null 2>&1; then + curl -# -LS "$url" -O + elif command -v wget > /dev/null 2>&1; then + wget "$url" + else + fatal "Error: Could not find curl or wget, please install one." 
+ fi +} + +download() { + local url="$1" + local output_folder="$2" + + ( cd "$output_folder" && download_file "$url" ) +} + +download_cli() { + # OS name lower case + suffix=$(echo "$os_name" | tr '[:upper:]' '[:lower:]') + + local bin_folder="$1" + local bin_path="$2" + local version="$3" + + if [ ! -f "$bin_path" ]; then + echo "πŸ“₯ Downloading CLI version $version..." + + remote_file="codacy-cli-v2_${version}_${suffix}_${arch}.tar.gz" + url="https://github.com/codacy/codacy-cli-v2/releases/download/${version}/${remote_file}" + + download "$url" "$bin_folder" + tar xzfv "${bin_folder}/${remote_file}" -C "${bin_folder}" + fi +} + +# Warn if CODACY_CLI_V2_VERSION is set and update is requested +if [ -n "$CODACY_CLI_V2_VERSION" ] && [ "$1" = "update" ]; then + echo "⚠️ Warning: Performing update with forced version $CODACY_CLI_V2_VERSION" + echo " Unset CODACY_CLI_V2_VERSION to use the latest version" +fi + +# Ensure version.yaml exists and is up to date +if [ ! -f "$version_file" ] || [ "$1" = "update" ]; then + echo "ℹ️ Fetching latest version..." + version=$(get_latest_version) + mkdir -p "$CODACY_CLI_V2_TMP_FOLDER" + echo "version: \"$version\"" > "$version_file" +fi + +# Set the version to use +if [ -n "$CODACY_CLI_V2_VERSION" ]; then + version="$CODACY_CLI_V2_VERSION" +else + version=$(get_version_from_yaml) +fi + + +# Set up version-specific paths +bin_folder="${CODACY_CLI_V2_TMP_FOLDER}/${version}" + +mkdir -p "$bin_folder" +bin_path="$bin_folder"/"$bin_name" + +# Download the tool if not already installed +download_cli "$bin_folder" "$bin_path" "$version" +chmod +x "$bin_path" + +run_command="$bin_path" +if [ -z "$run_command" ]; then + fatal "Codacy cli v2 binary could not be found." 
+fi + +if [ "$#" -eq 1 ] && [ "$1" = "download" ]; then + echo "Codacy cli v2 download succeeded" +else + eval "$run_command $*" +fi \ No newline at end of file diff --git a/.codacy/codacy.yaml b/.codacy/codacy.yaml new file mode 100644 index 0000000..15365c7 --- /dev/null +++ b/.codacy/codacy.yaml @@ -0,0 +1,15 @@ +runtimes: + - dart@3.7.2 + - go@1.22.3 + - java@17.0.10 + - node@22.2.0 + - python@3.11.11 +tools: + - dartanalyzer@3.7.2 + - eslint@8.57.0 + - lizard@1.17.31 + - pmd@7.11.0 + - pylint@3.3.6 + - revive@1.7.0 + - semgrep@1.78.0 + - trivy@0.66.0 diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..85adc25 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,62 @@ +# Git +.git +.gitignore + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +*.egg-info/ +dist/ +build/ +.pytest_cache/ +.mypy_cache/ +.coverage +htmlcov/ + +# Virtual environments +venv/ +env/ +ENV/ +.venv + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Documentation +docs/_build/ +*.md +LICENSE + +# Tests +tests/ +.pytest_cache/ + +# Docker +.dockerignore +Dockerfile +docker-compose.yml + +# CI/CD +.github/ + +# Development +Makefile +*.log + +# Data directories +data/ +postgres-data/ +minio-data/ +logs/ + +# Config examples +.env.example +config.example.yaml diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..a008e9a --- /dev/null +++ b/.env.example @@ -0,0 +1,39 @@ +# Database Router Environment Configuration + +# Database Configuration +DB_PROVIDER=postgresql +DB_HOST=localhost +DB_PORT=5432 +DB_DATABASE=database_router +DB_USERNAME=postgres +DB_PASSWORD=postgres +DB_POOL_SIZE=10 +DB_MAX_OVERFLOW=20 +DB_ECHO=false + +# Object Storage Configuration +OBJECT_PROVIDER=minio +OBJECT_ENDPOINT=localhost:9000 +OBJECT_ACCESS_KEY=minioadmin +OBJECT_SECRET_KEY=minioadmin +OBJECT_BUCKET_PREFIX=dev +OBJECT_USE_SSL=false +OBJECT_VERSIONING=true +OBJECT_RETENTION_DAYS=90 + +# API Configuration +API_HOST=0.0.0.0 +API_PORT=8000 
+API_RELOAD=false +API_WORKERS=1 +API_SECRET_KEY=change-me-in-production +API_ALGORITHM=HS256 +API_ACCESS_TOKEN_EXPIRE_MINUTES=30 + +# Monitoring Configuration +MONITORING_PROMETHEUS_ENABLED=true +MONITORING_OPENTELEMETRY_ENABLED=false +MONITORING_LOG_LEVEL=INFO + +# Application Configuration +DEBUG=false diff --git a/.github/workflows/docker-multiarch.yml b/.github/workflows/docker-multiarch.yml new file mode 100644 index 0000000..057f01a --- /dev/null +++ b/.github/workflows/docker-multiarch.yml @@ -0,0 +1,55 @@ +name: Docker Multi-Arch Build & Push + +on: + push: + branches: [ main, copilot/add-database-router ] + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository_owner }}/database-router + steps: + - name: Checkout + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + + - name: Set up QEMU + uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Login to GHCR + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker metadata + id: meta + uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1 + with: + images: | + ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,value=${{ github.sha }} + type=raw,value={{date 'YYYYMMDD-HHmmss' tz='UTC'}} + type=ref,event=branch + type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }} + + - name: Build and push (distroless) + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . 
+ file: Dockerfile.optimized + target: distroless + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..20fbf70 --- /dev/null +++ b/.gitignore @@ -0,0 +1,148 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +Pipfile.lock + +# PEP 582 +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Database +*.db +*.sqlite + +# Docker volumes +data/ +postgres-data/ +minio-data/ + +# Logs +logs/ +*.log + + +#Ignore vscode AI rules +.github/instructions/codacy.instructions.md diff --git 
a/ALMOps_v4/ALMOps_Financial_Model_v4.xlsx b/ALMOps_v4/ALMOps_Financial_Model_v4.xlsx new file mode 100644 index 0000000..7e2526e Binary files /dev/null and b/ALMOps_v4/ALMOps_Financial_Model_v4.xlsx differ diff --git a/ALMOps_v4/ALMOps_Hybrid_Report_v4.docx b/ALMOps_v4/ALMOps_Hybrid_Report_v4.docx new file mode 100644 index 0000000..7d75d4e Binary files /dev/null and b/ALMOps_v4/ALMOps_Hybrid_Report_v4.docx differ diff --git a/ALMOps_v4/ALMOps_Investor_Deck_v4.pptx b/ALMOps_v4/ALMOps_Investor_Deck_v4.pptx new file mode 100644 index 0000000..b5a3fc5 Binary files /dev/null and b/ALMOps_v4/ALMOps_Investor_Deck_v4.pptx differ diff --git a/ALMOps_v4/ALMOps_Investor_Report_v4.docx b/ALMOps_v4/ALMOps_Investor_Report_v4.docx new file mode 100644 index 0000000..1ff0237 Binary files /dev/null and b/ALMOps_v4/ALMOps_Investor_Report_v4.docx differ diff --git a/ALMOps_v4/ALMOps_Technical_Report_v4.docx b/ALMOps_v4/ALMOps_Technical_Report_v4.docx new file mode 100644 index 0000000..6a70c5e Binary files /dev/null and b/ALMOps_v4/ALMOps_Technical_Report_v4.docx differ diff --git a/ALMOps_v4_Deliverables.zip b/ALMOps_v4_Deliverables.zip new file mode 100644 index 0000000..30c056d Binary files /dev/null and b/ALMOps_v4_Deliverables.zip differ diff --git a/ALMOps_v4_Generate_Full.py b/ALMOps_v4_Generate_Full.py new file mode 100644 index 0000000..6ebfb13 --- /dev/null +++ b/ALMOps_v4_Generate_Full.py @@ -0,0 +1,141 @@ + +from pathlib import Path +from openpyxl import Workbook +from docx import Document +from pptx import Presentation +import zipfile + +# Create base directory +base = Path('ALMOps_v4') +base.mkdir(exist_ok=True) + +# ------------------ +# Excel Financial Model v4 with KPIs and Compliance +# ------------------ +wb = Workbook() + +# Assumptions sheet +ws = wb.active +ws.title = 'Assumptions' +assumptions = [ + ('Start MRR (€)', 10000), + ('Monthly Growth Rate (%)', 15), + ('Monthly Churn Rate (%)', 4), + ('Starting Customers', 10), + ('ARPA (€)', 1000), + ('CAC (€)', 
2000), + ('Gross Margin (%)', 65), + ('Starting Cash (€)', 500000), + ('Exchange Rate (1€ -> $)', 1.07) +] +ws.append(['Parameter','Value']) +for k,v in assumptions: + ws.append([k,v]) + +# Revenue Forecast sheet (24 months) +ws2 = wb.create_sheet('Revenue_Forecast') +ws2.append(['Month Index','Month','MRR (€)','New Customers','Churned Customers','Total Customers','ARR (€)','MRR ($)','ARR ($)']) +mrr = assumptions[0][1] +growth = assumptions[1][1]/100 +churn = assumptions[2][1]/100 +customers = assumptions[3][1] +arpa = assumptions[4][1] +ex_rate = assumptions[8][1] +for i in range(1,25): + month_name = f'Month {i}' + projected_mrr = mrr*(1+growth)*(1-churn) + new_customers = max(0,int(round((projected_mrr-mrr)/arpa))) if projected_mrr>mrr else 0 + churned_customers = int(round(customers*churn)) + customers = customers + new_customers - churned_customers + arr = mrr*12 + mrr_usd = mrr*ex_rate + arr_usd = arr*ex_rate + ws2.append([i,month_name,round(mrr,2),new_customers,churned_customers,customers,round(arr,2),round(mrr_usd,2),round(arr_usd,2)]) + mrr = projected_mrr + +# P&L sheet +ws3 = wb.create_sheet('P&L') +ws3.append(['Year','Total Revenue (€)','COGS (€)','Gross Profit (€)','OPEX (€)','Net Profit (€)']) +rows = list(ws2.iter_rows(min_row=2, max_col=3, values_only=True)) +yr1_rev = sum(r[2] for r in rows[:12]) +yr2_rev = sum(r[2] for r in rows[12:24]) +cogs_pct = 1-(assumptions[6][1]/100) +yr1_cogs = round(yr1_rev*cogs_pct,2) +yr2_cogs = round(yr2_rev*cogs_pct,2) +yr1_gross = yr1_rev-yr1_cogs +yr2_gross = yr2_rev-yr2_cogs +yr1_opex,yr2_opex = 300000,400000 +ws3.append([1,round(yr1_rev,2),yr1_cogs,round(yr1_gross,2),yr1_opex,round(yr1_gross-yr1_opex,2)]) +ws3.append([2,round(yr2_rev,2),yr2_cogs,round(yr2_gross,2),yr2_opex,round(yr2_gross-yr2_opex,2)]) + +# KPIs sheet +ws4 = wb.create_sheet('KPIs') +ws4.append(['KPI','Value']) +mrr_final = list(ws2.iter_rows(min_row=2,min_col=3,max_col=3,values_only=True))[-1][0] +arr_final = mrr_final*12 +lifetime_months = 
1/churn if churn>0 else 0 +ltv = arpa*lifetime_months*(assumptions[6][1]/100) +ws4.append(['MRR (€)',round(mrr_final,2)]) +ws4.append(['ARR (€)',round(arr_final,2)]) +ws4.append(['LTV (€)',round(ltv,2)]) +ws4.append(['CAC (€)',assumptions[5][1]]) +ws4.append(['LTV:CAC',round(ltv/assumptions[5][1],2)]) +ws4.append(['Churn Rate (%)',assumptions[2][1]]) +ws4.append(['Rule of 40',round((growth*100)+((yr2_gross-yr2_opex)/yr2_rev*100),2)]) +ws4.append(['Exchange Rate (1€->$)',assumptions[8][1]]) + +# Compliance Checklist sheet +ws5 = wb.create_sheet('Compliance_Checklist') +ws5.append(['Standard','Process Area','Clause','ALMOps Module','Coverage (%)','Notes']) +sample_compliance = [ + ('ASPICE','SYS.1','ISO 26262 Part6 Clause6','Requirements Mining Agent','100','Traces requirements to test cases'), + ('ASPICE','SYS.2','ISO 26262 Part6 Clause7','Architecture Validator Module','100','Validates architecture & ASIL decomposition'), + ('ASPICE','SWE.1','ISO 26262 Clause6','Req-2-Test Linker','100','Links requirements to test cases') +] +for row in sample_compliance: + ws5.append(row) + +excel_path = base/'ALMOps_Financial_Model_v4.xlsx' +wb.save(excel_path) + +# ------------------ +# DOCX reports +# ------------------ +def make_docx(path,title,sections): + doc = Document() + doc.add_heading(title,level=0) + for sec_title,sec_body in sections: + doc.add_heading(sec_title,level=1) + doc.add_paragraph(sec_body) + doc.save(path) + +investor_sections = [('Executive Summary','Pre-filled market data, TAM, CAGR, competitors'), + ('Market Opportunity','Verified 2024-2025 EU & US automotive software market data'), + ('Financials','Link to Excel financial model and KPIs')] +technical_sections = [('Technical Architecture','AI/Agentic Framework, Process Mining, MBSE pipeline diagrams'), + ('Compliance Mapping','ASPICE -> ISO 26262 -> ISO 21434 compliance table')] +hybrid_sections = [('Overview','Summary of investor + technical highlights'), + ('Roadmap','Concept -> MVP -> Release 24 
months timeline')] + +make_docx(base/'ALMOps_Investor_Report_v4.docx','ALMOps Investor Report v4',investor_sections) +make_docx(base/'ALMOps_Technical_Report_v4.docx','ALMOps Technical Report v4',technical_sections) +make_docx(base/'ALMOps_Hybrid_Report_v4.docx','ALMOps Hybrid Report v4',hybrid_sections) + +# ------------------ +# PPTX Deck +# ------------------ +ppt = Presentation() +for i in range(25): + slide = ppt.slides.add_slide(ppt.slide_layouts[1]) + slide.shapes.title.text = f'Slide {i+1} Placeholder' +ppt.save(base/'ALMOps_Investor_Deck_v4.pptx') + +# ------------------ +# ZIP everything +# ------------------ +zip_path = Path('ALMOps_v4_Deliverables.zip') +with zipfile.ZipFile(zip_path,'w',zipfile.ZIP_DEFLATED) as zf: + for f in base.rglob('*'): + zf.write(f,arcname=f.relative_to(base.parent)) + +print(f'All ALMOps v4 deliverables generated in folder {base}, ZIP: {zip_path}') diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..7f098a2 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,90 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [0.1.0] - 2024-01-01 + +### Added +- Initial release of Database Router +- FastAPI-based REST API with comprehensive endpoints +- PostgreSQL + pgvector integration for structured data and vector embeddings +- MinIO/S3 compatible object storage support +- SQLAlchemy ORM models with multi-tenancy support +- Alembic database migrations +- Adapter pattern for pluggable database and storage backends +- Vector similarity search with pgvector (IVFFlat indexing) +- Presigned URL generation for object access +- Comprehensive API documentation (Swagger/ReDoc) +- Docker Compose setup for local development +- Prometheus metrics integration +- Health check endpoints +- Configuration management with Pydantic Settings +- Multi-tenant data isolation +- Soft delete support for data retention +- JSONB metadata storage for flexible attributes + +### Core Tables +- `tenants` - Multi-tenant isolation +- `users` - User management with RBAC +- `documents` - Document metadata +- `document_chunks` - Text chunks with embeddings for RAG +- `objects` - Object storage references +- `embeddings` - Flexible vector storage +- `configurations` - System configuration +- `backups` - Backup tracking + +### API Endpoints +- `/data/*` - CRUD operations for documents and chunks +- `/vector/*` - Vector similarity and hybrid search +- `/objects/*` - Object storage operations +- `/admin/*` - Health checks, configuration, backups + +### Documentation +- Comprehensive README with quick start guide +- API Reference (API.md) +- Architecture documentation (ARCHITECTURE.md) +- Deployment guide (DEPLOYMENT.md) +- Contributing guidelines (CONTRIBUTING.md) +- Quick start guide (QUICKSTART.md) + +### Development Tools +- Makefile for common tasks +- Test suite with pytest +- Utility modules for helpers and logging +- Development Docker Compose setup +- Example configuration files + +### Infrastructure +- Docker containerization +- PostgreSQL with pgvector extension +- MinIO object storage +- Environment-based 
configuration +- Logging and monitoring setup + +## [Unreleased] + +### Planned Features +- JWT authentication and authorization +- Advanced hybrid RAG with BM25 +- Multi-cloud object storage support (GCS, Azure Blob) +- GraphQL API +- Real-time subscriptions with WebSockets +- Advanced analytics and reporting +- Automated backup and restore +- Multi-region replication +- Enhanced security features +- Performance optimizations +- Caching layer with Redis +- Advanced monitoring with OpenTelemetry + +### Known Issues +- Authentication not yet implemented (planned for v0.2.0) +- Hybrid RAG search uses basic vector search (enhancement planned) +- Limited cloud provider support (expansion planned) + +--- + +For detailed changes, see the [Git commit history](https://github.com/SoftwareDevLabs/Database/commits). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..e89de09 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,344 @@ +# Contributing to Database Router + +Thank you for your interest in contributing to Database Router! This document provides guidelines and instructions for contributing. + +## Code of Conduct + +- Be respectful and inclusive +- Welcome newcomers and help them get started +- Focus on constructive feedback +- Respect differing viewpoints and experiences + +## How to Contribute + +### Reporting Bugs + +1. Check if the bug has already been reported in [Issues](https://github.com/SoftwareDevLabs/Database/issues) +2. If not, create a new issue with: + - Clear title and description + - Steps to reproduce + - Expected vs actual behavior + - System information (OS, Python version, etc.) + - Relevant logs or error messages + +### Suggesting Features + +1. Check existing issues for similar suggestions +2. Create a new issue with: + - Clear description of the feature + - Use cases and benefits + - Potential implementation approach + - Any drawbacks or considerations + +### Pull Requests + +1. Fork the repository +2. 
Create a feature branch (`git checkout -b feature/amazing-feature`) +3. Make your changes +4. Write or update tests +5. Ensure all tests pass +6. Run linters and formatters +7. Commit your changes (`git commit -m 'Add amazing feature'`) +8. Push to the branch (`git push origin feature/amazing-feature`) +9. Open a Pull Request + +## Development Setup + +### Prerequisites + +- Python 3.9+ +- Docker and Docker Compose +- Git + +### Setup Steps + +1. Clone the repository: +```bash +git clone https://github.com/SoftwareDevLabs/Database.git +cd Database +``` + +2. Create virtual environment: +```bash +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate +``` + +3. Install dependencies: +```bash +make dev-install +``` + +4. Copy environment file: +```bash +cp .env.example .env +``` + +5. Start development services: +```bash +make docker-up +``` + +6. Run migrations: +```bash +make db-upgrade +``` + +## Development Workflow + +### Running Tests + +```bash +# Run all tests +make test + +# Run specific test file +pytest tests/test_api.py -v + +# Run with coverage +pytest --cov=database_router --cov-report=html +``` + +### Code Quality + +```bash +# Format code +make format + +# Run linters +make lint + +# Type checking +mypy src/ +``` + +### Running the API + +```bash +# Development mode (with auto-reload) +make run + +# Production mode +make run-prod +``` + +### Database Migrations + +```bash +# Create new migration +make migrate + +# Apply migrations +make db-upgrade + +# Rollback last migration +make db-downgrade +``` + +## Coding Standards + +### Python Style + +- Follow PEP 8 +- Use Black for formatting (line length: 88) +- Use type hints for all functions +- Write docstrings for all public functions and classes + +Example: +```python +from typing import List, Optional + +def process_documents( + document_ids: List[str], + tenant_id: Optional[str] = None +) -> List[Dict[str, Any]]: + """ + Process multiple documents. 
+ + Args: + document_ids: List of document IDs to process + tenant_id: Optional tenant ID for filtering + + Returns: + List of processed document dictionaries + + Raises: + ValueError: If document_ids is empty + """ + if not document_ids: + raise ValueError("document_ids cannot be empty") + + # Implementation... + return [] +``` + +### API Design + +- Follow RESTful conventions +- Use appropriate HTTP methods (GET, POST, PUT, DELETE) +- Return proper status codes +- Include comprehensive error messages +- Use Pydantic models for request/response validation + +### Database + +- Use migrations for schema changes +- Add indexes for frequently queried fields +- Use soft deletes where appropriate +- Include timestamps (created_at, updated_at) + +### Testing + +- Write unit tests for business logic +- Write integration tests for API endpoints +- Aim for 80%+ code coverage +- Use descriptive test names +- Mock external dependencies + +Example: +```python +import pytest +from fastapi.testclient import TestClient + +def test_create_document_success(client: TestClient, test_user): + """Test successful document creation.""" + response = client.post( + "/data/documents", + json={ + "title": "Test Doc", + "description": "Test description" + }, + headers={"Authorization": f"Bearer {test_user.token}"} + ) + + assert response.status_code == 200 + data = response.json() + assert data["title"] == "Test Doc" +``` + +## Documentation + +### Code Documentation + +- Add docstrings to all public functions/classes +- Include parameter types and return types +- Document exceptions that can be raised +- Provide usage examples for complex functions + +### API Documentation + +- Update OpenAPI schemas for new endpoints +- Add examples to request/response models +- Document error responses +- Keep `/docs` accurate + +### README and Guides + +- Update README for new features +- Create guides for complex features +- Include examples and use cases +- Keep deployment docs current + +## Commit 
Messages + +Follow conventional commits format: + +``` +(): + + + +