70 changes: 70 additions & 0 deletions .github/scripts/generate-test-summary-pytest.sh
@@ -0,0 +1,70 @@
#!/bin/bash
set -e

# Generate Test Summary from Pytest JUnit XML Output
# Usage: ./generate-test-summary-pytest.sh <path-to-junit-xml>

XML_FILE="${1:-test-results.xml}"

echo "## Test Results" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"

# Parse test results from JUnit XML
if [ -f "$XML_FILE" ]; then
    # Extract test counts from XML
    # JUnit XML structure: <testsuite tests="N" failures="N" errors="N" skipped="N">

    tests=$(grep -oP 'tests="\K[0-9]+' "$XML_FILE" | head -1)
    failures=$(grep -oP 'failures="\K[0-9]+' "$XML_FILE" | head -1)
    errors=$(grep -oP 'errors="\K[0-9]+' "$XML_FILE" | head -1)
    skipped=$(grep -oP 'skipped="\K[0-9]+' "$XML_FILE" | head -1)

    # Default to 0 if values are empty
    tests=${tests:-0}
    failures=${failures:-0}
    errors=${errors:-0}
    skipped=${skipped:-0}

    passed=$((tests - failures - errors - skipped))

    echo "| Status | Count |" >> "$GITHUB_STEP_SUMMARY"
    echo "|--------|-------|" >> "$GITHUB_STEP_SUMMARY"
    echo "| ✅ Passed | $passed |" >> "$GITHUB_STEP_SUMMARY"
    echo "| ❌ Failed | $((failures + errors)) |" >> "$GITHUB_STEP_SUMMARY"
    echo "| ⏭️ Skipped | $skipped |" >> "$GITHUB_STEP_SUMMARY"
    echo "| **Total** | **$tests** |" >> "$GITHUB_STEP_SUMMARY"
    echo "" >> "$GITHUB_STEP_SUMMARY"

    # List failed tests if any
    if [ $((failures + errors)) -gt 0 ]; then
        echo "### ❌ Failed Tests" >> "$GITHUB_STEP_SUMMARY"
        echo "" >> "$GITHUB_STEP_SUMMARY"

        # Extract failed test names from XML
        failed_tests_file=$(mktemp)

        # Find testcase elements with failure or error children
        grep -oP '<testcase[^>]*classname="[^"]*"[^>]*name="[^"]*"[^>]*>.*?<(failure|error)' "$XML_FILE" | \
            grep -oP 'classname="\K[^"]*|name="\K[^"]*' | \
            paste -d '.' - - >> "$failed_tests_file" 2>/dev/null || true

        if [ -s "$failed_tests_file" ]; then
            while IFS= read -r test; do
                echo "- \`$test\`" >> "$GITHUB_STEP_SUMMARY"
            done < "$failed_tests_file"
        else
            echo "_Unable to parse individual test names_" >> "$GITHUB_STEP_SUMMARY"
        fi

        echo "" >> "$GITHUB_STEP_SUMMARY"
        echo "❌ **Tests failed!**" >> "$GITHUB_STEP_SUMMARY"
        rm -f "$failed_tests_file"
        exit 1
    else
        echo "✅ **All tests passed!**" >> "$GITHUB_STEP_SUMMARY"
    fi
else
    echo "⚠️ No test results found at: $XML_FILE" >> "$GITHUB_STEP_SUMMARY"
    exit 1
fi
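Note on the parsing approach: the grep/paste pipeline above relies on pytest writing each `<testcase>` element and its `<failure>`/`<error>` child on a single line, which holds for current pytest output but is brittle. If it ever breaks, the same summary data could be pulled out with Python's stdlib XML parser. A minimal sketch (hypothetical helper, not part of this PR):

```python
# junit_counts.py — hypothetical helper, not part of this PR. Reads the same
# JUnit XML with the stdlib parser instead of line-oriented grep.
import sys
import xml.etree.ElementTree as ET


def junit_summary(path: str) -> tuple[dict, list[str]]:
    root = ET.parse(path).getroot()
    # pytest may wrap suites in <testsuites>; normalize to a list either way.
    suites = [root] if root.tag == "testsuite" else root.findall("testsuite")
    counts = {k: 0 for k in ("tests", "failures", "errors", "skipped")}
    failed = []
    for suite in suites:
        for k in counts:
            counts[k] += int(suite.get(k, 0))
        # A testcase is failed if it has a <failure> or <error> child element.
        for case in suite.iter("testcase"):
            if case.find("failure") is not None or case.find("error") is not None:
                failed.append(f'{case.get("classname")}.{case.get("name")}')
    counts["passed"] = counts["tests"] - counts["failures"] - counts["errors"] - counts["skipped"]
    return counts, failed


if __name__ == "__main__":
    counts, failed = junit_summary(sys.argv[1] if len(sys.argv) > 1 else "test-results.xml")
    print(counts)
    for name in failed:
        print(f"FAILED {name}")
```

Summing across all `<testsuite>` elements also covers the `<testsuites>` wrapper that some JUnit emitters produce, which the grep version handles only by taking the first match.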

61 changes: 61 additions & 0 deletions .github/workflows/run-python-tests.yml
@@ -0,0 +1,61 @@
name: Run Python Tests

on:
  pull_request_target:
    branches:
      - development
  push:
    branches:
      - development

jobs:
  test:
    name: Run Python Tests
    runs-on: ubuntu-latest
    # Require manual approval for fork PRs
    environment: testing

    defaults:
      run:
        working-directory: server/python

    steps:
      - name: Checkout code
        uses: actions/checkout@v5
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Run tests
        run: pytest --verbose --tb=short --junit-xml=test-results.xml || true
        env:
          MONGO_URI: ${{ secrets.MFLIX_URI }}
          MONGO_DB: sample_mflix

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results
          path: |
            server/python/test-results.xml
            server/python/htmlcov/
          retention-days: 30

      - name: Generate Test Summary
        if: always()
        working-directory: .
        run: |
          chmod +x .github/scripts/generate-test-summary-pytest.sh
          .github/scripts/generate-test-summary-pytest.sh server/python/test-results.xml
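Two details worth noting: the `|| true` on the pytest step keeps that step green so the artifact upload and summary always run; the job is failed by the summary script's `exit 1` instead. And since `MONGO_URI`/`MONGO_DB` arrive as plain environment variables, test code can read them directly. A sketch of a consuming fixture (the fixture name and the skip-if-unset policy are assumptions, not code from this PR):

```python
# Sketch only — how an integration fixture might pick up the env vars the
# workflow injects. Fixture name and skip behavior are assumed, not from the PR.
import os

import pytest


@pytest.fixture(scope="session")
def mongo_settings():
    uri = os.environ.get("MONGO_URI")
    if not uri:
        # Outside CI (or without the secret), skip tests that need a database.
        pytest.skip("MONGO_URI not set; integration tests need a database")
    return {"uri": uri, "db": os.environ.get("MONGO_DB", "sample_mflix")}
```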

33 changes: 33 additions & 0 deletions server/python/pytest.ini
@@ -0,0 +1,33 @@
[pytest]
# Pytest configuration for Python FastAPI backend tests

# Test discovery patterns
python_files = test_*.py *_test.py
python_classes = Test*
python_functions = test_*

# Test paths
testpaths = tests

# Output options
addopts =
    -v
    --strict-markers
    --tb=short
    --asyncio-mode=auto
    --color=yes

# Markers for categorizing tests
markers =
    unit: Unit tests with mocked dependencies
    integration: Integration tests requiring database
    slow: Tests that take longer to run

# Async settings
asyncio_mode = auto
asyncio_default_fixture_loop_scope = function

# Coverage settings (optional)
# To enable coverage reporting, append these flags to the addopts list above:
#     --cov=src --cov-report=html --cov-report=term
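Because `--strict-markers` is enabled, only the markers declared above may be used; anything else fails collection. A small illustration (test names are hypothetical) of applying the markers:

```python
import pytest


@pytest.mark.unit
def test_title_is_preserved(sample_movie):
    # Pure-Python check against mocked data; no external services.
    assert sample_movie["title"] == "Test Movie"


@pytest.mark.integration
@pytest.mark.slow
async def test_movies_roundtrip_against_db():
    ...  # would hit the real database; runs as async thanks to asyncio_mode = auto
```

Subsets can then be selected with `pytest -m unit`, or slow tests excluded with `pytest -m "not slow"`.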

2 changes: 2 additions & 0 deletions server/python/tests/__init__.py
@@ -0,0 +1,2 @@
# Tests package

90 changes: 90 additions & 0 deletions server/python/tests/conftest.py
@@ -0,0 +1,90 @@
"""
Pytest configuration and fixtures for testing.

This file contains shared fixtures and configuration for all tests.
"""

import pytest
import sys
from pathlib import Path

# Add the parent directory to the path so we can import from src
sys.path.insert(0, str(Path(__file__).parent.parent))

# Configure pytest-asyncio
pytest_plugins = ('pytest_asyncio',)


@pytest.fixture
def sample_movie():
"""Sample movie data for testing."""
return {
"_id": "507f1f77bcf86cd799439011",
"title": "Test Movie",
"year": 2024,
"plot": "A test movie plot",
"genres": ["Action", "Drama"],
"directors": ["Test Director"],
"cast": ["Actor 1", "Actor 2"],
"runtime": 120,
"rated": "PG-13"
}


@pytest.fixture
def sample_movies():
"""Multiple sample movies for testing."""
return [
{
"_id": "507f1f77bcf86cd799439011",
"title": "Test Movie 1",
"year": 2024,
"plot": "First test movie",
"genres": ["Action"],
},
{
"_id": "507f1f77bcf86cd799439012",
"title": "Test Movie 2",
"year": 2023,
"plot": "Second test movie",
"genres": ["Comedy"],
},
{
"_id": "507f1f77bcf86cd799439013",
"title": "Test Movie 3",
"year": 2024,
"plot": "Third test movie",
"genres": ["Drama"],
}
]


@pytest.fixture
def mock_success_response():
"""Mock success response structure."""
def _create_response(data, message="Success"):
return {
"success": True,
"message": message,
"data": data,
"timestamp": "2024-01-01T00:00:00.000Z"
}
return _create_response


@pytest.fixture
def mock_error_response():
"""Mock error response structure."""
def _create_response(message, code=None, details=None):
return {
"success": False,
"message": message,
"error": {
"message": message,
"code": code,
"details": details
},
"timestamp": "2024-01-01T00:00:00.000Z"
}
return _create_response
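For reference, a throwaway test module (illustrative only, not part of this PR) showing how these fixtures compose:

```python
# tests/test_fixtures_demo.py — illustrative only; exercises the fixtures above.
def test_sample_movie_shape(sample_movie):
    assert sample_movie["year"] == 2024
    assert "Action" in sample_movie["genres"]


def test_success_envelope(mock_success_response, sample_movies):
    # The factory fixture wraps any payload in the standard success envelope.
    resp = mock_success_response(sample_movies, message="Movies fetched")
    assert resp["success"] is True
    assert len(resp["data"]) == 3


def test_error_envelope(mock_error_response):
    resp = mock_error_response("Not found", code=404)
    assert resp["error"]["code"] == 404
```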
