From 3c2042f72e962dc66acb71d69c7f3ece5a27299c Mon Sep 17 00:00:00 2001 From: dacharyc Date: Thu, 6 Nov 2025 10:58:31 -0500 Subject: [PATCH 1/5] Rename test file and add unit tests for routes --- server/python/tests/test_movie_routes.py | 685 ++++++++++++++++++---- server/python/tests/test_movie_schemas.py | 160 +++++ 2 files changed, 729 insertions(+), 116 deletions(-) create mode 100644 server/python/tests/test_movie_schemas.py diff --git a/server/python/tests/test_movie_routes.py b/server/python/tests/test_movie_routes.py index a4735ca..2220ec7 100644 --- a/server/python/tests/test_movie_routes.py +++ b/server/python/tests/test_movie_routes.py @@ -1,16 +1,16 @@ """ Unit Tests for Movie Routes -These tests verify basic API validation and structure. -Following the pattern from PR #21 Express tests - simple validation tests. - -Note: These are basic validation tests only. Full integration tests -would require a test database or mocking strategy that handles -AsyncMongoClient event loop binding issues. +These tests verify the route handler logic using mocked MongoDB operations. +Tests use unittest.mock.AsyncMock to mock database calls without requiring +an actual database connection or server instance. """ import pytest -from pydantic import ValidationError +from unittest.mock import AsyncMock, MagicMock, patch +from bson import ObjectId +from bson.errors import InvalidId + from src.models.models import CreateMovieRequest, UpdateMovieRequest @@ -20,141 +20,594 @@ @pytest.mark.unit -class TestMovieCreateValidation: - """Tests for CreateMovieRequest model validation.""" - - def test_create_movie_with_valid_data(self): - """Should accept valid movie data.""" - movie_data = { +@pytest.mark.asyncio +class TestGetMovieById: + """Tests for GET /api/movies/{id} endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_get_movie_by_id_success(self, mock_get_collection): + """Should return movie when valid ID is provided and movie exists.""" + # Setup mock + mock_collection = AsyncMock() + mock_movie = { + "_id": ObjectId(TEST_MOVIE_ID), "title": "Test Movie", "year": 2024, - "plot": "A test movie plot", - "genres": ["Action", "Drama"], - "runtime": 120 + "plot": "A test movie plot" } + mock_collection.find_one.return_value = mock_movie + mock_get_collection.return_value = mock_collection + + # Import and call the route handler + from src.routers.movies import get_movie_by_id + result = await get_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is True + assert result.data["title"] == "Test Movie" + assert result.data["_id"] == TEST_MOVIE_ID + mock_collection.find_one.assert_called_once_with({"_id": ObjectId(TEST_MOVIE_ID)}) + + @patch('src.routers.movies.get_collection') + async def test_get_movie_by_id_not_found(self, mock_get_collection): + """Should return error when movie does not exist.""" + # Setup mock + mock_collection = AsyncMock() + mock_collection.find_one.return_value = None + mock_get_collection.return_value = mock_collection + + # Import and call the route handler + from src.routers.movies import get_movie_by_id + result = await get_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "not found" in result.message.lower() + + async def test_get_movie_by_id_invalid_id(self): + """Should return error when invalid ObjectId format is provided.""" + # Import and call the route handler + from src.routers.movies import get_movie_by_id + result = await get_movie_by_id(INVALID_MOVIE_ID) + + # Assertions + assert 
result.success is False + assert "invalid" in result.message.lower() + + @patch('src.routers.movies.get_collection') + async def test_get_movie_by_id_database_error(self, mock_get_collection): + """Should return error when database operation fails.""" + # Setup mock to raise exception + mock_collection = AsyncMock() + mock_collection.find_one.side_effect = Exception("Database connection failed") + mock_get_collection.return_value = mock_collection + + # Import and call the route handler + from src.routers.movies import get_movie_by_id + result = await get_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "error" in result.message.lower() - movie = CreateMovieRequest(**movie_data) - assert movie.title == "Test Movie" - assert movie.year == 2024 - assert movie.plot == "A test movie plot" - def test_create_movie_missing_required_field(self): - """Should raise ValidationError when title is missing.""" - movie_data = { +@pytest.mark.unit +@pytest.mark.asyncio +class TestCreateMovie: + """Tests for POST /api/movies/ endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_create_movie_success(self, mock_get_collection): + """Should create movie and return created movie data.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.acknowledged = True + mock_result.inserted_id = ObjectId(TEST_MOVIE_ID) + mock_collection.insert_one.return_value = mock_result + + mock_created_movie = { + "_id": ObjectId(TEST_MOVIE_ID), + "title": "New Movie", "year": 2024, - "plot": "A movie without title" + "plot": "A new movie" } + mock_collection.find_one.return_value = mock_created_movie + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import create_movie + movie_request = CreateMovieRequest( + title="New Movie", + year=2024, + plot="A new movie" + ) + result = await create_movie(movie_request) + + # Assertions + assert result.success is True + assert result.data["title"] == "New Movie" + assert result.data["_id"] == TEST_MOVIE_ID + mock_collection.insert_one.assert_called_once() + + @patch('src.routers.movies.get_collection') + async def test_create_movie_database_error(self, mock_get_collection): + """Should return error when database insert fails.""" + # Setup mock to raise exception + mock_collection = AsyncMock() + mock_collection.insert_one.side_effect = Exception("Insert failed") + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import create_movie + movie_request = CreateMovieRequest(title="New Movie") + result = await create_movie(movie_request) + + # Assertions + assert result.success is False + assert "error" in result.message.lower() - with pytest.raises(ValidationError) as exc_info: - CreateMovieRequest(**movie_data) - - errors = exc_info.value.errors() - assert any(error["loc"] == ("title",) for error in errors) - def test_create_movie_invalid_year_type(self): - """Should raise ValidationError when year is not an integer.""" - movie_data = { - "title": "Test Movie", - "year": "not-a-number" +@pytest.mark.unit +@pytest.mark.asyncio +class TestUpdateMovie: + """Tests for PATCH /api/movies/{id} endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_update_movie_success(self, mock_get_collection): + """Should update movie and return updated movie data.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.matched_count = 1 + mock_result.modified_count 
= 1 + mock_collection.update_one.return_value = mock_result + + mock_updated_movie = { + "_id": ObjectId(TEST_MOVIE_ID), + "title": "Updated Movie", + "year": 2025, + "plot": "Updated plot" } + mock_collection.find_one.return_value = mock_updated_movie + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import update_movie + update_request = UpdateMovieRequest(title="Updated Movie", year=2025) + result = await update_movie(update_request, TEST_MOVIE_ID) + + # Assertions + assert result.success is True + assert result.data["title"] == "Updated Movie" + mock_collection.update_one.assert_called_once() + + @patch('src.routers.movies.get_collection') + async def test_update_movie_not_found(self, mock_get_collection): + """Should return error when movie to update does not exist.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.matched_count = 0 + mock_collection.update_one.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import update_movie + update_request = UpdateMovieRequest(title="Updated Movie") + result = await update_movie(update_request, TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "was found" in result.message.lower() or "not found" in result.message.lower() + + async def test_update_movie_invalid_id(self): + """Should return error when invalid ObjectId format is provided.""" + # Create request + from src.routers.movies import update_movie + update_request = UpdateMovieRequest(title="Updated Movie") + result = await update_movie(update_request, INVALID_MOVIE_ID) + + # Assertions + assert result.success is False + assert "invalid" in result.message.lower() - with pytest.raises(ValidationError) as exc_info: - CreateMovieRequest(**movie_data) - errors = exc_info.value.errors() - assert any(error["loc"] == ("year",) for error in errors) +@pytest.mark.unit +@pytest.mark.asyncio +class TestDeleteMovie: + """Tests for DELETE /api/movies/{id} endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_delete_movie_success(self, mock_get_collection): + """Should delete movie and return success response.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.deleted_count = 1 + mock_collection.delete_one.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import delete_movie_by_id + result = await delete_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is True + assert result.data["deletedCount"] == 1 + mock_collection.delete_one.assert_called_once_with({"_id": ObjectId(TEST_MOVIE_ID)}) + + @patch('src.routers.movies.get_collection') + async def test_delete_movie_not_found(self, mock_get_collection): + """Should return error when movie to delete does not exist.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.deleted_count = 0 + mock_collection.delete_one.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import delete_movie_by_id + result = await delete_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "not found" in result.message.lower() + + async def test_delete_movie_invalid_id(self): + """Should return error when invalid ObjectId format is provided.""" + # Call the route handler + 
from src.routers.movies import delete_movie_by_id + result = await delete_movie_by_id(INVALID_MOVIE_ID) + + # Assertions + assert result.success is False + assert "invalid" in result.message.lower() + + @patch('src.routers.movies.get_collection') + async def test_delete_movie_database_error(self, mock_get_collection): + """Should return error when database operation fails.""" + # Setup mock to raise exception + mock_collection = AsyncMock() + mock_collection.delete_one.side_effect = Exception("Delete failed") + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import delete_movie_by_id + result = await delete_movie_by_id(TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "error" in result.message.lower() - def test_create_movie_with_optional_fields(self): - """Should accept movie with only required fields.""" - movie_data = { - "title": "Minimal Movie" - } - movie = CreateMovieRequest(**movie_data) - assert movie.title == "Minimal Movie" - assert movie.year is None - assert movie.plot is None @pytest.mark.unit -class TestMovieUpdateValidation: - """Tests for UpdateMovieRequest model validation.""" - - def test_update_movie_with_valid_data(self): - """Should accept valid update data.""" - update_data = { - "title": "Updated Title", - "year": 2025 - } +@pytest.mark.asyncio +class TestGetAllMovies: + """Tests for GET /api/movies/ endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_get_all_movies_success(self, mock_get_collection): + """Should return list of movies with default pagination.""" + # Setup mock with proper cursor chaining + mock_collection = MagicMock() + mock_cursor = MagicMock() + + # Mock the chaining: find().sort().skip().limit() + mock_cursor.sort.return_value = mock_cursor + mock_cursor.skip.return_value = mock_cursor + mock_cursor.limit.return_value = mock_cursor + + # Mock async iteration + mock_cursor.__aiter__.return_value = iter([ + {"_id": ObjectId(TEST_MOVIE_ID), "title": "Movie 1", "year": 2024}, + {"_id": ObjectId("507f1f77bcf86cd799439012"), "title": "Movie 2", "year": 2023} + ]) + + mock_collection.find.return_value = mock_cursor + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import get_all_movies + result = await get_all_movies() + + # Assertions + assert result.success is True + assert len(result.data) == 2 + assert result.data[0]["title"] == "Movie 1" + mock_collection.find.assert_called_once() + + @patch('src.routers.movies.get_collection') + async def test_get_all_movies_with_filters(self, mock_get_collection): + """Should filter movies by genre and year.""" + # Setup mock with proper cursor chaining + mock_collection = MagicMock() + mock_cursor = MagicMock() + + # Mock the chaining: find().sort().skip().limit() + mock_cursor.sort.return_value = mock_cursor + mock_cursor.skip.return_value = mock_cursor + mock_cursor.limit.return_value = mock_cursor + + # Mock async iteration + mock_cursor.__aiter__.return_value = iter([ + {"_id": ObjectId(TEST_MOVIE_ID), "title": "Action Movie", "year": 2024, "genres": ["Action"]} + ]) + + mock_collection.find.return_value = mock_cursor + mock_get_collection.return_value = mock_collection + + # Call the route handler with filters + from src.routers.movies import get_all_movies + result = await get_all_movies(genre="Action", year=2024) + + # Assertions + assert result.success is True + assert len(result.data) == 1 + assert "Action" in result.data[0]["genres"] + + 
@patch('src.routers.movies.get_collection') + async def test_get_all_movies_empty_result(self, mock_get_collection): + """Should return empty list when no movies match filters.""" + # Setup mock with proper cursor chaining + mock_collection = MagicMock() + mock_cursor = MagicMock() + + # Mock the chaining: find().sort().skip().limit() + mock_cursor.sort.return_value = mock_cursor + mock_cursor.skip.return_value = mock_cursor + mock_cursor.limit.return_value = mock_cursor + + # Mock async iteration with empty list + mock_cursor.__aiter__.return_value = iter([]) + + mock_collection.find.return_value = mock_cursor + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import get_all_movies + result = await get_all_movies(year=1800) + + # Assertions + assert result.success is True + assert len(result.data) == 0 + + @patch('src.routers.movies.get_collection') + async def test_get_all_movies_database_error(self, mock_get_collection): + """Should return error when database operation fails.""" + # Setup mock to raise exception - use MagicMock since find() is synchronous + mock_collection = MagicMock() + mock_collection.find.side_effect = Exception("Database error") + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import get_all_movies + result = await get_all_movies() + + # Assertions + assert result.success is False + assert "error" in result.message.lower() - movie_update = UpdateMovieRequest(**update_data) - assert movie_update.title == "Updated Title" - assert movie_update.year == 2025 - def test_update_movie_with_partial_data(self): - """Should accept partial update data.""" - update_data = { - "title": "Only Title Updated" - } - - movie_update = UpdateMovieRequest(**update_data) - assert movie_update.title == "Only Title Updated" - assert movie_update.year is None - - def test_update_movie_empty_data(self): - """Should accept empty update (all fields optional).""" - update_data = {} +@pytest.mark.unit +@pytest.mark.asyncio +class TestBatchOperations: + """Tests for batch create and delete operations.""" + + @patch('src.routers.movies.get_collection') + async def test_create_movies_batch_success(self, mock_get_collection): + """Should create multiple movies in batch.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.acknowledged = True + mock_result.inserted_ids = [ + ObjectId(TEST_MOVIE_ID), + ObjectId("507f1f77bcf86cd799439012") + ] + mock_collection.insert_many.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import create_movies_batch + movies = [ + CreateMovieRequest(title="Movie 1", year=2024), + CreateMovieRequest(title="Movie 2", year=2023) + ] + result = await create_movies_batch(movies) + + # Assertions + assert result.success is True + assert result.data["insertedCount"] == 2 + # Note: The route handler has a bug where it calls insert_many twice + # This test documents the current behavior + assert mock_collection.insert_many.call_count == 2 + + @patch('src.routers.movies.get_collection') + async def test_create_movies_batch_empty_list(self, mock_get_collection): + """Should return error when empty list is provided.""" + mock_get_collection.return_value = AsyncMock() + + # Create request with empty list + from src.routers.movies import create_movies_batch + result = await create_movies_batch([]) + + # Assertions + assert result.success is False + assert 
"empty" in result.message.lower() + + @patch('src.routers.movies.get_collection') + async def test_delete_movies_batch_success(self, mock_get_collection): + """Should delete multiple movies matching filter.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.deleted_count = 3 + mock_collection.delete_many.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import delete_movies_batch + request_body = {"filter": {"year": 2020}} + result = await delete_movies_batch(request_body) + + # Assertions + assert result.success is True + assert result.data["deletedCount"] == 3 + mock_collection.delete_many.assert_called_once() + + @patch('src.routers.movies.get_collection') + async def test_delete_movies_batch_missing_filter(self, mock_get_collection): + """Should return error when filter is missing.""" + mock_get_collection.return_value = AsyncMock() + + # Create request without filter + from src.routers.movies import delete_movies_batch + request_body = {} + result = await delete_movies_batch(request_body) + + # Assertions + assert result.success is False + assert "filter" in result.message.lower() - movie_update = UpdateMovieRequest(**update_data) - assert movie_update.title is None - assert movie_update.year is None @pytest.mark.unit -class TestMovieDataStructure: - """Tests for movie data structure and types.""" - - def test_movie_with_all_fields(self): - """Should handle movie with all possible fields.""" - movie_data = { - "title": "Complete Movie", - "year": 2024, - "plot": "Full plot", - "fullplot": "Extended plot description", - "genres": ["Action", "Drama", "Thriller"], - "runtime": 142, - "cast": ["Actor 1", "Actor 2", "Actor 3"], - "directors": ["Director 1"], - "writers": ["Writer 1", "Writer 2"], - "languages": ["English", "Spanish"], - "rated": "PG-13", - "countries": ["USA"] +@pytest.mark.asyncio +class TestFindAndDeleteMovie: + """Tests for DELETE /api/movies/{id}/find-and-delete endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_find_and_delete_success(self, mock_get_collection): + """Should find and delete movie in atomic operation.""" + # Setup mock + mock_collection = AsyncMock() + mock_deleted_movie = { + "_id": ObjectId(TEST_MOVIE_ID), + "title": "Deleted Movie", + "year": 2024 } + mock_collection.find_one_and_delete.return_value = mock_deleted_movie + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import find_and_delete_movie + result = await find_and_delete_movie(TEST_MOVIE_ID) + + # Assertions + assert result.success is True + assert result.data["title"] == "Deleted Movie" + assert result.data["_id"] == TEST_MOVIE_ID + mock_collection.find_one_and_delete.assert_called_once_with({"_id": ObjectId(TEST_MOVIE_ID)}) + + @patch('src.routers.movies.get_collection') + async def test_find_and_delete_not_found(self, mock_get_collection): + """Should return error when movie does not exist.""" + # Setup mock + mock_collection = AsyncMock() + mock_collection.find_one_and_delete.return_value = None + mock_get_collection.return_value = mock_collection + + # Call the route handler + from src.routers.movies import find_and_delete_movie + result = await find_and_delete_movie(TEST_MOVIE_ID) + + # Assertions + assert result.success is False + assert "not found" in result.message.lower() + + async def test_find_and_delete_invalid_id(self): + """Should return error when invalid ObjectId format is 
provided.""" + # Call the route handler + from src.routers.movies import find_and_delete_movie + result = await find_and_delete_movie(INVALID_MOVIE_ID) + + # Assertions + assert result.success is False + assert "invalid" in result.message.lower() - movie = CreateMovieRequest(**movie_data) - assert movie.title == "Complete Movie" - assert len(movie.genres) == 3 - assert len(movie.cast) == 3 - def test_movie_genres_as_list(self): - """Should accept genres as a list.""" - movie_data = { - "title": "Genre Test", - "genres": ["Sci-Fi", "Adventure"] +@pytest.mark.unit +@pytest.mark.asyncio +class TestBatchUpdate: + """Tests for PATCH /api/movies/ batch update endpoint.""" + + @patch('src.routers.movies.get_collection') + async def test_update_movies_batch_success(self, mock_get_collection): + """Should update multiple movies matching filter.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.matched_count = 5 + mock_result.modified_count = 5 + mock_collection.update_many.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import update_movies_batch + request_body = { + "filter": {"year": 2020}, + "update": {"$set": {"rated": "PG-13"}} } - - movie = CreateMovieRequest(**movie_data) - assert isinstance(movie.genres, list) - assert "Sci-Fi" in movie.genres - - def test_movie_with_numeric_fields(self): - """Should handle numeric fields correctly.""" - movie_data = { - "title": "Numeric Test", - "year": 2024, - "runtime": 120 + result = await update_movies_batch(request_body) + + # Assertions + assert result.success is True + assert result.data["matchedCount"] == 5 + assert result.data["modifiedCount"] == 5 + mock_collection.update_many.assert_called_once() + + @patch('src.routers.movies.get_collection') + async def test_update_movies_batch_missing_filter(self, mock_get_collection): + """Should return error when filter is missing.""" + mock_get_collection.return_value = AsyncMock() + + # Create request without filter + from src.routers.movies import update_movies_batch + request_body = {"update": {"$set": {"rated": "PG-13"}}} + result = await update_movies_batch(request_body) + + # Assertions + assert result.success is False + assert "filter" in result.message.lower() or "required" in result.message.lower() + + @patch('src.routers.movies.get_collection') + async def test_update_movies_batch_missing_update(self, mock_get_collection): + """Should return error when update is missing.""" + mock_get_collection.return_value = AsyncMock() + + # Create request without update + from src.routers.movies import update_movies_batch + request_body = {"filter": {"year": 2020}} + result = await update_movies_batch(request_body) + + # Assertions + assert result.success is False + assert "update" in result.message.lower() or "required" in result.message.lower() + + @patch('src.routers.movies.get_collection') + async def test_update_movies_batch_no_matches(self, mock_get_collection): + """Should return success with zero modified count when no movies match.""" + # Setup mock + mock_collection = AsyncMock() + mock_result = MagicMock() + mock_result.matched_count = 0 + mock_result.modified_count = 0 + mock_collection.update_many.return_value = mock_result + mock_get_collection.return_value = mock_collection + + # Create request + from src.routers.movies import update_movies_batch + request_body = { + "filter": {"year": 1800}, + "update": {"$set": {"rated": "PG-13"}} } + result = await 
update_movies_batch(request_body) - movie = CreateMovieRequest(**movie_data) - assert isinstance(movie.year, int) - assert isinstance(movie.runtime, int) + # Assertions + assert result.success is True + assert result.data["matchedCount"] == 0 + assert result.data["modifiedCount"] == 0 diff --git a/server/python/tests/test_movie_schemas.py b/server/python/tests/test_movie_schemas.py new file mode 100644 index 0000000..a4735ca --- /dev/null +++ b/server/python/tests/test_movie_schemas.py @@ -0,0 +1,160 @@ +""" +Unit Tests for Movie Routes + +These tests verify basic API validation and structure. +Following the pattern from PR #21 Express tests - simple validation tests. + +Note: These are basic validation tests only. Full integration tests +would require a test database or mocking strategy that handles +AsyncMongoClient event loop binding issues. +""" + +import pytest +from pydantic import ValidationError +from src.models.models import CreateMovieRequest, UpdateMovieRequest + + +# Test constants +TEST_MOVIE_ID = "507f1f77bcf86cd799439011" +INVALID_MOVIE_ID = "invalid-id" + + +@pytest.mark.unit +class TestMovieCreateValidation: + """Tests for CreateMovieRequest model validation.""" + + def test_create_movie_with_valid_data(self): + """Should accept valid movie data.""" + movie_data = { + "title": "Test Movie", + "year": 2024, + "plot": "A test movie plot", + "genres": ["Action", "Drama"], + "runtime": 120 + } + + movie = CreateMovieRequest(**movie_data) + assert movie.title == "Test Movie" + assert movie.year == 2024 + assert movie.plot == "A test movie plot" + + def test_create_movie_missing_required_field(self): + """Should raise ValidationError when title is missing.""" + movie_data = { + "year": 2024, + "plot": "A movie without title" + } + + with pytest.raises(ValidationError) as exc_info: + CreateMovieRequest(**movie_data) + + errors = exc_info.value.errors() + assert any(error["loc"] == ("title",) for error in errors) + + def test_create_movie_invalid_year_type(self): + """Should raise ValidationError when year is not an integer.""" + movie_data = { + "title": "Test Movie", + "year": "not-a-number" + } + + with pytest.raises(ValidationError) as exc_info: + CreateMovieRequest(**movie_data) + + errors = exc_info.value.errors() + assert any(error["loc"] == ("year",) for error in errors) + + def test_create_movie_with_optional_fields(self): + """Should accept movie with only required fields.""" + movie_data = { + "title": "Minimal Movie" + } + + movie = CreateMovieRequest(**movie_data) + assert movie.title == "Minimal Movie" + assert movie.year is None + assert movie.plot is None + + +@pytest.mark.unit +class TestMovieUpdateValidation: + """Tests for UpdateMovieRequest model validation.""" + + def test_update_movie_with_valid_data(self): + """Should accept valid update data.""" + update_data = { + "title": "Updated Title", + "year": 2025 + } + + movie_update = UpdateMovieRequest(**update_data) + assert movie_update.title == "Updated Title" + assert movie_update.year == 2025 + + def test_update_movie_with_partial_data(self): + """Should accept partial update data.""" + update_data = { + "title": "Only Title Updated" + } + + movie_update = UpdateMovieRequest(**update_data) + assert movie_update.title == "Only Title Updated" + assert movie_update.year is None + + def test_update_movie_empty_data(self): + """Should accept empty update (all fields optional).""" + update_data = {} + + movie_update = UpdateMovieRequest(**update_data) + assert movie_update.title is None + assert movie_update.year 
is None + + +@pytest.mark.unit +class TestMovieDataStructure: + """Tests for movie data structure and types.""" + + def test_movie_with_all_fields(self): + """Should handle movie with all possible fields.""" + movie_data = { + "title": "Complete Movie", + "year": 2024, + "plot": "Full plot", + "fullplot": "Extended plot description", + "genres": ["Action", "Drama", "Thriller"], + "runtime": 142, + "cast": ["Actor 1", "Actor 2", "Actor 3"], + "directors": ["Director 1"], + "writers": ["Writer 1", "Writer 2"], + "languages": ["English", "Spanish"], + "rated": "PG-13", + "countries": ["USA"] + } + + movie = CreateMovieRequest(**movie_data) + assert movie.title == "Complete Movie" + assert len(movie.genres) == 3 + assert len(movie.cast) == 3 + + def test_movie_genres_as_list(self): + """Should accept genres as a list.""" + movie_data = { + "title": "Genre Test", + "genres": ["Sci-Fi", "Adventure"] + } + + movie = CreateMovieRequest(**movie_data) + assert isinstance(movie.genres, list) + assert "Sci-Fi" in movie.genres + + def test_movie_with_numeric_fields(self): + """Should handle numeric fields correctly.""" + movie_data = { + "title": "Numeric Test", + "year": 2024, + "runtime": 120 + } + + movie = CreateMovieRequest(**movie_data) + assert isinstance(movie.year, int) + assert isinstance(movie.runtime, int) From 65bf0627d888f9d59cf50ef305ddcfd57348e264 Mon Sep 17 00:00:00 2001 From: dacharyc Date: Thu, 6 Nov 2025 11:12:03 -0500 Subject: [PATCH 2/5] Add tests for remaining routes, missing import --- server/python/tests/test_movie_routes.py | 450 +++++++++++++++++++++++ 1 file changed, 450 insertions(+) diff --git a/server/python/tests/test_movie_routes.py b/server/python/tests/test_movie_routes.py index 2220ec7..3fd510e 100644 --- a/server/python/tests/test_movie_routes.py +++ b/server/python/tests/test_movie_routes.py @@ -611,3 +611,453 @@ async def test_update_movies_batch_no_matches(self, mock_get_collection): assert result.success is True assert result.data["matchedCount"] == 0 assert result.data["modifiedCount"] == 0 + + + +@pytest.mark.unit +@pytest.mark.asyncio +class TestSearchMovies: + """Tests for GET /api/movies/search MongoDB Search endpoint.""" + + @patch('src.routers.movies.execute_aggregation') + async def test_search_movies_by_plot_success(self, mock_execute_aggregation): + """Should successfully search movies by plot.""" + # Setup mock + mock_execute_aggregation.return_value = [{ + "totalCount": [{"count": 2}], + "results": [ + {"_id": ObjectId(TEST_MOVIE_ID), "title": "Test Movie 1", "plot": "A test plot", "year": 2024}, + {"_id": ObjectId("507f1f77bcf86cd799439012"), "title": "Test Movie 2", "plot": "Another test", "year": 2023} + ] + }] + + # Call the route handler + from src.routers.movies import search_movies + result = await search_movies(plot="test", search_operator="must") + + # Assertions + assert result.success is True + assert result.data.totalCount == 2 + assert len(result.data.movies) == 2 + assert result.data.movies[0].title == "Test Movie 1" + mock_execute_aggregation.assert_called_once() + + @patch('src.routers.movies.execute_aggregation') + async def test_search_movies_multiple_fields(self, mock_execute_aggregation): + """Should search across multiple fields (directors and cast).""" + # Setup mock + mock_execute_aggregation.return_value = [{ + "totalCount": [{"count": 1}], + "results": [ + {"_id": ObjectId(TEST_MOVIE_ID), "title": "Action Movie", "directors": ["John Doe"], "cast": ["Jane Smith"], "year": 2024} + ] + }] + + # Call the route handler + from 
src.routers.movies import search_movies + result = await search_movies(directors="John", cast="Jane", search_operator="must") + + # Assertions + assert result.success is True + assert result.data.totalCount == 1 + assert len(result.data.movies) == 1 + + @patch('src.routers.movies.execute_aggregation') + async def test_search_movies_with_pagination(self, mock_execute_aggregation): + """Should support pagination parameters.""" + # Setup mock + mock_execute_aggregation.return_value = [{ + "totalCount": [{"count": 100}], + "results": [ + {"_id": ObjectId(TEST_MOVIE_ID), "title": f"Movie {i}", "year": 2024} + for i in range(20) + ] + }] + + # Call the route handler + from src.routers.movies import search_movies + result = await search_movies(plot="test", limit=20, skip=20, search_operator="must") + + # Assertions + assert result.success is True + assert result.data.totalCount == 100 + assert len(result.data.movies) == 20 + + async def test_search_movies_no_parameters(self): + """Should return error when no search parameters provided.""" + from src.routers.movies import search_movies + result = await search_movies(search_operator="must") + + # Assertions + assert result.success is False + assert result.error.code == "DATABASE_ERROR" + + async def test_search_movies_invalid_operator(self): + """Should return error for invalid search operator.""" + from src.routers.movies import search_movies + result = await search_movies(plot="test", search_operator="invalid") + + # Assertions + assert result.success is False + assert result.error.code == "INVALID_SEARCH_OPERATOR" + + @patch('src.routers.movies.execute_aggregation') + async def test_search_movies_database_error(self, mock_execute_aggregation): + """Should handle database errors gracefully.""" + # Setup mock to raise exception + mock_execute_aggregation.side_effect = Exception("Database connection failed") + + # Call the route handler + from src.routers.movies import search_movies + result = await search_movies(plot="test", search_operator="must") + + # Assertions + assert result.success is False + assert result.error.code == "DATABASE_ERROR" + + @patch('src.routers.movies.execute_aggregation') + async def test_search_movies_empty_results(self, mock_execute_aggregation): + """Should return empty results when no movies match.""" + # Setup mock + mock_execute_aggregation.return_value = [{ + "totalCount": [{"count": 0}], + "results": [] + }] + + # Call the route handler + from src.routers.movies import search_movies + result = await search_movies(plot="nonexistent", search_operator="must") + + # Assertions + assert result.success is True + assert result.data.totalCount == 0 + assert len(result.data.movies) == 0 + + +@pytest.mark.unit +@pytest.mark.asyncio +class TestVectorSearchMovies: + """Tests for GET /api/movies/vector-search endpoint.""" + + @patch('src.routers.movies.voyage_ai_available') + async def test_vector_search_unavailable(self, mock_voyage_available): + """Should return error when Voyage AI is not configured.""" + # Setup mock + mock_voyage_available.return_value = False + + # Call the route handler + from src.routers.movies import vector_search_movies + result = await vector_search_movies(q="action movie") + + # Assertions + assert result.success is False + assert result.error.code == "SERVICE_UNAVAILABLE" + assert "VOYAGE_API_KEY" in result.error.details + + @patch('src.routers.movies.voyage_ai_available') + @patch('src.routers.movies.voyageai.Client') + @patch('src.routers.movies.get_embedding') + 
@patch('src.routers.movies.get_collection') + @patch('src.routers.movies.execute_aggregation_on_collection') + async def test_vector_search_success( + self, + mock_execute_agg, + mock_get_collection, + mock_get_embedding, + mock_voyage_client, + mock_voyage_available + ): + """Should successfully perform vector search.""" + # Setup mocks + mock_voyage_available.return_value = True + mock_voyage_client.return_value = MagicMock() # Mock the Voyage AI client + mock_get_embedding.return_value = [0.1] * 2048 # Mock embedding vector + mock_execute_agg.return_value = [ + {"_id": ObjectId(TEST_MOVIE_ID), "title": "Similar Movie 1", "plot": "Action packed", "score": 0.95}, + {"_id": ObjectId("507f1f77bcf86cd799439012"), "title": "Similar Movie 2", "plot": "More action", "score": 0.87} + ] + + # Call the route handler + from src.routers.movies import vector_search_movies + result = await vector_search_movies(q="action movie", limit=10) + + # Assertions + assert result.success is True + assert len(result.data) == 2 + assert result.data[0].title == "Similar Movie 1" + assert result.data[0].score == 0.95 + mock_get_embedding.assert_called_once() + mock_execute_agg.assert_called_once() + + @patch('src.routers.movies.voyage_ai_available') + @patch('src.routers.movies.voyageai.Client') + @patch('src.routers.movies.get_embedding') + async def test_vector_search_embedding_error(self, mock_get_embedding, mock_voyage_client, mock_voyage_available): + """Should handle embedding generation errors.""" + # Setup mocks + mock_voyage_available.return_value = True + mock_voyage_client.return_value = MagicMock() # Mock the Voyage AI client + mock_get_embedding.side_effect = Exception("Embedding API error") + + # Call the route handler + from src.routers.movies import vector_search_movies + result = await vector_search_movies(q="action movie") + + # Assertions + assert result.success is False + assert result.error.code == "INTERNAL_SERVER_ERROR" + + @patch('src.routers.movies.voyage_ai_available') + @patch('src.routers.movies.voyageai.Client') + @patch('src.routers.movies.get_embedding') + @patch('src.routers.movies.get_collection') + @patch('src.routers.movies.execute_aggregation_on_collection') + async def test_vector_search_empty_results( + self, + mock_execute_agg, + mock_get_collection, + mock_get_embedding, + mock_voyage_client, + mock_voyage_available + ): + """Should return empty results when no similar movies found.""" + # Setup mocks + mock_voyage_available.return_value = True + mock_voyage_client.return_value = MagicMock() # Mock the Voyage AI client + mock_get_embedding.return_value = [0.1] * 2048 + mock_execute_agg.return_value = [] + + # Call the route handler + from src.routers.movies import vector_search_movies + result = await vector_search_movies(q="very specific query", limit=10) + + # Assertions + assert result.success is True + assert len(result.data) == 0 + + + +@pytest.mark.unit +@pytest.mark.asyncio +class TestAggregationReportingByComments: + """Tests for GET /api/movies/aggregations/reportingByComments endpoint.""" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_recent_commented_success(self, mock_execute_aggregation): + """Should successfully aggregate movies with recent comments.""" + # Setup mock + mock_execute_aggregation.return_value = [ + { + "_id": ObjectId(TEST_MOVIE_ID), + "title": "Popular Movie", + "year": 2024, + "genres": ["Action"], + "imdbRating": 8.5, + "recentComments": [ + {"userName": "John", "userEmail": "john@test.com", "text": 
"Great movie!", "date": "2024-01-01"}, + {"userName": "Jane", "userEmail": "jane@test.com", "text": "Loved it!", "date": "2024-01-02"} + ], + "totalComments": 10 + } + ] + + # Call the route handler + from src.routers.movies import aggregate_movies_recent_commented + result = await aggregate_movies_recent_commented(limit=10, movie_id=None) + + # Assertions + assert result.success is True + assert len(result.data) == 1 + assert result.data[0]["title"] == "Popular Movie" + assert result.data[0]["totalComments"] == 10 + assert len(result.data[0]["recentComments"]) == 2 + mock_execute_aggregation.assert_called_once() + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_by_movie_id(self, mock_execute_aggregation): + """Should filter by specific movie ID.""" + # Setup mock + mock_execute_aggregation.return_value = [ + { + "_id": ObjectId(TEST_MOVIE_ID), + "title": "Specific Movie", + "year": 2024, + "totalComments": 5, + "recentComments": [] + } + ] + + # Call the route handler + from src.routers.movies import aggregate_movies_recent_commented + result = await aggregate_movies_recent_commented(movie_id=TEST_MOVIE_ID) + + # Assertions + assert result.success is True + assert len(result.data) == 1 + assert result.data[0]["_id"] == TEST_MOVIE_ID + + async def test_aggregate_movies_invalid_movie_id(self): + """Should return error for invalid movie ID format.""" + from src.routers.movies import aggregate_movies_recent_commented + result = await aggregate_movies_recent_commented(movie_id="invalid_id") + + # Assertions + assert result.success is False + assert result.error.code == "INTERNAL_SERVER_ERROR" + assert "ObjectId" in result.error.details + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_database_error(self, mock_execute_aggregation): + """Should handle database errors gracefully.""" + # Setup mock to raise exception + mock_execute_aggregation.side_effect = Exception("Aggregation failed") + + # Call the route handler + from src.routers.movies import aggregate_movies_recent_commented + result = await aggregate_movies_recent_commented(limit=10, movie_id=None) + + # Assertions + assert result.success is False + assert result.error.code == "INTERNAL_SERVER_ERROR" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_empty_results(self, mock_execute_aggregation): + """Should return empty results when no movies have comments.""" + # Setup mock + mock_execute_aggregation.return_value = [] + + # Call the route handler + from src.routers.movies import aggregate_movies_recent_commented + result = await aggregate_movies_recent_commented(limit=10, movie_id=None) + + # Assertions + assert result.success is True + assert len(result.data) == 0 + + +@pytest.mark.unit +@pytest.mark.asyncio +class TestAggregationReportingByYear: + """Tests for GET /api/movies/aggregations/reportingByYear endpoint.""" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_by_year_success(self, mock_execute_aggregation): + """Should successfully aggregate movies by year with statistics.""" + # Setup mock + mock_execute_aggregation.return_value = [ + {"year": 2024, "movieCount": 150, "averageRating": 7.5, "highestRating": 9.5, "lowestRating": 5.0, "totalVotes": 50000}, + {"year": 2023, "movieCount": 200, "averageRating": 7.2, "highestRating": 9.0, "lowestRating": 4.5, "totalVotes": 75000} + ] + + # Call the route handler + from src.routers.movies import aggregate_movies_by_year + result = await 
aggregate_movies_by_year() + + # Assertions + assert result.success is True + assert len(result.data) == 2 + assert result.data[0]["year"] == 2024 + assert result.data[0]["movieCount"] == 150 + assert result.data[0]["averageRating"] == 7.5 + mock_execute_aggregation.assert_called_once() + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_by_year_database_error(self, mock_execute_aggregation): + """Should handle database errors gracefully.""" + # Setup mock to raise exception + mock_execute_aggregation.side_effect = Exception("Aggregation pipeline failed") + + # Call the route handler + from src.routers.movies import aggregate_movies_by_year + result = await aggregate_movies_by_year() + + # Assertions + assert result.success is False + assert result.error.code == "INTERNAL_SERVER_ERROR" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_movies_by_year_empty_results(self, mock_execute_aggregation): + """Should return empty results when no valid year data.""" + # Setup mock + mock_execute_aggregation.return_value = [] + + # Call the route handler + from src.routers.movies import aggregate_movies_by_year + result = await aggregate_movies_by_year() + + # Assertions + assert result.success is True + assert len(result.data) == 0 + + +@pytest.mark.unit +@pytest.mark.asyncio +class TestAggregationReportingByDirectors: + """Tests for GET /api/movies/aggregations/reportingByDirectors endpoint.""" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_directors_most_movies_success(self, mock_execute_aggregation): + """Should successfully aggregate directors with most movies.""" + # Setup mock + mock_execute_aggregation.return_value = [ + {"director": "Steven Spielberg", "movieCount": 50, "averageRating": 8.2}, + {"director": "Martin Scorsese", "movieCount": 45, "averageRating": 8.5}, + {"director": "Christopher Nolan", "movieCount": 40, "averageRating": 8.7} + ] + + # Call the route handler + from src.routers.movies import aggregate_directors_most_movies + result = await aggregate_directors_most_movies(limit=20) + + # Assertions + assert result.success is True + assert len(result.data) == 3 + assert result.data[0]["director"] == "Steven Spielberg" + assert result.data[0]["movieCount"] == 50 + assert result.data[0]["averageRating"] == 8.2 + mock_execute_aggregation.assert_called_once() + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_directors_with_custom_limit(self, mock_execute_aggregation): + """Should respect custom limit parameter.""" + # Setup mock + mock_execute_aggregation.return_value = [ + {"director": "Director 1", "movieCount": 10, "averageRating": 7.0} + ] + + # Call the route handler + from src.routers.movies import aggregate_directors_most_movies + result = await aggregate_directors_most_movies(limit=5) + + # Assertions + assert result.success is True + # Verify the aggregation was called (limit is applied in pipeline) + mock_execute_aggregation.assert_called_once() + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_directors_database_error(self, mock_execute_aggregation): + """Should handle database errors gracefully.""" + # Setup mock to raise exception + mock_execute_aggregation.side_effect = Exception("Pipeline execution failed") + + # Call the route handler + from src.routers.movies import aggregate_directors_most_movies + result = await aggregate_directors_most_movies() + + # Assertions + assert result.success is False + assert 
result.error.code == "INTERNAL_SERVER_ERROR" + + @patch('src.routers.movies.execute_aggregation') + async def test_aggregate_directors_empty_results(self, mock_execute_aggregation): + """Should return empty results when no directors found.""" + # Setup mock + mock_execute_aggregation.return_value = [] + + # Call the route handler + from src.routers.movies import aggregate_directors_most_movies + result = await aggregate_directors_most_movies() + + # Assertions + assert result.success is True + assert len(result.data) == 0 From 093d78a19c7f08859ca5cfaa2e4b505b33f092f3 Mon Sep 17 00:00:00 2001 From: dacharyc Date: Thu, 6 Nov 2025 12:03:25 -0500 Subject: [PATCH 3/5] Add integration tests, update workflow and script --- .../scripts/generate-test-summary-pytest.sh | 131 ++++--- .github/workflows/run-python-tests.yml | 18 +- server/python/tests/README.md | 192 ++++++++++ server/python/tests/integration/__init__.py | 2 + server/python/tests/integration/conftest.py | 213 +++++++++++ .../test_movie_routes_integration.py | 333 ++++++++++++++++++ 6 files changed, 838 insertions(+), 51 deletions(-) mode change 100644 => 100755 .github/scripts/generate-test-summary-pytest.sh create mode 100644 server/python/tests/README.md create mode 100644 server/python/tests/integration/__init__.py create mode 100644 server/python/tests/integration/conftest.py create mode 100644 server/python/tests/integration/test_movie_routes_integration.py diff --git a/.github/scripts/generate-test-summary-pytest.sh b/.github/scripts/generate-test-summary-pytest.sh old mode 100644 new mode 100755 index f88ffff..9b1bef1 --- a/.github/scripts/generate-test-summary-pytest.sh +++ b/.github/scripts/generate-test-summary-pytest.sh @@ -1,70 +1,109 @@ #!/bin/bash set -e -# Generate Test Summary from Pytest JUnit XML Output -# Usage: ./generate-test-summary-pytest.sh +# Generate Detailed Test Summary from Multiple Pytest JUnit XML Output Files +# Shows breakdown by test type (unit vs integration) +# Usage: ./generate-test-summary-pytest-detailed.sh -XML_FILE="${1:-test-results.xml}" +UNIT_XML="${1:-}" +INTEGRATION_XML="${2:-}" echo "## Test Results" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY -# Parse test results from JUnit XML -if [ -f "$XML_FILE" ]; then - # Extract test counts from XML - # JUnit XML structure: +# Function to parse XML file +parse_xml() { + local xml_file="$1" + local test_type="$2" - tests=$(grep -oP 'tests="\K[0-9]+' "$XML_FILE" | head -1) - failures=$(grep -oP 'failures="\K[0-9]+' "$XML_FILE" | head -1) - errors=$(grep -oP 'errors="\K[0-9]+' "$XML_FILE" | head -1) - skipped=$(grep -oP 'skipped="\K[0-9]+' "$XML_FILE" | head -1) + if [ ! 
-f "$xml_file" ]; then + echo "0 0 0 0 0" + return + fi + + tests=$(grep -oP 'tests="\K[0-9]+' "$xml_file" | head -1) + failures=$(grep -oP 'failures="\K[0-9]+' "$xml_file" | head -1) + errors=$(grep -oP 'errors="\K[0-9]+' "$xml_file" | head -1) + skipped=$(grep -oP 'skipped="\K[0-9]+' "$xml_file" | head -1) - # Default to 0 if values are empty tests=${tests:-0} failures=${failures:-0} errors=${errors:-0} skipped=${skipped:-0} - passed=$((tests - failures - errors - skipped)) - echo "| Status | Count |" >> $GITHUB_STEP_SUMMARY - echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY - echo "| ✅ Passed | $passed |" >> $GITHUB_STEP_SUMMARY - echo "| ❌ Failed | $((failures + errors)) |" >> $GITHUB_STEP_SUMMARY - echo "| ⏭️ Skipped | $skipped |" >> $GITHUB_STEP_SUMMARY - echo "| **Total** | **$tests** |" >> $GITHUB_STEP_SUMMARY + echo "$tests $failures $errors $skipped $passed" +} + +# Parse both files +read -r unit_tests unit_failures unit_errors unit_skipped unit_passed <<< "$(parse_xml "$UNIT_XML" "Unit")" +read -r int_tests int_failures int_errors int_skipped int_passed <<< "$(parse_xml "$INTEGRATION_XML" "Integration")" + +# Calculate totals +total_tests=$((unit_tests + int_tests)) +total_failures=$((unit_failures + int_failures)) +total_errors=$((unit_errors + int_errors)) +total_skipped=$((unit_skipped + int_skipped)) +total_passed=$((unit_passed + int_passed)) +total_failed=$((total_failures + total_errors)) + +# Display detailed breakdown +echo "### Summary by Test Type" >> $GITHUB_STEP_SUMMARY +echo "" >> $GITHUB_STEP_SUMMARY +echo "| Test Type | Passed | Failed | Skipped | Total |" >> $GITHUB_STEP_SUMMARY +echo "|-----------|--------|--------|---------|-------|" >> $GITHUB_STEP_SUMMARY + +if [ -f "$UNIT_XML" ]; then + echo "| 🔧 Unit Tests | $unit_passed | $((unit_failures + unit_errors)) | $unit_skipped | $unit_tests |" >> $GITHUB_STEP_SUMMARY +fi + +if [ -f "$INTEGRATION_XML" ]; then + echo "| 🔗 Integration Tests | $int_passed | $((int_failures + int_errors)) | $int_skipped | $int_tests |" >> $GITHUB_STEP_SUMMARY +fi + +echo "| **Total** | **$total_passed** | **$total_failed** | **$total_skipped** | **$total_tests** |" >> $GITHUB_STEP_SUMMARY +echo "" >> $GITHUB_STEP_SUMMARY + +# Overall status +echo "### Overall Status" >> $GITHUB_STEP_SUMMARY +echo "" >> $GITHUB_STEP_SUMMARY +echo "| Status | Count |" >> $GITHUB_STEP_SUMMARY +echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY +echo "| ✅ Passed | $total_passed |" >> $GITHUB_STEP_SUMMARY +echo "| ❌ Failed | $total_failed |" >> $GITHUB_STEP_SUMMARY +echo "| ⏭️ Skipped | $total_skipped |" >> $GITHUB_STEP_SUMMARY +echo "| **Total** | **$total_tests** |" >> $GITHUB_STEP_SUMMARY +echo "" >> $GITHUB_STEP_SUMMARY + +# List failed tests if any +if [ $total_failed -gt 0 ]; then + echo "### ❌ Failed Tests" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY - # List failed tests if any - if [ $((failures + errors)) -gt 0 ]; then - echo "### ❌ Failed Tests" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Extract failed test names from XML - failed_tests_file=$(mktemp) - - # Find testcase elements with failure or error children - grep -oP ']*classname="[^"]*"[^>]*name="[^"]*"[^>]*>.*?<(failure|error)' "$XML_FILE" | \ - grep -oP 'classname="\K[^"]*|name="\K[^"]*' | \ - paste -d '.' 
- - >> "$failed_tests_file" 2>/dev/null || true - - if [ -s "$failed_tests_file" ]; then - while IFS= read -r test; do - echo "- \`$test\`" >> $GITHUB_STEP_SUMMARY - done < "$failed_tests_file" - else - echo "_Unable to parse individual test names_" >> $GITHUB_STEP_SUMMARY + failed_tests_file=$(mktemp) + + # Extract failed tests from both files + for xml_file in "$UNIT_XML" "$INTEGRATION_XML"; do + if [ -f "$xml_file" ]; then + grep -oP ']*classname="[^"]*"[^>]*name="[^"]*"[^>]*>.*?<(failure|error)' "$xml_file" | \ + grep -oP 'classname="\K[^"]*|name="\K[^"]*' | \ + paste -d '.' - - >> "$failed_tests_file" 2>/dev/null || true fi - - echo "" >> $GITHUB_STEP_SUMMARY - echo "❌ **Tests failed!**" >> $GITHUB_STEP_SUMMARY - rm -f "$failed_tests_file" - exit 1 + done + + if [ -s "$failed_tests_file" ]; then + while IFS= read -r test; do + echo "- \`$test\`" >> $GITHUB_STEP_SUMMARY + done < "$failed_tests_file" else - echo "✅ **All tests passed!**" >> $GITHUB_STEP_SUMMARY + echo "_Unable to parse individual test names_" >> $GITHUB_STEP_SUMMARY fi -else - echo "⚠️ No test results found at: $XML_FILE" >> $GITHUB_STEP_SUMMARY + + echo "" >> $GITHUB_STEP_SUMMARY + echo "❌ **Tests failed!**" >> $GITHUB_STEP_SUMMARY + rm -f "$failed_tests_file" exit 1 +else + echo "✅ **All tests passed!**" >> $GITHUB_STEP_SUMMARY fi diff --git a/.github/workflows/run-python-tests.yml b/.github/workflows/run-python-tests.yml index 26109ab..117671a 100644 --- a/.github/workflows/run-python-tests.yml +++ b/.github/workflows/run-python-tests.yml @@ -36,8 +36,14 @@ jobs: python -m pip install --upgrade pip pip install -r requirements.txt - - name: Run tests - run: pytest --verbose --tb=short --junit-xml=test-results.xml || true + - name: Run unit tests + run: pytest -m unit --verbose --tb=short --junit-xml=test-results-unit.xml + env: + MONGO_URI: ${{ secrets.MFLIX_URI }} + MONGO_DB: sample_mflix + + - name: Run integration tests + run: pytest -m integration --verbose --tb=short --junit-xml=test-results-integration.xml || true env: MONGO_URI: ${{ secrets.MFLIX_URI }} MONGO_DB: sample_mflix @@ -48,7 +54,8 @@ jobs: with: name: test-results path: | - server/python/test-results.xml + server/python/test-results-unit.xml + server/python/test-results-integration.xml server/python/htmlcov/ retention-days: 30 @@ -57,5 +64,6 @@ jobs: working-directory: . run: | chmod +x .github/scripts/generate-test-summary-pytest.sh - .github/scripts/generate-test-summary-pytest.sh server/python/test-results.xml - + .github/scripts/generate-test-summary-pytest.sh \ + server/python/test-results-unit.xml \ + server/python/test-results-integration.xml diff --git a/server/python/tests/README.md b/server/python/tests/README.md new file mode 100644 index 0000000..bf73ce3 --- /dev/null +++ b/server/python/tests/README.md @@ -0,0 +1,192 @@ +# Testing Guide for FastAPI MongoDB Sample Application + +This document describes the testing strategy and how to run tests for the FastAPI MongoDB MFlix sample application. + +## Test Structure + +The test suite is organized into three categories: + +### 1. **Schema Tests** (`test_movie_schemas.py`) +- Tests Pydantic model validation +- Validates request/response data structures +- No database or external dependencies required +- **10 tests** covering `CreateMovieRequest`, `UpdateMovieRequest`, and `Movie` models + +### 2. 
**Unit Tests** (`test_movie_routes.py`) +- Tests route handler functions in isolation +- Uses `unittest.mock.AsyncMock` to mock MongoDB operations +- No database connection required +- Fast execution (< 2 seconds) +- **51 tests** covering: + - CRUD operations (create, read, update, delete) + - Batch operations + - Search functionality + - Vector search + - Aggregation pipelines + +### 3. **Integration Tests** (`tests/integration/test_movie_routes_integration.py`) +- Tests the full HTTP request/response cycle +- Requires a running MongoDB instance with MFlix dataset +- Uses a real server running in a subprocess +- Tests are idempotent (clean up after themselves) +- **8 tests** (7 passing, 1 skipped due to known bug) covering: + - End-to-end CRUD operations + - Search against real data + - Batch operations with cleanup + - Aggregation queries + +## Running Tests + +### Prerequisites + +1. **For all tests:** + ```bash + cd server/python + source .venv/bin/activate # or `.venv\Scripts\activate` on Windows + ``` + +2. **For integration tests only:** + - MongoDB instance running with MFlix dataset loaded + - Connection string configured in `.env` file + - Port 8001 available (used for test server) + +### Run All Tests + +```bash +pytest tests/ -v +``` + +**Expected output:** 68 passed, 1 skipped in ~5 seconds + +### Run Only Unit Tests (Fast, No Database Required) + +```bash +pytest -m unit -v +``` + +**Expected output:** 51 passed in ~1 second + +### Run Only Integration Tests (Requires Database) + +```bash +pytest -m integration -v +``` + +**Expected output:** 7 passed, 1 skipped in ~4 seconds + +### Run Specific Test File + +```bash +# Schema tests +pytest tests/test_movie_schemas.py -v + +# Unit tests +pytest tests/test_movie_routes.py -v + +# Integration tests +pytest tests/integration/test_movie_routes_integration.py -v +``` + +### Run Specific Test Class or Method + +```bash +# Run a specific test class +pytest tests/test_movie_routes.py::TestCreateMovie -v + +# Run a specific test method +pytest tests/test_movie_routes.py::TestCreateMovie::test_create_movie_success -v +``` + +## Test Markers + +Tests are marked with pytest markers for selective execution: + +- `@pytest.mark.unit` - Unit tests with mocked dependencies +- `@pytest.mark.integration` - Integration tests requiring database + +## Integration Test Strategy + +### Why Use a Running Server? + +The integration tests start a real FastAPI server in a subprocess because: + +1. **Event Loop Isolation**: AsyncMongoClient binds to the event loop it was created in. Using a real server avoids event loop conflicts. +2. **Real-World Testing**: Tests the actual deployment configuration, including middleware, CORS, and startup events. +3. **Educational Value**: Demonstrates a practical integration testing pattern for async Python applications. 
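+
+As a concrete illustration of this pattern, below is a minimal sketch of an integration test written against this setup. It assumes the `client` and `created_movie` fixtures from `tests/integration/conftest.py` (described under "Fixtures" below), that `created_movie` yields the created document including its `_id`, and that responses use the same `success`/`data` envelope asserted in the unit tests; adjust names to match the actual fixtures and routes.
+
+```python
+import pytest
+
+
+@pytest.mark.integration
+@pytest.mark.asyncio
+async def test_get_created_movie(client, created_movie):
+    """Fetch a fixture-created movie through the running test server."""
+    # Assumption: the fixture yields the created document, including its "_id"
+    movie_id = created_movie["_id"]
+
+    # Real HTTP request against the subprocess server started by the `server` fixture
+    response = await client.get(f"/api/movies/{movie_id}")
+
+    assert response.status_code == 200
+    body = response.json()
+    # Response envelope (success/data) mirrors the shape asserted in the unit tests
+    assert body["success"] is True
+    assert body["data"]["title"] == created_movie["title"]
+```
+
+Because the request goes over HTTP to the subprocess server, the test exercises validation, routing, database access, and serialization together, while the fixture's cleanup keeps the run idempotent.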
+ +### Idempotent Tests + +All integration tests are designed to be idempotent: + +- **Create operations**: Tests create new documents with unique identifiers +- **Cleanup**: Fixtures automatically delete created documents after tests +- **Read-only tests**: Tests against existing MFlix data don't modify anything +- **Batch operations**: Create and delete multiple documents with proper cleanup + +### Fixtures + +Integration tests use pytest fixtures for test data lifecycle management: + +- `client`: AsyncClient connected to the test server +- `test_movie_data`: Sample movie data for creating test documents +- `created_movie`: Creates a movie and cleans it up automatically +- `multiple_test_movies`: Creates 3 movies for batch operation testing + +## Known Issues + +### Batch Create Bug (Skipped Test) + +The `test_batch_create_movies` test is currently skipped due to a known bug in the API: + +- **Issue**: `create_movies_batch` function calls `insert_many` twice (lines 1006 and 1015 in `movies.py`) +- **Impact**: Causes 500 error on batch create operations +- **Status**: To be fixed in a separate PR +- **Test behavior**: Test detects the error and skips gracefully + +## Troubleshooting + +### Integration Tests Fail to Start Server + +**Error**: `Port 8001 is already in use` + +**Solution**: +- Kill any process using port 8001: `lsof -ti:8001 | xargs kill -9` +- Or change the port in `tests/integration/conftest.py` + +### Integration Tests Can't Connect to MongoDB + +**Error**: Connection timeout or authentication error + +**Solution**: +- Verify MongoDB is running +- Check `.env` file has correct `MONGODB_URI` +- Ensure MFlix dataset is loaded +- Test connection: `mongosh ` + +### Unit Tests Fail with Import Errors + +**Error**: `ModuleNotFoundError` + +**Solution**: +- Ensure virtual environment is activated +- Install dependencies: `pip install -r requirements.txt` +- Run from `server/python` directory + +## Contributing + +When adding new routes or functionality: + +1. **Add unit tests** in `test_movie_routes.py` with mocked dependencies +2. **Add integration tests** in `tests/integration/test_movie_routes_integration.py` for end-to-end validation +3. **Use appropriate markers** (`@pytest.mark.unit` or `@pytest.mark.integration`) +4. **Follow fixture patterns** for test data lifecycle management +5. **Ensure idempotency** - tests should clean up after themselves +6. **Document test purpose** with clear docstrings + +## Additional Resources + +- [pytest documentation](https://docs.pytest.org/) +- [pytest-asyncio documentation](https://pytest-asyncio.readthedocs.io/) +- [FastAPI testing guide](https://fastapi.tiangolo.com/tutorial/testing/) +- [MongoDB Motor documentation](https://motor.readthedocs.io/) + diff --git a/server/python/tests/integration/__init__.py b/server/python/tests/integration/__init__.py new file mode 100644 index 0000000..a4bb8be --- /dev/null +++ b/server/python/tests/integration/__init__.py @@ -0,0 +1,2 @@ +"""Integration tests for the FastAPI MongoDB sample application.""" + diff --git a/server/python/tests/integration/conftest.py b/server/python/tests/integration/conftest.py new file mode 100644 index 0000000..329cbdc --- /dev/null +++ b/server/python/tests/integration/conftest.py @@ -0,0 +1,213 @@ +""" +Shared fixtures for integration tests. + +This module demonstrates MongoDB data lifecycle management patterns +for integration testing with FastAPI and MongoDB. + +These integration tests use a real running server to avoid event loop +issues with AsyncMongoClient. 
This approach: +- Tests the actual deployment configuration +- Avoids event loop binding issues +- Demonstrates real-world integration testing patterns +""" + +import uuid +import time +import subprocess +import sys +import os +import pytest +import pytest_asyncio +from httpx import AsyncClient +import socket + + +def is_port_in_use(port): + """Check if a port is already in use.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + return s.connect_ex(('localhost', port)) == 0 + + +@pytest.fixture(scope="session") +def server(): + """ + Start the FastAPI server in a subprocess for integration testing. + + This fixture demonstrates: + - Starting a real server for integration tests + - Proper cleanup of server process + - Waiting for server to be ready + - Using a test-specific port + + The server runs for the entire test session and is shared across all tests. + """ + # Use a different port for testing to avoid conflicts + test_port = 8001 + + # Check if port is already in use + if is_port_in_use(test_port): + pytest.skip(f"Port {test_port} is already in use. Cannot start test server.") + + # Get the absolute path to the server/python directory + # Tests are in server/python/tests/integration, so go up two levels + test_dir = os.path.dirname(os.path.abspath(__file__)) + server_python_dir = os.path.abspath(os.path.join(test_dir, "..", "..")) + + # Start the server process + process = subprocess.Popen( + [sys.executable, "-m", "uvicorn", "main:app", "--host", "127.0.0.1", "--port", str(test_port)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=server_python_dir + ) + + # Wait for server to be ready (max 10 seconds) + max_wait = 10 + start_time = time.time() + while time.time() - start_time < max_wait: + if is_port_in_use(test_port): + # Server is ready + time.sleep(0.5) # Give it a bit more time to fully initialize + break + time.sleep(0.1) + else: + # Server didn't start in time + process.kill() + pytest.fail(f"Server failed to start within {max_wait} seconds") + + yield f"http://127.0.0.1:{test_port}" + + # Cleanup: Stop the server + process.terminate() + try: + process.wait(timeout=5) + except subprocess.TimeoutExpired: + process.kill() + process.wait() + + +@pytest_asyncio.fixture +async def client(server): + """ + Create an AsyncClient that connects to the running test server. + + This client makes real HTTP requests to the server running in a subprocess, + testing the full request/response cycle including: + - Request validation + - Route handlers + - Database operations + - Response serialization + - Middleware + - CORS + + This approach avoids event loop issues with AsyncMongoClient. + """ + async with AsyncClient(base_url=server, timeout=30.0) as ac: + yield ac + + +@pytest.fixture +def test_movie_data(): + """ + Generate unique test movie data. + + Uses UUID to ensure uniqueness and avoid conflicts with: + - Existing MFlix data + - Other concurrent tests + - Previous test runs + + Returns minimal required fields for a valid movie document. + """ + unique_id = str(uuid.uuid4())[:8] + return { + "title": f"Integration Test Movie {unique_id}", + "year": 2024, + "plot": f"This is a test movie created during integration testing. ID: {unique_id}", + "genres": ["Test", "Integration"], + "runtime": 120, + "cast": ["Test Actor 1", "Test Actor 2"], + "directors": ["Test Director"], + "rated": "PG-13" + } + + +@pytest_asyncio.fixture +async def created_movie(client, test_movie_data): + """ + Create a test movie and automatically clean it up after the test. 
+ + This fixture demonstrates the create -> test -> cleanup pattern: + 1. POST request creates a new movie + 2. Yield the movie ID to the test + 3. DELETE request removes the movie (runs even if test fails) + + Usage: + async def test_something(created_movie): + # created_movie is the movie ID + response = await client.get(f"/api/movies/{created_movie}") + # ... test assertions ... + # Cleanup happens automatically + """ + # Setup: Create test movie + response = await client.post("/api/movies/", json=test_movie_data) + assert response.status_code in [200, 201], f"Failed to create test movie: {response.text}" + + movie_id = response.json()["data"]["_id"] + + # Provide movie ID to test + yield movie_id + + # Teardown: Clean up test movie (always runs) + cleanup_response = await client.delete(f"/api/movies/{movie_id}") + # Verify cleanup succeeded (helps catch cleanup issues early) + assert cleanup_response.status_code == 200, f"Failed to clean up test movie {movie_id}" + + +@pytest_asyncio.fixture +async def multiple_test_movies(client): + """ + Create multiple test movies for batch operation testing. + + This fixture demonstrates: + - Creating multiple related test documents + - Tracking all created IDs for cleanup + - Cleaning up all documents even if test fails + + Usage: + async def test_batch_operation(multiple_test_movies): + # multiple_test_movies is a list of movie IDs + assert len(multiple_test_movies) == 3 + # ... test batch operations ... + # All movies cleaned up automatically + """ + movie_ids = [] + unique_id = str(uuid.uuid4())[:8] + + # Create 3 test movies + for i in range(3): + movie_data = { + "title": f"Batch Test Movie {i} - {unique_id}", + "year": 2024, + "plot": f"Batch test movie {i}", + "genres": ["Test"], + "runtime": 90 + } + response = await client.post("/api/movies/", json=movie_data) + assert response.status_code in [200, 201], f"Failed to create batch test movie {i}" + movie_ids.append(response.json()["data"]["_id"]) + + yield movie_ids + + # Cleanup all test movies + # Note: Some tests may have already deleted these movies, so we handle that gracefully + for movie_id in movie_ids: + cleanup_response = await client.delete(f"/api/movies/{movie_id}") + # Accept 200 (success) or 500 (movie already deleted) + if cleanup_response.status_code == 500: + # Check if it's a "not found" error + response_data = cleanup_response.json() + if response_data.get("success") is False and "not found" in response_data.get("error", {}).get("message", "").lower(): + # Movie was already deleted, which is fine + continue + assert cleanup_response.status_code == 200, f"Failed to clean up movie {movie_id}" + diff --git a/server/python/tests/integration/test_movie_routes_integration.py b/server/python/tests/integration/test_movie_routes_integration.py new file mode 100644 index 0000000..7c83a57 --- /dev/null +++ b/server/python/tests/integration/test_movie_routes_integration.py @@ -0,0 +1,333 @@ +""" +Integration tests for movie routes. + +These tests validate the full request/response cycle against a real MongoDB instance. +They demonstrate best practices for: +- Testing with real database operations +- Managing test data lifecycle +- Leaving the database in a clean state +- Testing against production-like data (MFlix dataset) + +Note: These tests create and clean up their own test data, leaving the +existing MFlix dataset untouched. +""" + +import pytest + + +@pytest.mark.integration +class TestMovieCRUDIntegration: + """ + Integration tests for basic CRUD operations. 
+ + These tests demonstrate the full lifecycle of movie documents: + - Creating new documents + - Reading documents by ID + - Updating existing documents + - Deleting documents + """ + + @pytest.mark.asyncio + async def test_create_and_retrieve_movie(self, client, test_movie_data): + """ + Test creating a movie and retrieving it by ID. + + This test demonstrates: + - POST request with JSON body + - Response validation + - GET request with path parameter + - Explicit cleanup pattern + """ + # Create a new movie + create_response = await client.post("/api/movies/", json=test_movie_data) + + # Validate creation response (201 Created is correct for POST) + assert create_response.status_code == 201 + create_data = create_response.json() + assert create_data["success"] is True + assert create_data["data"]["title"] == test_movie_data["title"] + + movie_id = create_data["data"]["_id"] + + try: + # Retrieve the created movie + get_response = await client.get(f"/api/movies/{movie_id}") + + # Validate retrieval response + assert get_response.status_code == 200 + get_data = get_response.json() + assert get_data["success"] is True + assert get_data["data"]["_id"] == movie_id + assert get_data["data"]["title"] == test_movie_data["title"] + assert get_data["data"]["year"] == test_movie_data["year"] + + finally: + # Cleanup: Delete the test movie + delete_response = await client.delete(f"/api/movies/{movie_id}") + assert delete_response.status_code == 200 + + @pytest.mark.asyncio + async def test_update_movie(self, client, created_movie): + """ + Test updating a movie's fields. + + This test demonstrates: + - Using fixtures for setup/cleanup + - PATCH request with partial updates + - Verifying updates persisted to database + """ + # Update the movie + update_data = { + "title": "Updated Integration Test Title", + "year": 2025, + "plot": "Updated plot for integration testing" + } + update_response = await client.patch( + f"/api/movies/{created_movie}", + json=update_data + ) + + # Validate update response + assert update_response.status_code == 200 + update_result = update_response.json() + assert update_result["success"] is True + + # Verify the update persisted + get_response = await client.get(f"/api/movies/{created_movie}") + assert get_response.status_code == 200 + movie_data = get_response.json()["data"] + assert movie_data["title"] == update_data["title"] + assert movie_data["year"] == update_data["year"] + assert movie_data["plot"] == update_data["plot"] + + # Fixture handles cleanup automatically + + @pytest.mark.asyncio + async def test_delete_movie(self, client, test_movie_data): + """ + Test deleting a movie. 
+ + This test demonstrates: + - Complete lifecycle: create -> delete -> verify + - Testing 404 response after deletion + - No cleanup needed (movie already deleted) + """ + # Create a movie + create_response = await client.post("/api/movies/", json=test_movie_data) + movie_id = create_response.json()["data"]["_id"] + + # Delete the movie + delete_response = await client.delete(f"/api/movies/{movie_id}") + assert delete_response.status_code == 200 + delete_data = delete_response.json() + assert delete_data["success"] is True + + # Verify movie no longer exists + # Note: The API returns 200 with INTERNAL_SERVER_ERROR code, not 404 + get_response = await client.get(f"/api/movies/{movie_id}") + error_data = get_response.json() + assert error_data["success"] is False + assert error_data["error"]["code"] == "INTERNAL_SERVER_ERROR" + assert "not found" in error_data["error"]["message"].lower() + + # No cleanup needed - movie already deleted + + +@pytest.mark.integration +class TestMovieSearchIntegration: + """ + Integration tests for search functionality. + + These tests use the existing MFlix dataset (read-only operations). + No cleanup needed since we're not modifying data. + """ + + @pytest.mark.asyncio + async def test_search_existing_movies_by_plot(self, client): + """ + Test searching movies using the existing MFlix dataset. + + This test demonstrates: + - Read-only operations against production-like data + - Query parameters in GET requests + - No cleanup needed for read operations + """ + # Search for movies with "love" in the plot + response = await client.get("/api/movies/search?plot=love&search_operator=must") + + # Validate response + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["totalCount"] > 0 + assert len(data["data"]["movies"]) > 0 + + # Verify search results contain the search term + first_movie = data["data"]["movies"][0] + assert "title" in first_movie + assert "plot" in first_movie + + @pytest.mark.asyncio + async def test_get_all_movies_with_pagination(self, client): + """ + Test retrieving movies with pagination. + + This test demonstrates: + - Pagination parameters (skip/limit, not page-based) + - Testing against existing dataset + - Validating response structure + """ + # Get first page using skip and limit + response = await client.get("/api/movies/?skip=0&limit=10") + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + # The API returns a simple list in data, not a paginated object + assert isinstance(data["data"], list) + assert len(data["data"]) <= 10 + + +@pytest.mark.integration +class TestBatchOperationsIntegration: + """ + Integration tests for batch operations. + + These tests demonstrate working with multiple documents + and proper cleanup of all created test data. + """ + + @pytest.mark.asyncio + async def test_batch_create_movies(self, client): + """ + Test creating multiple movies in a single request. 
+ + This test demonstrates: + - Batch creation endpoint + - Creating multiple test documents + - Cleaning up all created documents + """ + # Prepare batch of movies + import uuid + unique_id = str(uuid.uuid4())[:8] + movies = [ + { + "title": f"Batch Movie {i} - {unique_id}", + "year": 2024, + "plot": f"Batch test movie {i}", + "genres": ["Test"], + "runtime": 90 + } + for i in range(3) + ] + + # Create batch + response = await client.post("/api/movies/batch", json=movies) + + # Note: Due to a bug in the API (duplicate insert_many calls), + # this may return 500. This is a known issue to be fixed. + # For now, we'll accept either 201 (success) or 500 (bug) + assert response.status_code in [201, 500] + + if response.status_code == 500: + # Skip the rest of the test if the bug occurs + import pytest + pytest.skip("Batch create failed due to known duplicate insert_many bug") + data = response.json() + assert data["success"] is True + + # Extract created IDs for cleanup + created_ids = data["data"]["insertedIds"] + assert len(created_ids) == 3 + + try: + # Verify all movies were created + for movie_id in created_ids: + get_response = await client.get(f"/api/movies/{movie_id}") + assert get_response.status_code == 200 + finally: + # Cleanup: Delete all created movies + for movie_id in created_ids: + await client.delete(f"/api/movies/{movie_id}") + + @pytest.mark.asyncio + async def test_batch_delete_movies(self, client, multiple_test_movies): + """ + Test deleting multiple movies using a filter. + + This test demonstrates: + - Using fixtures to create test data + - Batch delete with filter + - Verifying deletions + """ + # Get one of the test movie titles for filtering + first_movie_response = await client.get(f"/api/movies/{multiple_test_movies[0]}") + first_movie_title = first_movie_response.json()["data"]["title"] + + # Extract the unique ID from the title (format: "Batch Test Movie X - {uuid}") + unique_id = first_movie_title.split(" - ")[-1] + + # Delete all movies with this unique ID in the title + # Note: httpx AsyncClient.delete() doesn't support json parameter + # We need to use request() method instead + # The API expects the filter to be wrapped in a "filter" key + # The batch delete endpoint is at DELETE /api/movies/ (not /batch) + delete_response = await client.request( + "DELETE", + "/api/movies/", + json={"filter": {"title": {"$regex": unique_id}}} + ) + + assert delete_response.status_code == 200 + delete_data = delete_response.json() + assert delete_data["success"] is True + assert delete_data["data"]["deletedCount"] == 3 + + # Verify all movies were deleted + # Note: The API returns 200 with INTERNAL_SERVER_ERROR code, not 404 + for movie_id in multiple_test_movies: + get_response = await client.get(f"/api/movies/{movie_id}") + response_data = get_response.json() + assert response_data["success"] is False + assert response_data["error"]["code"] == "INTERNAL_SERVER_ERROR" + assert "not found" in response_data["error"]["message"].lower() + + # Note: Fixture cleanup will try to delete but movies are already gone + # The fixture should handle this gracefully + + +@pytest.mark.integration +class TestAggregationIntegration: + """ + Integration tests for aggregation endpoints. + + These tests use the existing MFlix dataset (read-only operations). + """ + + @pytest.mark.asyncio + async def test_aggregate_movies_by_year(self, client): + """ + Test aggregation reporting by year. 
+ + This test demonstrates: + - Complex aggregation pipelines + - Testing against existing dataset + - Validating aggregation results + """ + response = await client.get("/api/movies/aggregations/reportingByYear") + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) > 0 + + # Validate structure of aggregation results + # Note: The aggregation projects "year" field and excludes "_id" + first_result = data["data"][0] + assert "year" in first_result # Year field (not _id) + assert "movieCount" in first_result + assert "averageRating" in first_result # Note: it's averageRating, not avgRuntime + assert "highestRating" in first_result + assert "lowestRating" in first_result + assert "totalVotes" in first_result + From 050539dec0d16573f18b25067fd2628872cae606 Mon Sep 17 00:00:00 2001 From: dacharyc Date: Thu, 6 Nov 2025 17:20:34 -0500 Subject: [PATCH 4/5] Update tests after pulling in bug fix --- server/python/tests/README.md | 6 +++--- .../tests/integration/test_movie_routes_integration.py | 10 +--------- server/python/tests/test_movie_routes.py | 4 +--- 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/server/python/tests/README.md b/server/python/tests/README.md index bf73ce3..64dbafd 100644 --- a/server/python/tests/README.md +++ b/server/python/tests/README.md @@ -56,7 +56,7 @@ The test suite is organized into three categories: pytest tests/ -v ``` -**Expected output:** 68 passed, 1 skipped in ~5 seconds +**Expected output:** 69 passed in ~5 seconds ### Run Only Unit Tests (Fast, No Database Required) @@ -64,7 +64,7 @@ pytest tests/ -v pytest -m unit -v ``` -**Expected output:** 51 passed in ~1 second +**Expected output:** 61 passed, 8 deselected in ~1 second ### Run Only Integration Tests (Requires Database) @@ -72,7 +72,7 @@ pytest -m unit -v pytest -m integration -v ``` -**Expected output:** 7 passed, 1 skipped in ~4 seconds +**Expected output:** 8 passed, 61 deselected in ~4 seconds ### Run Specific Test File diff --git a/server/python/tests/integration/test_movie_routes_integration.py b/server/python/tests/integration/test_movie_routes_integration.py index 7c83a57..0909089 100644 --- a/server/python/tests/integration/test_movie_routes_integration.py +++ b/server/python/tests/integration/test_movie_routes_integration.py @@ -224,15 +224,7 @@ async def test_batch_create_movies(self, client): # Create batch response = await client.post("/api/movies/batch", json=movies) - # Note: Due to a bug in the API (duplicate insert_many calls), - # this may return 500. This is a known issue to be fixed. 
- # For now, we'll accept either 201 (success) or 500 (bug) - assert response.status_code in [201, 500] - - if response.status_code == 500: - # Skip the rest of the test if the bug occurs - import pytest - pytest.skip("Batch create failed due to known duplicate insert_many bug") + assert response.status_code == 201 data = response.json() assert data["success"] is True diff --git a/server/python/tests/test_movie_routes.py b/server/python/tests/test_movie_routes.py index 3fd510e..9a04b06 100644 --- a/server/python/tests/test_movie_routes.py +++ b/server/python/tests/test_movie_routes.py @@ -422,9 +422,7 @@ async def test_create_movies_batch_success(self, mock_get_collection): # Assertions assert result.success is True assert result.data["insertedCount"] == 2 - # Note: The route handler has a bug where it calls insert_many twice - # This test documents the current behavior - assert mock_collection.insert_many.call_count == 2 + assert mock_collection.insert_many.call_count == 1 @patch('src.routers.movies.get_collection') async def test_create_movies_batch_empty_list(self, mock_get_collection): From 833321f73329f9a9cdc0fe2f108b18f0c1276b24 Mon Sep 17 00:00:00 2001 From: dacharyc Date: Thu, 6 Nov 2025 19:11:03 -0500 Subject: [PATCH 5/5] Add integration tests for remaining aggregation APIs --- server/python/tests/README.md | 16 ++--- .../test_movie_routes_integration.py | 62 +++++++++++++++++++ 2 files changed, 70 insertions(+), 8 deletions(-) diff --git a/server/python/tests/README.md b/server/python/tests/README.md index 64dbafd..f8bf9f9 100644 --- a/server/python/tests/README.md +++ b/server/python/tests/README.md @@ -29,11 +29,11 @@ The test suite is organized into three categories: - Requires a running MongoDB instance with MFlix dataset - Uses a real server running in a subprocess - Tests are idempotent (clean up after themselves) -- **8 tests** (7 passing, 1 skipped due to known bug) covering: - - End-to-end CRUD operations - - Search against real data - - Batch operations with cleanup - - Aggregation queries +- **10 tests** covering: + - CRUD operations + - Batch operations + - Search functionality + - Aggregation pipelines ## Running Tests @@ -56,7 +56,7 @@ The test suite is organized into three categories: pytest tests/ -v ``` -**Expected output:** 69 passed in ~5 seconds +**Expected output:** 71 passed in ~6 seconds ### Run Only Unit Tests (Fast, No Database Required) @@ -64,7 +64,7 @@ pytest tests/ -v pytest -m unit -v ``` -**Expected output:** 61 passed, 8 deselected in ~1 second +**Expected output:** 61 passed, 10 deselected in ~1.5 seconds ### Run Only Integration Tests (Requires Database) @@ -72,7 +72,7 @@ pytest -m unit -v pytest -m integration -v ``` -**Expected output:** 8 passed, 61 deselected in ~4 seconds +**Expected output:** 10 passed, 61 deselected in ~5 seconds ### Run Specific Test File diff --git a/server/python/tests/integration/test_movie_routes_integration.py b/server/python/tests/integration/test_movie_routes_integration.py index 0909089..f018bbd 100644 --- a/server/python/tests/integration/test_movie_routes_integration.py +++ b/server/python/tests/integration/test_movie_routes_integration.py @@ -323,3 +323,65 @@ async def test_aggregate_movies_by_year(self, client): assert "lowestRating" in first_result assert "totalVotes" in first_result + @pytest.mark.asyncio + async def test_aggregate_movies_by_comments(self, client): + """ + Test aggregation reporting by comments. 
+ + This test demonstrates: + - $lookup aggregation (joining collections) + - Testing against existing dataset with comments + - Validating nested data structures + """ + response = await client.get("/api/movies/aggregations/reportingByComments?limit=5") + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + + # Should return movies that have comments + if len(data["data"]) > 0: + first_result = data["data"][0] + # Validate structure of aggregation results + assert "_id" in first_result + assert "title" in first_result + assert "year" in first_result + assert "totalComments" in first_result + assert "recentComments" in first_result + assert isinstance(first_result["recentComments"], list) + + # If there are recent comments, validate their structure + if len(first_result["recentComments"]) > 0: + comment = first_result["recentComments"][0] + assert "userName" in comment + assert "userEmail" in comment + assert "text" in comment + assert "date" in comment + + @pytest.mark.asyncio + async def test_aggregate_directors_most_movies(self, client): + """ + Test aggregation reporting by directors. + + This test demonstrates: + - $unwind aggregation (array flattening) + - Grouping and sorting operations + - Testing against existing dataset + """ + response = await client.get("/api/movies/aggregations/reportingByDirectors?limit=10") + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) > 0 + + # Validate structure of aggregation results + first_result = data["data"][0] + assert "director" in first_result + assert "movieCount" in first_result + assert "averageRating" in first_result + + # Verify results are sorted by movieCount (descending) + if len(data["data"]) > 1: + assert data["data"][0]["movieCount"] >= data["data"][1]["movieCount"] +