Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions mcp_plex/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
PlexPartialObject = object # type: ignore[assignment]


# NOTE(review): calling logging.basicConfig() at import time of a library
# module overrides the root-logger configuration of any application that
# imports mcp_plex.loader. Prefer configuring logging only in the CLI
# entry point — TODO confirm whether this module is meant to double as one.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

T = TypeVar("T")
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "mcp-plex"
version = "0.26.22"
version = "0.26.24"

description = "Plex-Oriented Model Context Protocol Server"
requires-python = ">=3.11,<3.13"
Expand Down
30 changes: 30 additions & 0 deletions tests/test_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from mcp_plex.cache import MediaCache


def test_media_cache_eviction_and_clear():
    """Recently-read entries survive LRU eviction; clear() empties every map."""
    shawshank = {"id": "tt0111161", "title": "The Shawshank Redemption"}
    godfather = {"id": "tt0068646", "title": "The Godfather"}
    inception = {"id": "tt1375666", "title": "Inception"}

    cache = MediaCache(size=2)
    cache.set_payload("tt0111161", shawshank)
    cache.set_payload("tt0068646", godfather)
    # Touch the older entry so the other one becomes least-recently-used.
    cache.get_payload("tt0111161")
    cache.set_payload("tt1375666", inception)

    assert cache.get_payload("tt0111161") == shawshank
    assert cache.get_payload("tt0068646") is None
    assert cache.get_payload("tt1375666") == inception

    # Poster/background stores are independent of the payload store.
    assert cache.get_poster("missing") is None
    cache.set_poster("tt0111161", "https://example.com/shawshank.jpg")
    cache.set_background("tt0111161", "https://example.com/shawshank-bg.jpg")
    assert cache.get_poster("tt0111161") == "https://example.com/shawshank.jpg"
    assert cache.get_background("tt0111161") == "https://example.com/shawshank-bg.jpg"

    cache.clear()
    assert cache.get_payload("tt0111161") is None
    assert cache.get_poster("tt0111161") is None
    assert cache.get_background("tt0111161") is None
16 changes: 16 additions & 0 deletions tests/test_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import pytest
from pydantic import ValidationError

from mcp_plex.config import Settings


def test_settings_env_override(monkeypatch):
    """An environment variable must override the default Qdrant port."""
    monkeypatch.setenv("QDRANT_PORT", "7001")
    assert Settings().qdrant_port == 7001


def test_settings_invalid_cache_size(monkeypatch):
    """A non-integer CACHE_SIZE env value must raise a validation error."""
    monkeypatch.setenv("CACHE_SIZE", "notint")
    with pytest.raises(ValidationError):
        Settings()
5 changes: 5 additions & 0 deletions tests/test_gather_in_batches.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import asyncio
import logging
import pytest

from mcp_plex import loader

Expand Down Expand Up @@ -29,3 +30,7 @@ async def fake_gather(*coros):
assert "Processed 4/5 items" in caplog.text
assert "Processed 5/5 items" in caplog.text

def test_gather_in_batches_zero_batch_size():
    """A non-positive batch size must be rejected up front."""
    pending = [_echo(i) for i in range(3)]
    with pytest.raises(ValueError):
        asyncio.run(loader._gather_in_batches(pending, 0))
46 changes: 46 additions & 0 deletions tests/test_imdb_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import json
from pathlib import Path

from mcp_plex.imdb_cache import IMDbCache


def test_imdb_cache_loads_existing_and_persists(tmp_path: Path):
    """A pre-existing cache file is loaded, and set() writes through to disk."""
    shawshank = {"id": "tt0111161", "primaryTitle": "The Shawshank Redemption"}
    godfather = {"id": "tt0068646", "primaryTitle": "The Godfather"}

    path = tmp_path / "cache.json"
    path.write_text(json.dumps({"tt0111161": shawshank}))

    cache = IMDbCache(path)
    assert cache.get("tt0111161") == shawshank

    cache.set("tt0068646", godfather)
    # The new entry is persisted alongside the pre-existing one.
    assert json.loads(path.read_text()) == {
        "tt0111161": shawshank,
        "tt0068646": godfather,
    }


def test_imdb_cache_invalid_file(tmp_path: Path):
    """A corrupt cache file is treated as empty and remains usable."""
    path = tmp_path / "cache.json"
    path.write_text("not json")

    cache = IMDbCache(path)
    assert cache.get("tt0111161") is None

    # Writes still work after the unreadable file was discarded.
    cache.set("tt0111161", {"id": "tt0111161"})
    assert cache.get("tt0111161") == {"id": "tt0111161"}
29 changes: 29 additions & 0 deletions tests/test_loader_cli.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import asyncio
import runpy
import sys

import pytest
from click.testing import CliRunner
Expand Down Expand Up @@ -64,6 +66,26 @@ async def invoke():
asyncio.run(invoke())


def test_run_requires_tmdb_api_key(monkeypatch):
    """run() must fail fast when no TMDb API key is supplied."""
    monkeypatch.setattr(loader, "PlexServer", object)

    with pytest.raises(RuntimeError, match="TMDB_API_KEY must be provided"):
        asyncio.run(loader.run("http://localhost", "token", None, None, None, None))


def test_run_requires_plexapi(monkeypatch):
    """run() must fail fast when plexapi is unavailable for live loading."""
    monkeypatch.setattr(loader, "PlexServer", None)

    with pytest.raises(RuntimeError, match="plexapi is required for live loading"):
        asyncio.run(loader.run("http://localhost", "token", "key", None, None, None))


def test_cli_model_overrides(monkeypatch):
captured: dict[str, str] = {}

Expand Down Expand Up @@ -114,3 +136,10 @@ async def fake_run(*args, **kwargs):

assert captured["dense"] == "foo"
assert captured["sparse"] == "bar"


def test_loader_script_entrypoint(monkeypatch):
    """Running the module as a script with --help exits cleanly (code 0)."""
    monkeypatch.setattr(sys, "argv", ["loader", "--help"])
    with pytest.raises(SystemExit) as excinfo:
        runpy.run_module("mcp_plex.loader", run_name="__main__")
    assert excinfo.value.code == 0
25 changes: 25 additions & 0 deletions tests/test_loader_integration.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import asyncio
import json
from pathlib import Path

from qdrant_client.async_qdrant_client import AsyncQdrantClient
Expand Down Expand Up @@ -53,3 +54,27 @@ def test_run_writes_points(monkeypatch):
)


def test_run_processes_imdb_queue(monkeypatch, tmp_path):
    """An ID whose IMDb lookup keeps failing stays in the retry queue."""
    monkeypatch.setattr(loader, "AsyncQdrantClient", CaptureClient)

    queue_file = tmp_path / "queue.json"
    queue_file.write_text(json.dumps(["tt0111161"]))
    sample_dir = Path(__file__).resolve().parents[1] / "sample-data"

    async def fake_fetch(client, imdb_id):
        # Simulate a lookup that never succeeds.
        return None

    monkeypatch.setattr(loader, "_fetch_imdb", fake_fetch)

    asyncio.run(
        loader.run(
            None,
            None,
            None,
            sample_dir,
            None,
            None,
            imdb_queue_path=queue_file,
        )
    )

    # The unresolved ID must be persisted back to the queue file.
    assert json.loads(queue_file.read_text()) == ["tt0111161"]
120 changes: 116 additions & 4 deletions tests/test_loader_unit.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import asyncio
import builtins
import importlib
import json
import types
from pathlib import Path
Expand All @@ -24,6 +26,21 @@
from mcp_plex.types import TMDBSeason, TMDBShow


def test_loader_import_fallback(monkeypatch):
    """Reloading the loader without plexapi installed falls back to stubs.

    Bug fix: the original test performed the restoring ``importlib.reload``
    while ``builtins.__import__`` was still monkeypatched, so the final
    reload *also* saw plexapi as missing and the stubbed module leaked into
    every test that ran afterwards. Restore the real import first, inside a
    ``finally`` so the module is repaired even if the assertions fail.
    """
    real_import = builtins.__import__

    def fake_import(name, globals=None, locals=None, fromlist=(), level=0):
        # Pretend only plexapi is uninstalled; delegate everything else.
        if name.startswith("plexapi"):
            raise ModuleNotFoundError
        return real_import(name, globals, locals, fromlist, level)

    monkeypatch.setattr(builtins, "__import__", fake_import)
    try:
        module = importlib.reload(loader)
        assert module.PlexServer is None
        assert module.PlexPartialObject is object
    finally:
        # Undo the patch *before* the restoring reload so the loader module
        # is re-imported with plexapi visible again.
        monkeypatch.setattr(builtins, "__import__", real_import)
        importlib.reload(loader)


def test_extract_external_ids():
guid_objs = [
types.SimpleNamespace(id="imdb://tt0133093"),
Expand Down Expand Up @@ -234,6 +251,38 @@ async def main():
assert all(len(c) <= 5 for c in calls)


def test_fetch_imdb_batch_all_cached(monkeypatch, tmp_path):
    """When every ID is already cached, the batch fetch must skip the network."""
    shawshank = {
        "id": "tt0111161",
        "type": "movie",
        "primaryTitle": "The Shawshank Redemption",
    }
    godfather = {
        "id": "tt0068646",
        "type": "movie",
        "primaryTitle": "The Godfather",
    }
    cache_path = tmp_path / "cache.json"
    cache_path.write_text(json.dumps({"tt0111161": shawshank, "tt0068646": godfather}))
    monkeypatch.setattr(loader, "_imdb_cache", IMDbCache(cache_path))

    async def error_mock(request):
        # Any HTTP request means the cache was bypassed — fail loudly.
        raise AssertionError("network should not be called")

    async def main():
        transport = httpx.MockTransport(error_mock)
        async with httpx.AsyncClient(transport=transport) as client:
            result = await _fetch_imdb_batch(client, ["tt0111161", "tt0068646"])
        assert result["tt0111161"].primaryTitle == "The Shawshank Redemption"
        assert result["tt0068646"].primaryTitle == "The Godfather"

    asyncio.run(main())


def test_fetch_imdb_retries_on_429(monkeypatch, tmp_path):
cache_path = tmp_path / "cache.json"
monkeypatch.setattr(loader, "_imdb_cache", IMDbCache(cache_path))
Expand Down Expand Up @@ -279,17 +328,24 @@ async def first_transport(request):
return httpx.Response(429)

async def second_transport(request):
return httpx.Response(200, json={"id": "tt1", "type": "movie", "primaryTitle": "T"})
return httpx.Response(
200,
json={
"id": "tt0111161",
"type": "movie",
"primaryTitle": "The Shawshank Redemption",
},
)

async def first_run():
_load_imdb_retry_queue(queue_path)
async with httpx.AsyncClient(transport=httpx.MockTransport(first_transport)) as client:
await _process_imdb_retry_queue(client)
await _fetch_imdb(client, "tt1")
await _fetch_imdb(client, "tt0111161")
_persist_imdb_retry_queue(queue_path)

asyncio.run(first_run())
assert json.loads(queue_path.read_text()) == ["tt1"]
assert json.loads(queue_path.read_text()) == ["tt0111161"]

async def second_run():
_load_imdb_retry_queue(queue_path)
Expand All @@ -299,7 +355,33 @@ async def second_run():

asyncio.run(second_run())
assert json.loads(queue_path.read_text()) == []
assert loader._imdb_cache.get("tt1") is not None
assert loader._imdb_cache.get("tt0111161") is not None


def test_load_imdb_retry_queue_invalid_json(tmp_path):
    """A corrupt queue file is ignored, leaving an empty retry queue."""
    queue_path = tmp_path / "queue.json"
    queue_path.write_text("not json")

    _load_imdb_retry_queue(queue_path)

    retry_queue = loader._imdb_retry_queue
    assert retry_queue is not None
    assert retry_queue.qsize() == 0


def test_process_imdb_retry_queue_requeues(monkeypatch):
    """IDs whose fetch still fails must be put back on the retry queue."""
    retry_queue: asyncio.Queue[str] = asyncio.Queue()
    retry_queue.put_nowait("tt0111161")
    monkeypatch.setattr(loader, "_imdb_retry_queue", retry_queue)

    async def fake_fetch(client, imdb_id):
        # A lookup that never succeeds.
        return None

    monkeypatch.setattr(loader, "_fetch_imdb", fake_fetch)

    async def drive():
        async with httpx.AsyncClient() as client:
            await _process_imdb_retry_queue(client)

    asyncio.run(drive())
    # The failed ID is back on the queue for the next run.
    assert retry_queue.qsize() == 1


def test_resolve_tmdb_season_number_matches_name():
Expand Down Expand Up @@ -334,3 +416,33 @@ def test_resolve_tmdb_season_number_parent_year_fallback():
seasons=[TMDBSeason(season_number=5, name="Season 5", air_date="2018-06-01")],
)
assert resolve_tmdb_season_number(show, episode) == 5


def test_resolve_tmdb_season_number_numeric_match():
    """A season whose name equals the episode's parent title resolves by number."""
    ep = types.SimpleNamespace(parentIndex=2, parentTitle="Season 2")
    show = TMDBShow(
        id=1,
        name="Show",
        seasons=[TMDBSeason(season_number=2, name="Season 2")],
    )
    assert resolve_tmdb_season_number(show, ep) == 2


def test_resolve_tmdb_season_number_title_year():
    """A parent title that is a year matches the season aired in that year."""
    ep = types.SimpleNamespace(parentTitle="2018")
    show = TMDBShow(
        id=1,
        name="Show",
        seasons=[
            TMDBSeason(season_number=7, name="Season 7", air_date="2018-02-03")
        ],
    )
    assert resolve_tmdb_season_number(show, ep) == 7


def test_resolve_tmdb_season_number_parent_index_str():
    """A numeric-string parentIndex resolves to its integer value, sans show."""
    ep = types.SimpleNamespace(parentIndex="3")
    assert resolve_tmdb_season_number(None, ep) == 3


def test_resolve_tmdb_season_number_parent_title_digit():
    """A digit-only parentTitle resolves to its integer value, sans show."""
    ep = types.SimpleNamespace(parentTitle="4")
    assert resolve_tmdb_season_number(None, ep) == 4
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.