
Commit 59c89ec

feat: Enhance magnet parsing to handle mixed prefixes and add site configuration support
1 parent 933c630 commit 59c89ec

8 files changed: 213 additions & 98 deletions

app/api/qbittorrent/torrents.py

Lines changed: 15 additions & 12 deletions
@@ -62,18 +62,21 @@ def torrents_add(
 
     magnet = urls.splitlines()[0].strip()
     logger.debug(f"Parsing magnet: {magnet}")
-    payload = parse_magnet(magnet)
-
-    # Determine which prefix is used (aw_ or sto_)
-    prefix = "aw" if "aw_slug" in payload else "sto"
-
-    slug = payload[f"{prefix}_slug"]
-    season = int(payload[f"{prefix}_s"])
-    episode = int(payload[f"{prefix}_e"])
-    language = payload[f"{prefix}_lang"]
-    site = payload.get(f"{prefix}_site", "aniworld.to" if prefix == "aw" else "s.to")
-    name = payload.get("dn", f"{slug}.S{season:02d}E{episode:02d}.{language}")
-    xt = payload["xt"]
+    try:
+        payload = parse_magnet(magnet)
+        prefix = "aw" if "aw_slug" in payload else "sto"
+
+        slug = payload[f"{prefix}_slug"]
+        season = int(payload[f"{prefix}_s"])
+        episode = int(payload[f"{prefix}_e"])
+        language = payload[f"{prefix}_lang"]
+        site = payload.get(f"{prefix}_site", "aniworld.to" if prefix == "aw" else "s.to")
+        name = payload.get("dn", f"{slug}.S{season:02d}E{episode:02d}.{language}")
+        xt = payload["xt"]
+    except (KeyError, ValueError) as exc:
+        logger.warning(f"Malformed magnet parameters: {exc}")
+        raise HTTPException(status_code=400, detail="malformed magnet parameters") from exc
+
     btih = xt.split(":")[-1].lower()
 
     logger.info(
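
For orientation, a rough sketch of the magnet shape this handler now parses inside the try block; the hash, slug and language values are invented, and the query layout is an assumption based on the keys read above rather than a documented format.

from app.utils.magnet import parse_magnet

# Hypothetical AniWorld-style magnet; every value is illustrative.
magnet = (
    "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567"
    "&dn=example-show.S01E02.German+Dub"
    "&aw_slug=example-show&aw_s=1&aw_e=2&aw_lang=German+Dub&aw_site=aniworld.to"
)

payload = parse_magnet(magnet)                    # flat dict of query parameters
prefix = "aw" if "aw_slug" in payload else "sto"  # same prefix detection as the handler
btih = payload["xt"].split(":")[-1].lower()       # info-hash extracted exactly as above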

app/api/torznab/api.py

Lines changed: 20 additions & 12 deletions
@@ -10,6 +10,7 @@
 from sqlmodel import Session
 
 from app.config import (
+    CATALOG_SITE_CONFIGS,
     TORZNAB_CAT_ANIME,
     TORZNAB_RETURN_TEST_RESULT,
     TORZNAB_TEST_EPISODE,
@@ -24,6 +25,23 @@
 from .utils import _build_item, _caps_xml, _require_apikey, _rss_root
 
 
+def _default_languages_for_site(site: str) -> List[str]:
+    """
+    Return the default language preference ordering for a catalogue site.
+    Falls back to AniWorld defaults when a site-specific mapping is not found.
+    """
+    cfg = CATALOG_SITE_CONFIGS.get(site)
+    if cfg:
+        languages = cfg.get("default_languages")
+        if isinstance(languages, list) and languages:
+            # return a shallow copy to avoid accidental mutation of config state
+            return list(languages)
+    fallback = CATALOG_SITE_CONFIGS.get("aniworld.to", {}).get(
+        "default_languages", ["German Dub", "German Sub", "English Sub"]
+    )
+    return list(fallback)
+
+
 @router.get("/api", response_class=FastAPIResponse)
 def torznab_api(
     request: Request,
@@ -120,12 +138,7 @@ def torznab_api(
                 session, slug=slug, season=season_i, episode=ep_i, site=site_found
             )
 
-            # Default languages based on site
-            if site_found == "s.to":
-                default_langs = ["German Dub", "English Dub", "German Sub"]
-            else:
-                default_langs = ["German Dub", "German Sub", "English Sub"]
-
+            default_langs = _default_languages_for_site(site_found)
             candidate_langs: List[str] = (
                 cached_langs if cached_langs else default_langs
             )
@@ -260,12 +273,7 @@ def torznab_api(
             session, slug=slug, season=season_i, episode=ep_i, site=site_found
         )
 
-        # Default languages based on site
-        if site_found == "s.to":
-            default_langs = ["German Dub", "English Dub", "German Sub"]
-        else:
-            default_langs = ["German Dub", "German Sub", "English Sub"]
-
+        default_langs = _default_languages_for_site(site_found)
        candidate_langs: List[str] = cached_langs if cached_langs else default_langs
         logger.debug(
             f"Candidate languages for slug '{slug}', season {season_i}, episode {ep_i}, site '{site_found}': {candidate_langs}"

app/config.py

Lines changed: 32 additions & 4 deletions
@@ -1,5 +1,6 @@
-import sys
 import os
+from copy import deepcopy
+from typing import Any
 from pathlib import Path
 from dotenv import load_dotenv
 from loguru import logger
@@ -283,9 +284,7 @@ def _ensure_dir(candidates: list[Path], label: str) -> Path:
 ).strip()
 
 # S.to (series)
-STO_BASE_URL = os.getenv(
-    "STO_BASE_URL", "https://s.to"
-).strip()  # valid options are https://s.to, https://serienstream.to and http://186.2.175.5
+STO_BASE_URL = os.getenv("STO_BASE_URL", "https://s.to").strip()
 STO_ALPHABET_HTML = Path(
     os.getenv("STO_ALPHABET_HTML", DATA_DIR / "sto-alphabeth.html")
 )
@@ -323,6 +322,35 @@ def _ensure_dir(candidates: list[Path], label: str) -> Path:
     f"RELEASE_GROUP_ANIWORLD={RELEASE_GROUP_ANIWORLD}, RELEASE_GROUP_STO={RELEASE_GROUP_STO}"
 )
 
+_DEFAULT_SITE_CONFIGS: dict[str, dict[str, Any]] = {
+    "aniworld.to": {
+        "base_url": ANIWORLD_BASE_URL,
+        "alphabet_html": ANIWORLD_ALPHABET_HTML,
+        "alphabet_url": ANIWORLD_ALPHABET_URL,
+        "titles_refresh_hours": ANIWORLD_TITLES_REFRESH_HOURS,
+        "default_languages": ["German Dub", "German Sub", "English Sub"],
+        "release_group": RELEASE_GROUP_ANIWORLD,
+    },
+    "s.to": {
+        "base_url": STO_BASE_URL,
+        "alphabet_html": STO_ALPHABET_HTML,
+        "alphabet_url": STO_ALPHABET_URL,
+        "titles_refresh_hours": STO_TITLES_REFRESH_HOURS,
+        "default_languages": ["German Dub", "English Dub", "German Sub"],
+        "release_group": RELEASE_GROUP_STO,
+    },
+}
+
+CATALOG_SITE_CONFIGS: dict[str, dict[str, Any]] = {}
+for site in CATALOG_SITES_LIST:
+    base_cfg = _DEFAULT_SITE_CONFIGS.get(site)
+    if not base_cfg:
+        logger.warning(
+            f"No built-in configuration for catalogue site '{site}'. Provide environment overrides to enable it."
+        )
+        continue
+    CATALOG_SITE_CONFIGS[site] = deepcopy(base_cfg)
+
 # ---- Provider-Fallback ----
 # Comma-separated list, e.g. "VOE,Filemoon,Streamtape,Vidmoly,SpeedFiles,Doodstream,LoadX,Luluvdo,Vidoza"
 # Order = priority
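
A minimal sketch of how downstream code might read the new per-site mapping; only the keys populated above are assumed to exist, and CATALOG_SITES_LIST is whatever site list the existing configuration already exposes.

from app.config import CATALOG_SITE_CONFIGS

for site, cfg in CATALOG_SITE_CONFIGS.items():
    # Each enabled site now carries its own base URL, cached alphabet page,
    # refresh interval, language preference and release group.
    print(site, cfg["base_url"], cfg["titles_refresh_hours"], cfg["default_languages"])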

app/core/scheduler.py

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ def schedule_download(req: dict) -> str:
         raise RuntimeError("executor not available")
 
     with Session(engine) as s:
-        job = create_job(s)
+        job = create_job(s, source_site=req.get("site"))
 
     stop_event = threading.Event()
     fut = EXECUTOR.submit(_run_download, job.id, req, stop_event)
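
A hedged sketch of the call site: only the "site" key is read by the changed line, and the remaining request keys below are purely illustrative, since the real request shape is defined elsewhere in the app.

req = {
    "site": "s.to",          # forwarded into the new job as source_site
    "slug": "example-show",  # illustrative key, not part of this diff
    "season": 1,
    "episode": 2,
}
job_id = schedule_download(req)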

app/db/models.py

Lines changed: 81 additions & 13 deletions
@@ -1,20 +1,12 @@
 from __future__ import annotations
-import sys
-import os
+
 from typing import Optional, Literal, Generator, Any, List
 from datetime import datetime, timezone, timedelta
 from uuid import uuid4
 from loguru import logger
+from app.utils.logger import config as configure_logger
 
-LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO").upper()
-logger.remove()
-logger.add(
-    sys.stdout,
-    level=LOG_LEVEL,
-    colorize=True,
-    format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | "
-    "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>",
-)
+configure_logger()
 
 from sqlmodel import SQLModel, Field, Session, create_engine, select, Column, JSON
 from sqlalchemy.orm import registry as sa_registry
@@ -146,9 +138,82 @@ class ClientTask(ModelBase, table=True):
 logger.debug("SQLModel engine created.")
 
 
+def _migrate_episode_availability_table() -> None:
+    """
+    Ensure the episodeavailability table includes the site column in its primary key.
+    Performs an in-place SQLite migration when running against an existing database
+    created before multi-site support was introduced.
+    """
+    try:
+        with engine.begin() as conn:
+            table_present = conn.exec_driver_sql(
+                "SELECT name FROM sqlite_master WHERE type='table' AND name='episodeavailability'"
+            ).fetchone()
+            if not table_present:
+                logger.debug(
+                    "episodeavailability table not found; no migration necessary."
+                )
+                return
+
+            columns = conn.exec_driver_sql(
+                "PRAGMA table_info('episodeavailability')"
+            ).fetchall()
+            if any(col[1] == "site" for col in columns):
+                logger.debug(
+                    "episodeavailability table already contains 'site' column."
+                )
+                return
+
+            logger.info(
+                "Migrating episodeavailability table to include site column."
+            )
+            conn.exec_driver_sql(
+                """
+                CREATE TABLE episodeavailability_new (
+                    slug TEXT NOT NULL,
+                    season INTEGER NOT NULL,
+                    episode INTEGER NOT NULL,
+                    language TEXT NOT NULL,
+                    site TEXT NOT NULL DEFAULT 'aniworld.to',
+                    available BOOLEAN NOT NULL,
+                    height INTEGER,
+                    vcodec TEXT,
+                    provider TEXT,
+                    checked_at DATETIME NOT NULL,
+                    extra JSON,
+                    PRIMARY KEY (slug, season, episode, language, site)
+                )
+                """
+            )
+            conn.exec_driver_sql(
+                """
+                INSERT INTO episodeavailability_new (
+                    slug, season, episode, language, site,
+                    available, height, vcodec, provider, checked_at, extra
+                )
+                SELECT
+                    slug, season, episode, language,
+                    'aniworld.to' AS site,
+                    available, height, vcodec, provider, checked_at, extra
+                FROM episodeavailability
+                """
+            )
+            conn.exec_driver_sql("DROP TABLE episodeavailability")
+            conn.exec_driver_sql(
+                "ALTER TABLE episodeavailability_new RENAME TO episodeavailability"
+            )
+            logger.success(
+                "episodeavailability table migrated to include site column."
+            )
+    except Exception as exc:
+        logger.error(f"Failed to migrate episodeavailability table: {exc}")
+        raise
+
+
 def create_db_and_tables() -> None:
     logger.debug("Creating DB and tables if not exist.")
     try:
+        _migrate_episode_availability_table()
         # Use this module's private metadata
         ModelBase.metadata.create_all(engine)
         logger.success("Database and tables created or already exist.")
@@ -180,10 +245,13 @@ def dispose_engine() -> None:
 
 
 # --- Jobs CRUD
-def create_job(session: Session) -> Job:
+def create_job(session: Session, *, source_site: Optional[str] = None) -> Job:
     logger.debug("Creating new job entry in DB.")
     try:
-        job = Job()
+        job_kwargs: dict[str, Any] = {}
+        if source_site:
+            job_kwargs["source_site"] = source_site
+        job = Job(**job_kwargs)
         session.add(job)
         session.commit()
         session.refresh(job)
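
A short usage sketch for the extended create_job signature, assuming the Job model defines a source_site column as the keyword forwarding above implies.

with Session(engine) as session:
    job = create_job(session, source_site="s.to")  # records the originating catalogue site
    legacy = create_job(session)                   # existing call sites keep working unchanged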

app/utils/magnet.py

Lines changed: 15 additions & 10 deletions
@@ -116,16 +116,21 @@ def parse_magnet(magnet: str) -> Dict[str, str]:
             flat[k] = v[0]
             logger.debug(f"Magnet param parsed: {k}={v[0]}")
 
-    # Determine which prefix is used
-    has_aw = any(k.startswith("aw_") for k in flat.keys())
-    has_sto = any(k.startswith("sto_") for k in flat.keys())
-
-    if has_aw:
-        prefix = "aw"
-    elif has_sto:
-        prefix = "sto"
-    else:
-        # Backward compatibility: default to aw_
+    # Determine which prefix is used while rejecting mixed usage
+    prefix: str | None = None
+    for key in flat.keys():
+        if key.startswith("aw_"):
+            if prefix and prefix != "aw":
+                logger.error("Magnet contains mixed prefixes: aw_ and sto_")
+                raise ValueError("mixed magnet prefixes: aw_ and sto_")
+            prefix = "aw"
+        elif key.startswith("sto_"):
+            if prefix and prefix != "sto":
+                logger.error("Magnet contains mixed prefixes: aw_ and sto_")
+                raise ValueError("mixed magnet prefixes: aw_ and sto_")
+            prefix = "sto"
+    if prefix is None:
+        # Backward compatibility: default to aw_ when no explicit prefix detected
         prefix = "aw"
 
     # Check required params
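
To illustrate the new rejection path, a sketch using an invented magnet that mixes both prefixes; the hash and slug values are made up.

from app.utils.magnet import parse_magnet

mixed = (
    "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567"
    "&aw_slug=example-show&sto_lang=German+Dub"
)
try:
    parse_magnet(mixed)
except ValueError as exc:
    print(exc)  # mixed magnet prefixes: aw_ and sto_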
