feat: use cached request session when fetching schedule #2989

Closed
wants to merge 1 commit

45 changes: 44 additions & 1 deletion api-client/libretime_api_client/_client.py
@@ -1,7 +1,8 @@
 import logging
+from contextlib import contextmanager
 from typing import Optional

-from requests import Response, Session as BaseSession
+from requests import PreparedRequest, Response, Session as BaseSession
 from requests.adapters import HTTPAdapter
 from requests.exceptions import RequestException
 from urllib3.util import Retry
@@ -62,17 +63,43 @@ def create_url(self, url):
         return f"{self.base_url.rstrip('/')}/{url.lstrip('/')}"


+class CachedSession(Session):
+    cache: dict[str, Response]
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.cache = {}
+
+    def send(self, request: PreparedRequest, **kwargs) -> Response:  # type: ignore[no-untyped-def]
+        """
+        Send a given PreparedRequest.
+        """
+        if request.method != "GET" or request.url is None:
+            return super().send(request, **kwargs)
+
+        if request.url in self.cache:
+            return self.cache[request.url]
+
+        response = super().send(request, **kwargs)
+        if response.ok:
+            self.cache[request.url] = response
+
+        return response
+
+
 # pylint: disable=too-few-public-methods
 class AbstractApiClient:
     session: Session
     base_url: str
+    retry: Optional[Retry]

     def __init__(
         self,
         base_url: str,
         retry: Optional[Retry] = None,
     ):
         self.base_url = base_url
+        self.retry = retry
         self.session = Session(
             base_url=base_url,
             retry=retry,
@@ -92,3 +119,19 @@ def _request(
         except RequestException as exception:
             logger.error(exception)
             raise exception
+
+    @contextmanager
+    def cached_session(self):
+        """
+        Swap the client session during the scope of the context. The session will
+        cache all GET requests.
+
+        Cached responses will not expire, therefore the cached session must not be
+        used for long-lived scopes.
+        """
+        original_session = self.session
+        self.session = CachedSession(base_url=self.base_url, retry=self.retry)
+        try:
+            yield
+        finally:
+            self.session = original_session
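For review context, a minimal usage sketch of the new `cached_session()` context manager. It assumes a concrete `ApiClient` built on `AbstractApiClient` that exposes the `get_show` helper used in `schedule.py` below; the import path, constructor arguments, and base URL are illustrative rather than taken from this diff.

```python
# Illustrative only: import path, constructor arguments and URL are assumptions.
from libretime_api_client.v2 import ApiClient

api_client = ApiClient(base_url="http://localhost:9001/api/v2")

# Outside the context manager, every helper call issues a fresh HTTP GET.
api_client.get_show(1)
api_client.get_show(1)  # second network round-trip

# Inside it, repeated GETs to the same URL are answered from CachedSession.cache,
# so a schedule whose items all belong to one show fetches that show only once.
with api_client.cached_session():
    api_client.get_show(1)  # network round-trip, successful response is cached
    api_client.get_show(1)  # served from the in-memory cache

# Back outside, the original Session is restored by the finally block.
api_client.get_show(1)
```

Callers that never enter the context are unaffected, since `self.session` is swapped back in the `finally` block even if the body raises.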
39 changes: 20 additions & 19 deletions playout/libretime_playout/player/schedule.py
@@ -54,25 +54,26 @@ def get_schedule(api_client: ApiClient) -> Events:
     ).json()

     events: Dict[str, AnyEvent] = {}
-    for item in sorted(schedule, key=itemgetter("starts_at")):
-        item["starts_at"] = event_isoparse(item["starts_at"])
-        item["ends_at"] = event_isoparse(item["ends_at"])
-
-        show_instance = api_client.get_show_instance(item["instance"]).json()
-        show = api_client.get_show(show_instance["show"]).json()
-
-        if show["live_enabled"]:
-            show_instance["starts_at"] = event_isoparse(show_instance["starts_at"])
-            show_instance["ends_at"] = event_isoparse(show_instance["ends_at"])
-            generate_live_events(events, show_instance, stream_preferences)
-
-        if item["file"]:
-            file = api_client.get_file(item["file"]).json()
-            generate_file_events(events, item, file, show, stream_preferences)
-
-        elif item["stream"]:
-            webstream = api_client.get_webstream(item["stream"]).json()
-            generate_webstream_events(events, item, webstream, show)
+    with api_client.cached_session():
+        for item in sorted(schedule, key=itemgetter("starts_at")):
+            item["starts_at"] = event_isoparse(item["starts_at"])
+            item["ends_at"] = event_isoparse(item["ends_at"])
+
+            show_instance = api_client.get_show_instance(item["instance"]).json()
+            show = api_client.get_show(show_instance["show"]).json()
+
+            if show["live_enabled"]:
+                show_instance["starts_at"] = event_isoparse(show_instance["starts_at"])
+                show_instance["ends_at"] = event_isoparse(show_instance["ends_at"])
+                generate_live_events(events, show_instance, stream_preferences)
+
+            if item["file"]:
+                file = api_client.get_file(item["file"]).json()
+                generate_file_events(events, item, file, show, stream_preferences)
+
+            elif item["stream"]:
+                webstream = api_client.get_webstream(item["stream"]).json()
+                generate_webstream_events(events, item, webstream, show)

     return dict(sorted(events.items()))
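Not part of this PR, but a sketch of how the caching behaviour could be unit-tested with `requests_mock` (an assumed dev dependency). It relies on the `CachedSession(base_url=..., retry=...)` signature used above and assumes `Session.request` resolves relative paths through `create_url`, which sits in the collapsed part of the first hunk.

```python
# Test sketch only; requests_mock and the relative-URL handling are assumptions.
import requests_mock

from libretime_api_client._client import CachedSession


def test_cached_session_deduplicates_get_requests():
    session = CachedSession(base_url="http://example.invalid/api/v2", retry=None)

    with requests_mock.Mocker() as mock:
        mock.get("http://example.invalid/api/v2/show/1", json={"id": 1})

        first = session.get("show/1")   # reaches the (mocked) transport, then cached
        second = session.get("show/1")  # answered from CachedSession.cache

    # Only one request hit the adapter, and the very same Response object came back.
    assert mock.call_count == 1
    assert first is second
```

Because the cache is keyed on the full request URL and never invalidated, this also illustrates the docstring's warning: the cached session is only safe for short-lived scopes such as a single `get_schedule` run.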