Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ FEAT ] - Caching #72

Merged
merged 5 commits into from
Apr 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion .github/workflows/dev-pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,20 @@ jobs:

- name: Starting Testing Suites
run: |
python -m pytest ./tests
ENMA_CACHING_MANGADEX_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_AUTHOR_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
pytest --cov=. --cov-report=xml ./tests

- name: Building and Deploying
run: |
Expand Down
15 changes: 14 additions & 1 deletion .github/workflows/pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,20 @@ jobs:

- name: Starting Testing Suites
run: |
python -m pytest ./tests
ENMA_CACHING_MANGADEX_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_AUTHOR_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
pytest --cov=. --cov-report=xml ./tests

- name: Building and Deploying
run: |
Expand Down
13 changes: 13 additions & 0 deletions .github/workflows/testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,19 @@ jobs:

- name: Starting Testing Suites
run: |
ENMA_CACHING_MANGADEX_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGADEX_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
ENMA_CACHING_NHENTAI_AUTHOR_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_PAGINATE_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_SEARCH_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_GET_TTL_IN_SECONDS='0' \
ENMA_CACHING_MANGANATO_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS='0' \
pytest --cov=. --cov-report=xml ./tests

- name: Upload Report to Codecov
Expand Down
2 changes: 1 addition & 1 deletion enma/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '2.4.0'
__version__ = '2.4.1'
17 changes: 13 additions & 4 deletions enma/infra/adapters/repositories/mangadex.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
"""
from datetime import datetime
from enum import Enum
import os
from typing import Any, Optional, Union, cast
from urllib.parse import urljoin, urlparse

from requests import Response

import requests

from enma._version import __version__
from enma.application.core.handlers.error import (ExceedRateLimit,
Forbidden,
NotFound,
Expand All @@ -34,6 +34,7 @@
IHash, IManga, IMangaTag, IRelations,
ISearchResult,
IVolumesResponse)
from enma.infra.core.utils.cache import Cache


class Sort(Enum):
Expand All @@ -52,7 +53,7 @@ def __init__(self) -> None:
self.__HASH_URL = 'https://api.mangadex.org/at-home/server/'
self.__CHAPTER_PAGE_URL = 'https://cmdxd98sb0x3yprd.mangadex.network/data/'

def __handle_source_response(self, response: Response):
def __handle_source_response(self, response: requests.Response):
"""
Evaluates the HTTP response from the Mangadex API, raising specific exceptions based on the HTTP status code
to indicate various error conditions such as rate limits exceeded, forbidden access, or resource not found.
Expand Down Expand Up @@ -100,7 +101,7 @@ def __make_request(self,
logger.debug(f'Fetching {url} with headers {headers} and params {params}')

response = requests.get(url=urlparse(url).geturl(),
headers={**headers, "User-Agent": "Enma/2.4.0"},
headers={**headers, "User-Agent": f"Enma/{__version__}"},
params=params)

self.__handle_source_response(response)
Expand All @@ -125,6 +126,8 @@ def __create_cover_uri(self,
"""
return urljoin(self.__COVER_URL, f'{manga_id}/{file_name}.512.jpg')

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGADEX_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS', 100)),
max_size=20).cache
def fetch_chapter_by_symbolic_link(self,
link: SymbolicLink) -> Chapter:
"""
Expand Down Expand Up @@ -379,6 +382,8 @@ def __parse_thumb(self, manga: IManga) -> Thumb:
cover=self.__get_cover(manga_id=manga.get('id'),
relations=manga.get('relationships', list())))

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGADEX_GET_TTL_IN_SECONDS', 300)),
max_size=20).cache
def get(self,
identifier: str,
with_symbolic_links: bool = False) -> Manga:
Expand Down Expand Up @@ -415,6 +420,8 @@ def __make_sort_query(self, sort: Sort) -> dict[str, str]:
"""
return { f'order[{sort.value if isinstance(sort, Sort) else sort}]': 'desc' }

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGADEX_SEARCH_TTL_IN_SECONDS', 100)),
max_size=5).cache
def search(self,
query: str,
page: int,
Expand Down Expand Up @@ -459,6 +466,8 @@ def search(self,

return search_result

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGADEX_PAGINATE_TTL_IN_SECONDS', 100)),
max_size=5).cache
def paginate(self, page: int) -> Pagination:
"""
Retrieves a specific page of manga listings from the Mangadex API, returning a Pagination object
Expand Down
19 changes: 16 additions & 3 deletions enma/infra/adapters/repositories/manganato.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,25 @@
"""
This module provides an adapter for the nhentai repository.
It contains functions and classes to interact with the nhentai API and retrieve manga data.
This module provides an adapter for the MANGANATO repository.
It contains functions and classes to interact with the MANGANATO API and retrieve manga data.
"""

from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from multiprocessing import cpu_count
import os
from typing import Any, Optional, Union, cast
from urllib.parse import urlparse, urljoin
from bs4 import BeautifulSoup, Tag

import requests

from enma._version import __version__
from enma.application.core.interfaces.manga_repository import IMangaRepository
from enma.application.core.utils.logger import logger
from enma.domain.entities.author_page import AuthorPage
from enma.domain.entities.manga import Author, Chapter, Genre, Image, Manga, SymbolicLink, Title
from enma.domain.entities.search_result import Pagination, SearchResult, Thumb
from enma.infra.core.utils.cache import Cache

class Manganato(IMangaRepository):
"""
Expand All @@ -38,7 +41,9 @@ def __make_request(self,
logger.debug(f'Fetching {url} with headers {headers} and params {params}')

return requests.get(url=urlparse(url).geturl(),
headers={**headers, 'Referer': 'https://chapmanganato.com/'},
headers={**headers,
'Referer': 'https://chapmanganato.com/',
"User-Agent": f"Enma/{__version__}"},
params={**params})

def __create_title(self,
Expand Down Expand Up @@ -87,6 +92,8 @@ def __create_chapter(self, url: str, symbolic: bool = False) -> Union[Chapter, N
height=0))
return chapter

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGANATO_GET_TTL_IN_SECONDS', 300)),
max_size=20).cache
def get(self,
identifier: str,
with_symbolic_links: bool = False) -> Union[Manga, None]:
Expand Down Expand Up @@ -163,6 +170,8 @@ def get(self,
cover=Image(uri=cover), # type: ignore
chapters=chapters) # type: ignore

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGANATO_SEARCH_TTL_IN_SECONDS', 100)),
max_size=5).cache
def search(self,
query: str,
page: int) -> SearchResult:
Expand Down Expand Up @@ -195,6 +204,8 @@ def search(self,
total_pages=total_pages,
results=thumbs)

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGANATO_PAGINATE_TTL_IN_SECONDS', 100)),
max_size=5).cache
def paginate(self, page: int) -> Pagination:
response = self.__make_request(url=f'{self.__BASE_URL}/genre-all/{page}')

Expand Down Expand Up @@ -236,6 +247,8 @@ def set_config(self, **kwargs) -> None:
def author_page(self, author: str, page: int) -> AuthorPage:
raise NotImplementedError('Manganato does not support author_page')

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_MANGANATO_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS', 100)),
max_size=20).cache
def fetch_chapter_by_symbolic_link(self, link: SymbolicLink) -> Chapter:
chapter = self.__create_chapter(url=link.link)

Expand Down
14 changes: 13 additions & 1 deletion enma/infra/adapters/repositories/nhentai.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
"""
from datetime import datetime, timezone
from enum import Enum
import os
from typing import Any, Literal, Optional, Union, cast
from urllib.parse import urljoin, urlparse
from pydantic import BaseModel, field_validator
Expand All @@ -20,6 +21,7 @@
Title)
from enma.domain.entities.search_result import Pagination, SearchResult, Thumb
from enma.infra.core.interfaces.nhentai_response import NHentaiImage, NHentaiResponse
from enma.infra.core.utils.cache import Cache


class CloudFlareConfig(BaseModel):
Expand Down Expand Up @@ -116,6 +118,8 @@ def __make_page_uri(self,

return url

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_NHENTAI_FETCH_SYMBOLIC_LINK_TTL_IN_SECONDS', 100)),
max_size=20).cache
def fetch_chapter_by_symbolic_link(self,
link: SymbolicLink) -> Chapter:
response = self.__make_request(url=link.link)
Expand Down Expand Up @@ -152,6 +156,8 @@ def __create_chapter(self,
height=page.get('h')))
return chapter

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_NHENTAI_GET_TTL_IN_SECONDS', 300)),
max_size=20).cache
def get(self,
identifier: str,
with_symbolic_links: bool = False) -> Union[Manga, None]:
Expand Down Expand Up @@ -203,7 +209,9 @@ def get(self,
chapters=[chapter])

return manga


@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_NHENTAI_SEARCH_TTL_IN_SECONDS', 100)),
max_size=5).cache
def search(self,
query: str,
page: int,
Expand Down Expand Up @@ -283,6 +291,8 @@ def search(self,

return search_result

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_NHENTAI_PAGINATE_TTL_IN_SECONDS', 100)),
max_size=5).cache
def paginate(self, page: int) -> Pagination:
response = self.__make_request(url=urljoin(self.__API_URL, f'galleries/all'),
params={'page': page})
Expand Down Expand Up @@ -325,6 +335,8 @@ def random(self, retry=0) -> Manga:

return doujin

@Cache(max_age_seconds=int(os.getenv('ENMA_CACHING_NHENTAI_AUTHOR_TTL_IN_SECONDS', 100)),
max_size=5).cache
def author_page(self,
author: str,
page: int) -> AuthorPage:
Expand Down
32 changes: 32 additions & 0 deletions enma/infra/core/utils/cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
from functools import wraps
from typing import Callable

from expiringdict import ExpiringDict

from enma.application.core.utils.logger import logger

class Cache:
    """
    TTL-based memoization decorator backed by an ``ExpiringDict``.

    Each ``Cache`` instance owns its own expiring store, so every decorated
    function gets an independent cache with its own TTL and size bound.
    Intended usage (as seen in the repository adapters)::

        @Cache(max_age_seconds=300, max_size=20).cache
        def get(self, identifier: str) -> Manga: ...
    """

    def __init__(self, max_age_seconds: int = 3600, max_size: int = 100) -> None:
        """
        Args:
            max_age_seconds: seconds an entry stays valid before eviction.
                A value of 0 effectively disables caching (used by CI).
            max_size: maximum number of entries kept at once (LRU-style
                eviction is delegated to ``ExpiringDict``).
        """
        self._CACHE = ExpiringDict(max_len=max_size,
                                   max_age_seconds=max_age_seconds,
                                   items=None)

    def cache(self, function: Callable) -> Callable:
        """
        Decorate ``function`` so results are memoized per-argument.

        The first positional argument is skipped when building the key —
        the decorated callables are instance methods, and including ``self``
        would make every instance miss the cache.
        """
        @wraps(function)  # preserve the wrapped function's name/docstring
        def wrapper(*args, **kwargs):
            # Deterministic key: positional args (minus `self`) plus keyword
            # arguments as sorted (name, value) pairs. Sorting makes the key
            # independent of kwargs call order, and keeping the names avoids
            # collisions between different kwargs that share the same values.
            key = str([*args[1:], *sorted(kwargs.items())])

            # Single lookup; NOTE(review): a None result from the wrapped
            # function is indistinguishable from a miss and is never cached —
            # this matches the original behavior.
            cached = self._CACHE.get(key)
            if cached is not None:
                logger.debug(f'Retrieving cached object with key {key}')
                return cached

            result = function(*args, **kwargs)
            self._CACHE[key] = result
            return result

        return wrapper
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ requests==2.31.0
beautifulsoup4==4.10.0
pydantic==2.5.3
pytest==8.0.2
expiringdict==1.2.2
1 change: 1 addition & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ install_requires =
requests==2.31.0
beautifulsoup4==4.10.0
pydantic==2.5.3
expiringdict==1.2.2
setup_requires =
setuptools_scm

Expand Down
Loading
Loading