style: format code with autopep8 and isort (#19)
Format code with autopep8 and isort

This commit fixes the style issues introduced in 7b2bb9c according to the output
from autopep8 and isort.

Details: https://app.deepsource.com/gh/nattadasu/ryuuRyuusei/transform/48839f57-c48c-4cbf-ac0f-80d7eb97da88/

Co-authored-by: deepsource-autofix[bot] <62050782+deepsource-autofix[bot]@users.noreply.github.com>
Co-authored-by: Sultan Iskandar Maulana <nattadasu@nattadasu.my.id>
deepsource-autofix[bot] and nattadasu committed Apr 22, 2023
1 parent 520eb1e commit 584cd64
Showing 38 changed files with 1,390 additions and 1,150 deletions.
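Only the first two of the 38 changed files are reproduced below; both show the same pattern of mechanical fixes (single quotes normalized to double quotes, long signatures and calls wrapped, a blank line inserted after class docstrings). For reference, a pass with the same two tools can be approximated locally. The sketch below is a minimal approximation using autopep8's fix_code and isort's code string APIs with default options; the exact DeepSource Autofix configuration for this commit is not recorded in the message, so treat the options as assumptions.

    # Minimal sketch of an autopep8 + isort formatting pass.
    # Default options are an assumption; the DeepSource Autofix
    # configuration for this repository is not shown in the commit.
    import autopep8  # pip install autopep8
    import isort  # pip install isort


    def format_source(source: str) -> str:
        """Sort imports with isort, then apply PEP 8 fixes with autopep8."""
        sorted_source = isort.code(source)  # isort 5.x string API
        return autopep8.fix_code(sorted_source)  # PEP 8 whitespace/wrapping fixes


    if __name__ == "__main__":
        before = "import time\nimport os\nx=1\n"
        print(format_source(before))  # imports sorted, "x = 1" spacing fixed

The equivalent command-line pass would be along the lines of autopep8 --in-place --recursive . followed by isort . from the repository root.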
52 changes: 31 additions & 21 deletions classes/anilist.py
@@ -16,7 +16,7 @@ def __init__(self):
         """Initialize the AniList API Wrapper"""
         self.base_url = "https://graphql.anilist.co"
         self.session = None
-        self.cache_directory = 'cache/anilist'
+        self.cache_directory = "cache/anilist"
         self.cache_expiration_time = 86400  # 1 day in seconds

     async def __aenter__(self):
@@ -34,10 +34,13 @@ async def close(self) -> None:

     class MediaType(Enum):
         """Media type enum for AniList"""
+
         ANIME = "ANIME"
         MANGA = "MANGA"

-    async def nsfwCheck(self, media_id: int, media_type: str | MediaType = MediaType.ANIME) -> bool:
+    async def nsfwCheck(
+        self, media_id: int, media_type: str | MediaType = MediaType.ANIME
+    ) -> bool:
         """Check if the media is NSFW
         Args:
@@ -54,7 +57,8 @@ async def nsfwCheck(self, media_id: int, media_type: str | MediaType
         if isinstance(media_type, self.MediaType):
             media_type = media_type.value
         cache_file_path = self.get_cache_file_path(
-            f'nsfw/{media_type.lower()}/{id}.json')
+            f"nsfw/{media_type.lower()}/{id}.json"
+        )
         cached_data = self.read_cached_data(cache_file_path)
         if cached_data is not None:
             return cached_data
@@ -70,7 +74,8 @@ async def nsfwCheck(self, media_id: int, media_type: str | MediaType
             if response.status == 200:
                 data = await response.json()
                 self.write_data_to_cache(
-                    data["data"]["Media"]["isAdult"], cache_file_path)
+                    data["data"]["Media"]["isAdult"], cache_file_path
+                )
                 return data["data"]["Media"]["isAdult"]
             error_message = await response.text()
             raise ProviderHttpError(error_message, response.status)
@@ -87,7 +92,7 @@ async def anime(self, media_id: int) -> dict:
         Returns:
             dict: The anime information
         """
-        cache_file_path = self.get_cache_file_path(f'anime/{media_id}.json')
+        cache_file_path = self.get_cache_file_path(f"anime/{media_id}.json")
         cached_data = self.read_cached_data(cache_file_path)
         if cached_data is not None:
             return cached_data
@@ -140,11 +145,12 @@ async def anime(self, media_id: int) -> dict:
                 }}
             }}
         }}"""
-        async with self.session.post(self.base_url, json={"query": gqlquery}) as response:
+        async with self.session.post(
+            self.base_url, json={"query": gqlquery}
+        ) as response:
             if response.status == 200:
                 data = await response.json()
-                self.write_data_to_cache(
-                    data["data"]["Media"], cache_file_path)
+                self.write_data_to_cache(data["data"]["Media"], cache_file_path)
                 return data["data"]["Media"]
             error_message = await response.text()
             raise ProviderHttpError(error_message, response.status)
@@ -158,7 +164,7 @@ async def manga(self, media_id: int) -> dict:
         Returns:
             dict: The manga information
         """
-        cache_file_path = self.get_cache_file_path(f'manga/{media_id}.json')
+        cache_file_path = self.get_cache_file_path(f"manga/{media_id}.json")
         cached_data = self.read_cached_data(cache_file_path)
         if cached_data is not None:
             return cached_data
@@ -211,16 +217,19 @@ async def manga(self, media_id: int) -> dict:
                 }}
             }}
         }}"""
-        async with self.session.post(self.base_url, json={"query": gqlquery}) as response:
+        async with self.session.post(
+            self.base_url, json={"query": gqlquery}
+        ) as response:
             if response.status == 200:
                 data = await response.json()
-                self.write_data_to_cache(
-                    data["data"]["Media"], cache_file_path)
+                self.write_data_to_cache(data["data"]["Media"], cache_file_path)
                 return data["data"]["Media"]
             error_message = await response.text()
             raise ProviderHttpError(error_message, response.status)

-    async def search_media(self, query: str, limit: int = 10, media_type: str | MediaType = MediaType.MANGA) -> list[dict]:
+    async def search_media(
+        self, query: str, limit: int = 10, media_type: str | MediaType = MediaType.MANGA
+    ) -> list[dict]:
         """Search anime by its title

@@ -236,8 +245,7 @@ async def search_media(self, query: str, limit: int = 10, media_type: str | Medi
             list[dict]: The search results
         """
         if limit > 10:
-            raise ProviderTypeError(
-                "limit must be less than or equal to 10", "int")
+            raise ProviderTypeError("limit must be less than or equal to 10", "int")
         if isinstance(media_type, self.MediaType):
             media_type = media_type.value
         gqlquery = f"""query ($search: String, $mediaType: MediaType, $limit: Int) {{
@@ -264,7 +272,9 @@ async def search_media(self, query: str, limit: int = 10, media_type: str | Medi
             "mediaType": media_type,
             "limit": limit,
         }
-        async with self.session.post(self.base_url, json={"query": gqlquery, "variables": variables}) as response:
+        async with self.session.post(
+            self.base_url, json={"query": gqlquery, "variables": variables}
+        ) as response:
             if response.status == 200:
                 data = await response.json()
                 return data["data"]["Page"]["results"]
@@ -293,11 +303,11 @@ def read_cached_data(self, cache_file_path: str) -> dict | None:
             None: If cache file does not exist
         """
         if os.path.exists(cache_file_path):
-            with open(cache_file_path, 'r') as cache_file:
+            with open(cache_file_path, "r") as cache_file:
                 cache_data = json.load(cache_file)
-                cache_age = time.time() - cache_data['timestamp']
+                cache_age = time.time() - cache_data["timestamp"]
                 if cache_age < self.cache_expiration_time:
-                    return cache_data['data']
+                    return cache_data["data"]
         return None

     def write_data_to_cache(self, data, cache_file_path: str) -> None:
@@ -310,9 +320,9 @@ def write_data_to_cache(self, data, cache_file_path: str) -> None:
         Returns:
             None
         """
-        cache_data = {'timestamp': time.time(), 'data': data}
+        cache_data = {"timestamp": time.time(), "data": data}
         os.makedirs(os.path.dirname(cache_file_path), exist_ok=True)
-        with open(cache_file_path, 'w') as cache_file:
+        with open(cache_file_path, "w") as cache_file:
             json.dump(cache_data, cache_file)

68 changes: 36 additions & 32 deletions classes/animeapi.py
@@ -32,47 +32,49 @@ async def close(self) -> None:

     class AnimeApiPlatforms(Enum):
         """Anime API supported platforms enum"""
-        ANI_SEARCH = ANISEARCH = AS = 'anisearch'
-        ANIDB = 'anidb'
-        ANILIST = AL = 'anilist'
-        ANIME_PLANET = ANIMEPLANET = AP = 'animeplanet'
-        ANNICT = 'annict'
-        KAIZE = 'kaize'
-        KITSU = 'kitsu'
-        LIVECHART = LC = 'livechart'
-        MYANIMELIST = MAL = 'myanimelist'
-        NOTIFY = 'notify'
-        OTAKOTAKU = 'otakotaku'
-        SHIKIMORI = SHIKI = 'shikimori'
-        SHOBOI = SYOBOI = 'shoboi'
-        SILVERYASHA = 'silveryasha'
-        TRAKT = 'trakt'
+
+        ANI_SEARCH = ANISEARCH = AS = "anisearch"
+        ANIDB = "anidb"
+        ANILIST = AL = "anilist"
+        ANIME_PLANET = ANIMEPLANET = AP = "animeplanet"
+        ANNICT = "annict"
+        KAIZE = "kaize"
+        KITSU = "kitsu"
+        LIVECHART = LC = "livechart"
+        MYANIMELIST = MAL = "myanimelist"
+        NOTIFY = "notify"
+        OTAKOTAKU = "otakotaku"
+        SHIKIMORI = SHIKI = "shikimori"
+        SHOBOI = SYOBOI = "shoboi"
+        SILVERYASHA = "silveryasha"
+        TRAKT = "trakt"

     async def get_update_time(self) -> dt:
         """Get the last update time of AniAPI's database
         Returns:
             datetime: The last update time of AniAPI's database
         """
-        cache_file_path = self.get_cache_file_path('updated')
+        cache_file_path = self.get_cache_file_path("updated")
         cached_data = self.read_cached_data(cache_file_path)
         if cached_data is not None:
-            cached_data = dt.fromtimestamp(cached_data['timestamp'])
+            cached_data = dt.fromtimestamp(cached_data["timestamp"])
             return cached_data
         try:
-            async with self.session.get(f'{self.base_url}/updated') as resp:
+            async with self.session.get(f"{self.base_url}/updated") as resp:
                 text = await resp.text()
                 # format: Updated on %m/%d/%Y %H:%M:%S UTC
-                text = text.replace('Updated on ', '')
-                text = text.replace(' UTC', '+00:00')
-                final = dt.strptime(text, '%m/%d/%Y %H:%M:%S%z').timestamp()
-                self.write_data_to_cache(
-                    cache_file_path, {'timestamp': final})
+                text = text.replace("Updated on ", "")
+                text = text.replace(" UTC", "+00:00")
+                final = dt.strptime(text, "%m/%d/%Y %H:%M:%S%z").timestamp()
+                self.write_data_to_cache(cache_file_path, {"timestamp": final})
                 return dt.fromtimestamp(final)
         except BaseException:
             return dt.now()

-    async def get_relation(self, id: str | int, platform: AnimeApiPlatforms | str) -> dict:
+    async def get_relation(
+        self, id: str | int, platform: AnimeApiPlatforms | str
+    ) -> dict:
         """Get a relation between anime and other platform via Natsu's AniAPI
         Args:
@@ -84,12 +86,14 @@ async def get_relation(self, id: str | int, platform: AnimeApiPlatforms | str) -
         """
         if isinstance(platform, self.AnimeApiPlatforms):
             platform = platform.value
-        cache_file_path = self.get_cache_file_path(f'{platform}/{id}.json')
+        cache_file_path = self.get_cache_file_path(f"{platform}/{id}.json")
         cached_data = self.read_cached_data(cache_file_path)
         if cached_data is not None:
             return cached_data
         try:
-            async with self.session.get(f'https://aniapi.nattadasu.my.id/{platform}/{id}') as resp:
+            async with self.session.get(
+                f"https://aniapi.nattadasu.my.id/{platform}/{id}"
+            ) as resp:
                 jsonText = await resp.text()
                 jsonText = json.loads(jsonText)
                 self.write_data_to_cache(jsonText, cache_file_path)
@@ -120,11 +124,11 @@ def read_cached_data(self, cache_file_path: str) -> dict | None:
             None: If cache file does not exist
         """
         if os.path.exists(cache_file_path):
-            with open(cache_file_path, 'r') as cache_file:
+            with open(cache_file_path, "r") as cache_file:
                 cache_data = json.load(cache_file)
-                cache_age = time.time() - cache_data['timestamp']
+                cache_age = time.time() - cache_data["timestamp"]
                 if cache_age < self.cache_expiration_time:
-                    return cache_data['data']
+                    return cache_data["data"]
         return None

     def write_data_to_cache(self, data, cache_file_path: str):
@@ -134,10 +138,10 @@ def write_data_to_cache(self, data, cache_file_path: str):
             data (any): Data to write to cache
             cache_file_name (str): Cache file name
         """
-        cache_data = {'timestamp': time.time(), 'data': data}
+        cache_data = {"timestamp": time.time(), "data": data}
         os.makedirs(os.path.dirname(cache_file_path), exist_ok=True)
-        with open(cache_file_path, 'w') as cache_file:
+        with open(cache_file_path, "w") as cache_file:
             json.dump(cache_data, cache_file)


-__all__ = ['AnimeApi']
+__all__ = ["AnimeApi"]