diff --git a/nicotools/download.py b/nicotools/download.py
index 6dc7e71..bd815f4 100644
--- a/nicotools/download.py
+++ b/nicotools/download.py
@@ -27,7 +27,6 @@ def __init__(self,
limit: int=4,
sieve: bool=True,
loop: asyncio.AbstractEventLoop=None,
- return_session: bool=False,
interval: Union[int, float]=5,
backoff: Union[int, float]=3,
retries: Union[int, float]=3,
@@ -39,7 +38,6 @@ def __init__(self,
:param Optional[str] password: パスワード
:param T <= logging.logger logger: ロガーのインスタンス
:param aiohttp.ClientSession session:
- :param bool return_session: データを取り終わった後にセッションを返すか閉じるか
:param int limit: 同時にアクセスする最大数
:param bool sieve: 非公開や削除済み動画を除外するかどうか
:param asyncio.AbstractEventLoop loop: イベントループ
@@ -53,7 +51,6 @@ def __init__(self,
self.session = session or self.loop.run_until_complete(self.get_session())
self.__parallel_limit = limit
self.__do_sieve = sieve
- self.__return_session = return_session
self.interval = interval
self.backoff = backoff
self.retries = retries
@@ -73,7 +70,7 @@ async def get_session(self) -> aiohttp.ClientSession:
else:
login.get_session(utils.LogIn.ask_credentials())
cook = login.cookie
- self.logger.debug("Object ID of cookie (Info): %s", id(cook))
+ self.logger.debug(f"Object ID of cookie (Info): {id(cook)}")
return aiohttp.ClientSession(cookies=cook)
def get_data(self, video_ids: List) -> Dict:
@@ -86,8 +83,6 @@ def get_data(self, video_ids: List) -> Dict:
infos = self.loop.run_until_complete(
asyncio.gather(*[self._retrieve_info(_id) for _id in video_ids]))
result = {_id: _info for _id, _info in zip(video_ids, infos)}
- if not self.__return_session:
- self.session.close()
sieved_result = self.siever(result)
return sieved_result
@@ -113,14 +108,14 @@ async def _retrieve_info(self, video_id: str) -> Dict:
backoff = self.backoff
attempt = max(0, self.retries) + 1
url = URL.URL_Watch + video_id
- self.logger.debug("_worker: %s", locals())
+ self.logger.debug(f"_worker: {locals()}")
async with asyncio.Semaphore(self.__parallel_limit):
st = 0
while attempt > 0:
attempt -= 1
async with self.session.get(url) as response: # type: aiohttp.ClientResponse
- self.logger.debug("Video ID: %s, Status Code: %s", video_id, response.status)
+ self.logger.debug(f"Video ID: {video_id}, Status Code: {response.status}")
if response.status == 200:
info_data = await response.text()
return self._junction(info_data)
@@ -319,9 +314,8 @@ def _junction(self, content: str) -> Dict[str, Union[str, int, List[str], bool]]
file_name = "_#_niconico_#_.html"
with open(file_name, "w", encoding="utf-8") as fd:
fd.write(content)
- raise AttributeError("Unknown HTML. For debug purpose,"
- " the content has been saved in {}."
- " You can safely delete it.".format(file_name))
+        raise AttributeError(f"Unknown HTML. For debug purpose, the content has been saved in {file_name}."
+                             " You can safely delete it.")
class Thumbnail(utils.Canopy):
@@ -329,7 +323,6 @@ def __init__(self,
logger: utils.NTLogger=None,
limit: int=8,
session: aiohttp.ClientSession=None,
- return_session: bool=False,
loop: asyncio.AbstractEventLoop=None,
):
"""
@@ -338,14 +331,12 @@ def __init__(self,
:param T<= logging.logger logger: ロガー
:param int limit: 同時にアクセスする最大数
:param aiohttp.ClientSession session:
- :param bool return_session: セッションを返すか閉じるか
:param asyncio.AbstractEventLoop loop: イベントループ
"""
super().__init__(loop=loop, logger=logger)
self.undone = []
self.__bucket = {}
self.session = session or self.loop.run_until_complete(self.get_session())
- self.__return_session = return_session
self.__parallel_limit = limit
async def get_session(self) -> aiohttp.ClientSession:
@@ -368,19 +359,13 @@ def start(self, glossary: Union[List, Dict],
glossary = utils.validator(glossary)
glossary = self.loop.run_until_complete(self._get_infos(glossary))
self.glossary = glossary
- self.logger.debug("Dictionary of Videos: %s", self.glossary)
self.save_dir = utils.make_dir(save_dir)
- self.logger.debug("Directory to save in: %s", self.save_dir)
if len(self.glossary) > 0:
self.loop.run_until_complete(self._download(list(self.glossary), is_large))
while len(self.undone) > 0:
- self.logger.debug("いくつか残ってる。%s", self.undone)
self.loop.run_until_complete(self._download(self.undone, False))
- self.logger.debug("全部終わった")
- if not self.__return_session:
- self.close()
return self
async def _download(self, video_ids: list, islarge: bool=True) -> None:
@@ -397,13 +382,11 @@ async def _download(self, video_ids: list, islarge: bool=True) -> None:
async def _worker(self, idx: int, video_id: str, url: str) -> Optional[bytes]:
async with asyncio.Semaphore(self.__parallel_limit):
- self.logger.info(Msg.nd_download_pict,
- idx + 1, len(self.glossary), video_id,
- self.glossary[video_id][KeyGTI.TITLE])
+ self.logger.info(Msg.nd_download_pict.format(
+ idx + 1, len(self.glossary), video_id, self.glossary[video_id][KeyGTI.TITLE]))
try:
async with self.session.get(url, timeout=10) as response:
- self.logger.debug("Video ID: %s, Status Code: %s", video_id, response.status)
if response.status == 200:
# ダウンロードに成功したら「未完了のリスト」から取り除く
if video_id in self.undone:
@@ -413,7 +396,7 @@ async def _worker(self, idx: int, video_id: str, url: str) -> Optional[bytes]:
self.undone.append(video_id)
return None
except asyncio.TimeoutError:
- self.logger.warning("%s が時間切れ", video_id)
+ self.logger.warning(f"{video_id} が時間切れ")
self.undone.append(video_id)
return None
@@ -421,7 +404,7 @@ def _saver(self, video_id: str, coroutine: asyncio.Task) -> None:
image_data = coroutine.result()
if image_data:
file_path = self.make_name(video_id, "jpg")
- self.logger.debug("File Path: %s", file_path)
+ self.logger.debug(f"File Path: {file_path}")
with file_path.open('wb') as f:
f.write(image_data)
@@ -435,7 +418,7 @@ def _make_urls(self, video_ids: list, is_large: bool=True) -> list:
:rtype: List[str]
"""
if is_large:
- urls = ["{0}.L".format(self.glossary[_id][KeyGTI.THUMBNAIL_URL]) for _id in video_ids]
+ urls = [f"{self.glossary[_id][KeyGTI.THUMBNAIL_URL]}.L" for _id in video_ids]
else:
urls = [self.glossary[_id][KeyGTI.THUMBNAIL_URL] for _id in video_ids]
return urls
@@ -481,7 +464,6 @@ def __init__(self,
mail: str=None, password: str=None,
logger: utils.NTLogger=None,
session: aiohttp.ClientSession=None,
- return_session=False,
division: int=4,
limit: int=4,
chunk_size=1024*50,
@@ -495,7 +477,6 @@ def __init__(self,
:param password: パスワード
:param logger: ロガー
:param session: セッション
- :param return_session: セッションを返すか
:param division: いくつに分割するか
:param limit: (実際のダウンロード前のファイルサイズの確認で)同時にアクセスする最大数
:param chunk_size: サーバーに一度に要求するデータ量
@@ -509,7 +490,6 @@ def __init__(self,
self.__multiline = multiline
self.__division = division
self.session = session or self.loop.run_until_complete(self.get_session())
- self.__return_session = return_session
self.__parallel_limit = limit
self.__chunk_size = chunk_size
@@ -525,7 +505,6 @@ def start(self,
save_dir: Union[str, Path]):
# TODO Downloading in Economy mode
self.save_dir = utils.make_dir(save_dir)
- self.logger.debug("Directory to save in: %s", self.save_dir)
# 分割数と同じだけの要素を持つリストを作り、各要素にそれぞれが
# 保存したファイルサイズを記録する。プログレスバーに利用する。
self.__downloaded_size = [0] * self.__division
@@ -534,17 +513,14 @@ def start(self,
glossary = utils.validator(glossary)
info = Info(
mail=self.__mail, password=self.__password,
- session=self.session, return_session=True)
+ session=self.session)
glossary = info.get_data(glossary)
self.session = info.session
self.glossary = glossary
- self.logger.debug("Dictionary of Videos: %s", self.glossary)
# まず各動画のファイルサイズを集める。
self.loop.run_until_complete(self._push_file_size())
self.loop.run_until_complete(self._broker())
- if not self.__return_session:
- self.close()
return self
async def _push_file_size(self):
@@ -557,10 +533,10 @@ async def _push_file_size(self):
async def _get_file_size_worker(self, video_id: str) -> int:
vid_url = self.glossary[video_id][KeyDmc.VIDEO_URL_SM]
- self.logger.debug("Video ID: %s, Video URL: %s", video_id, vid_url)
+ self.logger.debug(f"Video ID: {video_id}, Video URL: {vid_url}")
async with self.session.head(vid_url) as resp:
headers = resp.headers
- self.logger.debug(str(headers))
+ self.logger.debug(f"Headers: {str(headers)}")
return int(headers["content-length"])
async def _broker(self):
@@ -576,21 +552,17 @@ async def _download(self, idx: int, video_id: str):
division = self.__division
file_path = self.make_name(video_id, self.glossary[video_id][KeyDmc.MOVIE_TYPE])
- self.logger.info(
- Msg.nd_download_video.format(
- idx + 1, len(self.glossary), video_id,
- self.glossary[video_id][KeyDmc.TITLE]))
+ self.logger.info(Msg.nd_download_video.format(
+ idx + 1, len(self.glossary), video_id, self.glossary[video_id][KeyDmc.TITLE]))
video_url = self.glossary[video_id][KeyDmc.VIDEO_URL_SM]
file_size = self.glossary[video_id][KeyDmc.FILE_SIZE]
- headers = [
- {"Range": "bytes={}-{}".format(
- int(file_size*order/division),
- int((file_size*(order+1))/division-1)
- )} for order in range(division)]
+ headers = [{
+ "Range": f"bytes={int(file_size*order/division)}-{int((file_size*(order+1))/division-1)}"
+ } for order in range(division)]
- for h in headers:
- self.logger.debug(str(h))
+ for i, h in enumerate(headers):
+ self.logger.debug(f"Header {i}: {str(h)}")
if self.__multiline:
progress_bars = [tqdm(total=int(file_size / division),
@@ -613,11 +585,11 @@ async def _download(self, idx: int, video_id: str):
async def _download_worker(self, file_path: Union[str, Path], video_url: str,
header: dict, order: int, pbar: tqdm=None) -> tqdm:
- file_path = Path("{}.{:03}".format(file_path, order))
+ file_path = Path(f"{file_path}.{order:03}")
# => video.mp4.000 ~ video.mp4.003 (4分割の場合)
with file_path.open("wb") as fd:
async with self.session.get(url=video_url, headers=header) as video_data:
- self.logger.debug("Started! Header: %s, Video URL: %s", header, video_url)
+ self.logger.debug(f"Started! Header: {header}, Video URL: {video_url}")
while True:
data = await video_data.content.read(self.__chunk_size)
if not data:
@@ -626,7 +598,7 @@ async def _download_worker(self, file_path: Union[str, Path], video_url: str,
self.__downloaded_size[order] += downloaded_size
if pbar:
pbar.update(downloaded_size)
- self.logger.debug("Order: %s Done!", order)
+ self.logger.debug(f"Order {order}: done!")
return pbar
async def _counter_whole(self, file_size: int, interval: int=1):
@@ -656,8 +628,8 @@ def _combiner(self, video_id: str, coroutine: asyncio.Task):
"""
if coroutine.done() and not coroutine.cancelled():
file_path = self.make_name(video_id, self.glossary[video_id][KeyDmc.MOVIE_TYPE])
- file_names = ["{}.{:03}".format(file_path, order) for order in range(self.__division)]
- self.logger.debug("File names: %s", file_names)
+ file_names = [f"{file_path}.{order:03}" for order in range(self.__division)]
+ self.logger.debug(f"File names: {file_names}")
with file_path.open("wb") as fd:
for name in file_names:
with open(name, "rb") as file:
@@ -671,7 +643,6 @@ def __init__(self,
mail: str=None, password: str=None,
logger: utils.NTLogger=None,
session: aiohttp.ClientSession=None,
- return_session=False,
division: int=4,
chunk_size=1024*50,
multiline=True,
@@ -684,7 +655,6 @@ def __init__(self,
:param password: パスワード
:param logger: ロガー
:param session: セッション
- :param return_session: セッションを返すか
:param division: いくつに分割するか
:param chunk_size: 一度にサーバーに要求するデータ量
:param multiline: プログレスバーを複数行で表示するか
@@ -697,7 +667,6 @@ def __init__(self,
self.__multiline = multiline
self.__division = division
self.session = session or self.loop.run_until_complete(self.get_session())
- self.__return_session = return_session
self.__chunk_size = chunk_size
async def get_session(self) -> aiohttp.ClientSession:
@@ -713,29 +682,24 @@ def start(self,
xml: bool=True):
self.save_dir = utils.make_dir(save_dir)
self.__downloaded_size = [0] * self.__division
- self.logger.debug("Directory to save in: %s", self.save_dir)
if isinstance(glossary, list):
glossary = utils.validator(glossary)
info = Info(
mail=self.__mail, password=self.__password,
- session=self.session, return_session=True)
+ session=self.session)
glossary = info.get_data(glossary)
self.session = info.session
self.glossary = glossary
- self.logger.debug("Dictionary of Videos: %s", self.glossary)
self.loop.run_until_complete(self._broker(xml))
- if not self.__return_session:
- self.close()
return self
async def _broker(self, xml: bool=True) -> None:
for idx, video_id in enumerate(self.glossary):
if self.glossary[video_id][KeyDmc.API_URL] is None:
- self.logger.warning("%s はDMC動画ではありません。従来サーバーの動画を"
- "ダウンロードする場合は --smile をコマンドに"
- "指定してください。", video_id)
+ self.logger.warning(f"{video_id} はDMC動画ではありません。従来サーバーの動画を"
+ "ダウンロードする場合は --smile をコマンドに指定してください。")
continue
if xml:
res_xml = await self._first_nego_xml(video_id)
@@ -746,7 +710,7 @@ async def _broker(self, xml: bool=True) -> None:
video_url = self._extract_video_url_json(res_json)
coro_heartbeat = asyncio.ensure_future(self._heartbeat(video_id, res_json))
- self.logger.debug("動画URL: %s", video_url)
+ self.logger.debug(f"動画URL: {video_url}")
coro_download = asyncio.ensure_future(self._download(idx, video_id, video_url))
coro_download.add_done_callback(functools.partial(self._canceler, coro_heartbeat))
coro_download.add_done_callback(functools.partial(self._combiner, video_id))
@@ -755,8 +719,6 @@ async def _broker(self, xml: bool=True) -> None:
async def _first_nego_xml(self, video_id: str) -> str:
payload = self._make_param_xml(self.glossary[video_id])
- self.logger.debug("Attempting to first negotiation of %s", video_id)
- self.logger.debug("This is the posting XML: %s", payload)
async with self.session.post(
url=self.glossary[video_id][KeyDmc.API_URL],
params={"_format": "xml"},
@@ -766,8 +728,6 @@ async def _first_nego_xml(self, video_id: str) -> str:
async def _first_nego_json(self, video_id: str) -> str: # pragma: no cover
payload = self._make_param_json(self.glossary[video_id])
- self.logger.debug("Attempting to first negotiation of %s", video_id)
- self.logger.debug("This is the posting JSON: %s", payload)
async with self.session.post(
url=self.glossary[video_id][KeyDmc.API_URL],
params={"_format": "json"},
@@ -778,9 +738,9 @@ async def _first_nego_json(self, video_id: str) -> str: # pragma: no cover
def _make_param_xml(self, info: Dict[str, str]) -> str:
info.update({
"video_src_ids_xml": "".join(map(
- lambda _: "{}".format(_), info[KeyDmc.VIDEO_SRC_IDS])),
+ lambda _: f"{_}", info[KeyDmc.VIDEO_SRC_IDS])),
"audio_src_ids_xml": "".join(map(
- lambda _: "{}".format(_), info[KeyDmc.AUDIO_SRC_IDS]))
+ lambda _: f"{_}", info[KeyDmc.AUDIO_SRC_IDS]))
})
xml = Template("""
${recipe_id}
@@ -898,13 +858,13 @@ def _make_param_json(self, info: Dict[str, Union[str, list, int]]) -> str: # pr
return result
def _extract_video_url_xml(self, text: str) -> str:
- self.logger.debug("Returned XML data: %s", text)
+ self.logger.debug(f"Returned XML data: {text}")
soup = BeautifulSoup(text, "html.parser")
url_tag = soup.content_uri # type: Tag
return url_tag.text
def _extract_video_url_json(self, text: str) -> str: # pragma: no cover
- self.logger.debug("Returned JSON data: %s", text)
+ self.logger.debug(f"Returned JSON data: {text}")
soup = json.loads(text)
url_tag = soup["data"]["session"]["content_uri"]
return url_tag
@@ -912,13 +872,13 @@ def _extract_video_url_json(self, text: str) -> str: # pragma: no cover
def _extract_session_id_xml(self, text: str) -> str:
soup = BeautifulSoup(text, "html.parser")
id_tag = soup.session.id # type: Tag
- self.logger.debug("Session ID: %s", id_tag.text)
+ self.logger.debug(f"Session ID: {id_tag.text}")
return id_tag.text
def _extract_session_id_json(self, text: str) -> str: # pragma: no cover
soup = json.loads(text)
id_tag = soup["data"]["session"]["id"]
- self.logger.debug("Session ID: %s", id_tag)
+ self.logger.debug(f"Session ID: {id_tag}")
return id_tag.text
def _extract_session_tag(self, text: str) -> str:
@@ -933,12 +893,11 @@ async def _heartbeat(self, video_id: str, text: str) -> None:
:param text:
"""
try:
- self.logger.debug("返ってきたXML: %s", text)
+ self.logger.debug(f"返ってきたXML: {text}")
api_url = self.glossary[video_id][KeyDmc.API_URL]
# 1分ちょうどで送ると遅れるので、待ち時間を少し早める
waiting = (self.glossary[video_id][KeyDmc.HEARTBEAT] / 1000) - 5
companion = self._extract_session_tag(text)
- self.logger.debug("送信するXML: %s", companion)
session_id = self._extract_session_id_xml(text)
await asyncio.sleep(waiting)
async with self.session.post(
@@ -952,7 +911,7 @@ async def _heartbeat(self, video_id: str, text: str) -> None:
pass
async def _get_file_size(self, video_id: str, video_url: str) -> int:
- self.logger.debug("Video ID: %s, Video URL: %s", video_id, video_url)
+ self.logger.debug(f"Video ID: {video_id}, Video URL: {video_url}")
async with self.session.head(video_url) as resp:
headers = resp.headers
self.logger.debug(str(headers))
@@ -962,20 +921,16 @@ async def _download(self, idx: int, video_id: str, video_url: str):
division = self.__division
file_path = self.make_name(video_id, self.glossary[video_id][KeyDmc.MOVIE_TYPE])
- self.logger.info(
- Msg.nd_download_video.format(
- idx + 1, len(self.glossary), video_id,
- self.glossary[video_id][KeyDmc.TITLE]))
+ self.logger.info(Msg.nd_download_video.format(
+ idx + 1, len(self.glossary), video_id, self.glossary[video_id][KeyDmc.TITLE]))
file_size = await self._get_file_size(video_id, video_url)
- headers = [
- {"Range": "bytes={}-{}".format(
- int(file_size*order/division),
- int((file_size*(order+1))/division-1)
- )} for order in range(division)]
+ headers = [{
+ "Range": f"bytes={int(file_size*order/division)}-{int((file_size*(order+1))/division-1)}"
+ } for order in range(division)]
for o, h in zip(range(division), headers):
- self.logger.debug("Order: %s, %s", o, h)
+ self.logger.debug(f"Order {o}: {h}")
if self.__multiline:
progress_bars = [tqdm(total=int(file_size / division),
@@ -998,12 +953,12 @@ async def _download(self, idx: int, video_id: str, video_url: str):
async def _download_worker(self, file_path: Union[str, Path], video_url: str,
header: dict, order: int, pbar: tqdm = None) -> tqdm:
- file_path = Path("{}.{:03}".format(file_path, order))
+ file_path = Path(f"{file_path}.{order:03}")
# => video.mp4.000 ~ video.mp4.003 (4分割の場合)
self.logger.debug(file_path)
with file_path.open("wb") as fd:
async with self.session.get(url=video_url, headers=header) as video_data:
- self.logger.debug("Started! Header: %s, Video URL: %s", header, video_url)
+ self.logger.debug(f"Started! Header: {header}, Video URL: {video_url}")
while True:
data = await video_data.content.read(self.__chunk_size)
if not data:
@@ -1012,7 +967,7 @@ async def _download_worker(self, file_path: Union[str, Path], video_url: str,
self.__downloaded_size[order] += downloaded_size
if pbar:
pbar.update(downloaded_size)
- self.logger.debug("Order: %s Done!", order)
+ self.logger.debug(f"Order {order}: done!")
return pbar
def _canceler(self, task_to_cancel: asyncio.Task, _: asyncio.Task) -> bool:
@@ -1046,8 +1001,7 @@ def _combiner(self, video_id: str, coroutine: asyncio.Task):
"""
if coroutine.done() and not coroutine.cancelled():
file_path = self.make_name(video_id, self.glossary[video_id][KeyDmc.MOVIE_TYPE])
- file_names = ["{}.{:03}".format(file_path, order) for order in range(self.__division)]
- self.logger.debug("File names: %s", file_names)
+ file_names = [f"{file_path}.{order:03}" for order in range(self.__division)]
with file_path.open("wb") as fd:
for name in file_names:
with open(name, "rb") as file:
@@ -1061,7 +1015,6 @@ def __init__(self,
mail: str=None, password: str=None,
logger: utils.NTLogger=None,
session: aiohttp.ClientSession=None,
- return_session=False,
limit: int=4,
wayback=False,
loop: asyncio.AbstractEventLoop=None,
@@ -1073,7 +1026,6 @@ def __init__(self,
:param password: パスワード
:param logger: ロガー
:param session: セッション
- :param return_session: セッションを返すか
:param limit: 同時にアクセスする最大数
:param wayback: 過去ログを取りに行くかどうか
:param loop: イベントループ
@@ -1083,7 +1035,6 @@ def __init__(self,
self.__password = password
self.__downloaded_size = None # type: List[int]
self.session = session or self.loop.run_until_complete(self.get_session())
- self.__return_session = return_session
self.__parallel_limit = limit
self.__wayback = wayback
@@ -1108,13 +1059,12 @@ def start(self, glossary, save_dir, xml=False, density: str="0-99999:9999,1000")
"""
utils.check_arg(locals())
self.save_dir = utils.make_dir(save_dir)
- self.logger.debug("Directory to save in: %s", self.save_dir)
if isinstance(glossary, list):
glossary = utils.validator(glossary)
info = Info(
mail=self.__mail, password=self.__password,
- session=self.session, return_session=True)
+ session=self.session)
glossary = info.get_data(glossary)
self.session = info.session
self.glossary = glossary
@@ -1144,9 +1094,8 @@ async def _download(self, idx: int, info: dict, is_xml: bool, density: str) -> s
thread_key = None
force_184 = None
- self.logger.info(Msg.nd_download_comment,
- idx + 1, len(self.glossary), video_id,
- info[KeyGTI.TITLE])
+ self.logger.info(Msg.nd_download_comment.format(
+ idx + 1, len(self.glossary), video_id, info[KeyGTI.TITLE]))
is_official = re.match("^(?:so|\d)", info[KeyDmc.VIDEO_ID]) is not None
@@ -1170,7 +1119,6 @@ async def _download(self, idx: int, info: dict, is_xml: bool, density: str) -> s
return self.postprocesser(is_xml, com_data)
async def retriever(self, data: str, url: str) -> str:
- self.logger.debug("Posting Parameters: %s", data)
async with asyncio.Semaphore(self.__parallel_limit):
async with self.session.post(url=url, data=data) as resp: # type: aiohttp.ClientResponse
return await resp.text()
@@ -1197,7 +1145,6 @@ def saver(self, video_id: str, is_xml: bool, coroutine: asyncio.Task) -> bool:
extention = "json"
file_path = self.make_name(video_id, extention)
- self.logger.debug("File Path: %s", file_path)
with file_path.open("w", encoding="utf-8") as f:
f.write(comment_data + "\n")
self.logger.info(Msg.nd_download_done.format(path=file_path))
@@ -1213,13 +1160,13 @@ async def get_thread_key(self, thread_id, needs_key):
"""
utils.check_arg(locals())
if not int(needs_key) == 1:
- self.logger.debug("needs_key is not 1. Video ID (or Thread ID): %s,"
- " needs_key: %s", thread_id, needs_key)
+ self.logger.debug(f"needs_key is not 1. Video ID (or Thread ID): {thread_id},"
+ f" needs_key: {needs_key}")
return "", "0"
async with self.session.get(URL.URL_GetThreadKey, params={"thread": thread_id}) as resp:
response = await resp.text()
self.logger.debug("Response from GetThreadKey API"
- " (thread id is %s): %s", thread_id, response)
+ f" (thread id is {thread_id}): {response}")
parameters = parse_qs(response)
threadkey = parameters["threadkey"][0] # type: str
force_184 = parameters["force_184"][0] # type: str
@@ -1229,7 +1176,7 @@ async def get_wayback_key(self, thread_id: int):
async with asyncio.Semaphore(self.__parallel_limit):
async with self.session.get(URL.URL_WayBackKey, params={"thread", thread_id}) as resp:
response = await resp.text()
- self.logger.debug("Waybackkey response: %s", response)
+ self.logger.debug(f"Waybackkey response: {response}")
return parse_qs(response)["waybackkey"][0]
def make_param_xml(self, thread_id, user_id, thread_key=None, force_184=None,
@@ -1251,33 +1198,29 @@ def make_param_xml(self, thread_id, user_id, thread_key=None, force_184=None,
:rtype: str
"""
utils.check_arg({"thread_id": thread_id, "user_id": user_id})
- self.logger.debug("Arguments: %s", locals())
- wbk = 'waybackkey="{}"'.format(waybackkey) if waybackkey else ""
+ wbk = f'waybackkey="{waybackkey}"' if waybackkey else ""
if thread_key:
return (
- ''
- ''
- ''
- ''
- '{density}'
- '').format(t_id=thread_id, user_id=user_id,
- t_key=thread_key, force=force_184, wbk=wbk,
- quantity=quantity, density=density)
+ f''
+ f''
+ f''
+ f''
+ f'{density}'
+ f'')
else:
return (
- ''
- ''
- ''
- ''
- '{density}'
- '').format(t_id=thread_id, user_id=user_id, wbk=wbk,
- quantity=quantity, density=density)
+ f''
+ f''
+ f''
+ f''
+ f'{density}'
+ f'')
def make_param_json(self, official_video, user_id, user_key, thread_id,
optional_thread_id=None, thread_key=None, force_184=None,
@@ -1301,7 +1244,6 @@ def make_param_json(self, official_video, user_id, user_key, thread_id,
"""
utils.check_arg({"official_video": official_video, "user_id": user_id,
"user_key": user_key, "thread_id": thread_id})
- self.logger.debug("Arguments of creating JSON: %s", locals())
result = [
{"ping": {"content": "rs:0"}},
{"ping": {"content": "ps:0"}},
@@ -1407,7 +1349,7 @@ def main(args):
info = Info(
mail=mailadrs, password=password,
- logger=logger, sieve=args.nosieve, return_session=True)
+ logger=logger, sieve=args.nosieve)
database = info.get_data(videoid)
session = info.session
diff --git a/nicotools/utils.py b/nicotools/utils.py
index 69795f1..9fedf74 100644
--- a/nicotools/utils.py
+++ b/nicotools/utils.py
@@ -7,7 +7,6 @@
import sys
from argparse import ArgumentParser
from getpass import getpass
-from os.path import join, expanduser
from pathlib import Path
from urllib.parse import parse_qs
@@ -180,41 +179,6 @@ def sizeof_fmt(num):
return "{:.2f}Gb".format(num)
-def get_from_getflv(video_id, session, logger=None):
- """
- GetFlv APIから情報を得る。
-
- * GetFlvのサンプル:
-
- thread_id=1406370428
- &l=314
- &url=http%3A%2F%2Fsmile-pom32.nicovideo.jp%2Fsmile%3Fm%3D24093152.45465
- &ms=http%3A%2F%2Fmsg.nicovideo.jp%2F27%2Fapi%2F
- &ms_sub=http%3A%2F%2Fsub.msg.nicovideo.jp%2F27%2Fapi%2F
- &user_id=<ユーザーIDの数字>
- &is_premium=1
- &nickname=
- &time=1475176067845
- &done=true
- &ng_rv=220
- &userkey=1475177867.%7E1%7EhPBJrVv78e251OPzyAiSs1fYAJhYIzDPOq5LNiNqZxs
-
- * 但しアクセス制限がかかったときには:
-
- error=access_locked&done=true
-
- :param str video_id:
- :param requests.Session session:
- :param NTLogger logger:
- :rtype: dict[str, str] | None
- """
- check_arg(locals())
- suffix = {"as3": 1} if video_id.startswith("nm") else None
- response = session.get(URL.URL_GetFlv + video_id, params=suffix)
- if logger: logger.debug("GetFLV Response: {}".format(response.text))
- return extract_getflv(response.text)
-
-
def extract_getflv(content):
"""
@@ -482,7 +446,7 @@ def __init__(self, file_name=LOG_FILE, name=__name__, log_level=logging.INFO):
filename=str(file_name), encoding="utf-8")
else:
log_file = logging.FileHandler(encoding="utf-8",
- filename=join(expanduser("~"), file_name))
+ filename=str(Path.home() / file_name))
log_file.setLevel(log_level)
formatter = self.get_formatter("file")
log_file.setFormatter(formatter)
diff --git a/setup.py b/setup.py
index 4188657..bd7178c 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,6 @@
'License :: OSI Approved :: MIT License',
'Natural Language :: Japanese',
'Programming Language :: Python :: 3 :: Only',
- 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
diff --git a/tests/test_download.py b/tests/test_download.py
index 0f24027..9f29d88 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -271,5 +271,4 @@ def test_video_dmc_premium_multi(self):
def test_okatadsuke():
- for _parh in (SAVE_DIR, SAVE_DIR):
- shutil.rmtree(str(utils.make_dir(_parh)))
+ shutil.rmtree(str(utils.make_dir(SAVE_DIR)))
diff --git a/tests/travis.sh b/tests/travis.sh
index c695cfb..5ca06a7 100644
--- a/tests/travis.sh
+++ b/tests/travis.sh
@@ -1,9 +1,10 @@
#!/usr/bin/env bash
-suffix=`date "+%Y%m%d_%H%M"`_${TRAVIS_PYTHON_VERSION}
+prefix=`date "+%Y%m%d_%H%M"`_${TRAVIS_PYTHON_VERSION}
log_file=nicotools.log
+cd ~
curl -X POST https://content.dropboxapi.com/2/files/upload\
--header "Authorization: Bearer ${DROPBOX_TOKEN}"\
- --header "Dropbox-API-Arg: {\"path\": \"/Travis/${suffix}_${log_file}\", \"mode\": \"add\"}"\
+ --header "Dropbox-API-Arg: {\"path\": \"/Travis/${prefix}_${log_file}\", \"mode\": \"add\"}"\
--header "Content-Type: application/octet-stream"\
- --data-binary @~/${log_file}
+ --data-binary @${log_file}