11import abc
2- from collections import deque
2+ import asyncio
3+ from collections import defaultdict , deque
34from dataclasses import dataclass
45import hashlib
56import os
67from pathlib import Path
78import time
89from typing import Any , Optional
10+
11+ from tqdm import tqdm
912from .. import scheduler , utils , cache
1013from ..logger import logger
1114import urllib .parse as urlparse
@@ -32,6 +35,7 @@ def __init__(self, path: str, width: int) -> None:
3235 self .width = width
3336 self .unique_id = hashlib .md5 (f"{ self .type } ,{ self .path } " .encode ("utf-8" )).hexdigest ()
3437 self .current_width = 0
38+ self .cache_files = cache .MemoryStorage ()
3539
3640 def __repr__ (self ) -> str :
3741 return f"{ self .type } ({ self .path } )"
@@ -46,7 +50,7 @@ async def read_file(self, file_hash: str) -> bytes:
    async def delete_file(self, file_hash: str) -> bool:
        """Remove the stored file identified by *file_hash*.

        :return: True on success (see the local-storage implementation).
        """
        raise NotImplementedError("Not implemented")
    @abc.abstractmethod
    async def list_files(self, pbar: Optional[tqdm] = None) -> defaultdict[str, deque[File]]:
        """Enumerate every stored file, grouped into deques keyed per
        hash-prefix directory.

        :param pbar: optional progress bar, advanced as prefix directories
            are scanned.
        """
        raise NotImplementedError("Not implemented")
5155 @abc .abstractmethod
5256 async def exists (self , file_hash : str ) -> bool :
@@ -95,17 +99,33 @@ async def delete_file(self, file_hash: str) -> bool:
9599 os .remove (f"{ self .path } /{ file_hash [:2 ]} /{ file_hash } " )
96100 return True
97101
98- async def list_files (self ) -> list :
99- files = []
100- for root , dirs , filenames in os .walk (self .path ):
101- for filename in filenames :
102- file = File (
103- name = filename ,
104- size = os .path .getsize (os .path .join (root , filename )),
105- mtime = os .path .getmtime (os .path .join (root , filename )),
106- hash = filename
107- )
108- files .append (file )
102+ async def list_files (self , pbar : Optional [tqdm ] = None ) -> dict [str , deque [File ]]:
103+ def update ():
104+ pbar .update (1 ) # type: ignore
105+ def empty ():
106+ ...
107+ update_tqdm = empty
108+ if pbar is not None :
109+ update_tqdm = update
110+
111+ files : defaultdict [str , deque [File ]] = defaultdict (deque )
112+ for root_id in range (0 , 256 ):
113+ root = f"{ self .path } /{ root_id :02x} "
114+ if not os .path .exists (root ):
115+ update_tqdm ()
116+ continue
117+ for file in os .listdir (root ):
118+ path = os .path .join (root , file )
119+ if not os .path .isfile (path ):
120+ continue
121+ files [root ].append (File (
122+ file ,
123+ os .path .getsize (path ),
124+ os .path .getmtime (path ),
125+ file
126+ ))
127+ await asyncio .sleep (0 )
128+ update_tqdm ()
109129 return files
110130
111131 async def get_size (self , file_hash : str ) -> int :
@@ -197,7 +217,7 @@ def __str__(self) -> str:
197217 def __repr__ (self ) -> str :
198218 return f"AlistStorage({ self .path } )"
199219
200- async def _action_data (self , action : str , url : str , data : Any , headers : dict [str , str ] = {}) -> AlistResult :
220+ async def _action_data (self , action : str , url : str , data : Any , headers : dict [str , str ] = {}, session : Optional [ aiohttp . ClientSession ] = None ) -> AlistResult :
201221 hash = hashlib .sha256 (f"{ action } ,{ url } ,{ data } ,{ headers } " .encode ()).hexdigest ()
202222 if hash in self .cache :
203223 return self .cache .get (hash )
@@ -214,14 +234,18 @@ async def _action_data(self, action: str, url: str, data: Any, headers: dict[str
214234 action , url ,
215235 data = data ,
216236 ) as resp :
217- result = AlistResult (
218- ** await resp .json ()
219- )
220- if result .code != 200 :
221- logger .terror ("storage.error.alist" , status = result .code , message = result .message )
222- else :
223- self .cache .set (hash , result , 30 )
224- return result
237+ try :
238+ result = AlistResult (
239+ ** await resp .json ()
240+ )
241+ if result .code != 200 :
242+ logger .terror ("storage.error.alist" , status = result .code , message = result .message )
243+ else :
244+ self .cache .set (hash , result , 30 )
245+ return result
246+ except :
247+ logger .terror ("storage.error.alist" , status = resp .status , message = await resp .text ())
248+ raise
225249
226250 async def __info_file (self , file_hash : str ) -> AlistFileInfo :
227251 r = await self ._action_data (
@@ -272,8 +296,51 @@ async def get_mtime(self, file_hash: str) -> float:
272296 async def get_size (self , file_hash : str ) -> int :
273297 info = await self .__info_file (file_hash )
274298 return max (0 , info .size )
275- async def list_files (self ) -> list :
276- return []
299+ async def list_files (self , pbar : Optional [tqdm ] = None ) -> defaultdict [str , deque [File ]]:
300+ def update ():
301+ pbar .update (1 ) # type: ignore
302+ def empty ():
303+ ...
304+ update_tqdm = empty
305+ if pbar is not None :
306+ update_tqdm = update
307+
308+ files : defaultdict [str , deque [File ]] = defaultdict (deque )
309+ async with aiohttp .ClientSession (
310+ self .url ,
311+ headers = {
312+ "Authorization" : await self ._get_token ()
313+ }
314+ ) as session :
315+ for root_id in range (0 , 256 ):
316+ root = f"{ self .path } /{ root_id :02x} "
317+ if f"listfile_{ root } " in self .cache :
318+ result = self .cache .get (f"listfile_{ root } " )
319+ else :
320+ async with session .post (
321+ "/api/fs/list" ,
322+ data = {
323+ "path" : root
324+ },
325+ ) as resp :
326+ result = AlistResult (
327+ ** await resp .json ()
328+ )
329+ if result .code != 200 :
330+ logger .terror ("storage.error.alist" , status = result .code , message = result .message )
331+ else :
332+ self .cache .set (f"listfile_{ root } " , result , 30 )
333+ for r in result .data ["content" ]:
334+ file = File (
335+ r ["name" ],
336+ r ["size" ],
337+ utils .parse_isotime_to_timestamp (r ["modified" ]),
338+ r ["name" ]
339+ )
340+ files [f"{ root_id :02x} " ].append (file )
341+ update_tqdm ()
342+
343+ return files
277344 async def read_file (self , file_hash : str ) -> bytes :
278345 info = await self .__info_file (file_hash )
279346 if info .size == - 1 :
0 commit comments