-from dataclasses import dataclass
+from core.classes import Storage, FileInfo, FileList
+from core.scheduler import scheduler, IntervalTrigger
+from core.logger import logger
+from core.i18n import locale
+from typing import List, Set, Tuple, Dict, Any
+from tqdm import tqdm
+from aiohttp import web
+import aiohttp
+import secrets
+import io
+import asyncio
+import humanize

-class AListStorage():
-    def __init__(self) -> None:
+class AListStorage(Storage):
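+    # Storage backend backed by an AList server: files are uploaded through the
+    # AList HTTP API and served by redirecting clients to each file's raw_url.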
+    def __init__(self, username: str, password: str, url: str, path: str) -> None:
+        self.username = username
+        self.password = password
+        self.url = url
+        self.path = path
+        self.token = ""
+        self.scheduler = None
+        self.headers = {}
+
+    async def init(self) -> None:
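+        # Log in to AList once at startup and register a job that refreshes the
+        # token every two days by re-running fetchToken.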
+        async def fetchToken() -> None:
+            logger.tinfo("storage.info.alist.fetch_token")
+            async with aiohttp.ClientSession(self.url) as session:
+                try:
+                    async with session.post("/api/auth/login", json={"username": self.username, "password": self.password}) as response:
+                        response.raise_for_status()
+                        data = await response.json()
+                        if data["code"] != 200:
+                            raise aiohttp.ClientResponseError(status=data["code"], request_info=response.request_info, history=response.history)
+                        self.token = data["data"]["token"]
+                        self.headers = {"Authorization": self.token}
+                        logger.tsuccess("storage.success.alist.fetch_token")
+                except Exception as e:
+                    logger.terror("storage.error.alist.fetch_token", e=e)
+            if not self.scheduler:
+                self.scheduler = scheduler.add_job(fetchToken, IntervalTrigger(days=2))
+
+        await fetchToken()
+
+    async def check(self) -> None:
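+        # Startup self-test: upload an empty probe file under the storage root,
+        # then delete it, to verify that uploads and deletions work.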
+        file_name = secrets.token_hex(8)
+        file_path = f"{self.path}/{file_name}"
+        try:
+            async with aiohttp.ClientSession(self.url, headers={**self.headers, "File-Path": file_path, "Content-Type": "application/octet-stream"}) as session:
+                response = await session.put("/api/fs/put", data=b"")
+                response.raise_for_status()
+                data = await response.json()
+                if data["code"] != 200:
+                    raise aiohttp.ClientResponseError(status=data["code"], request_info=response.request_info, history=response.history)
+            async with aiohttp.ClientSession(self.url, headers=self.headers) as session:
+                response = await session.post("/api/fs/remove", json={"names": [file_name], "dir": self.path})
+                response.raise_for_status()
+                data = await response.json()
+                if data["code"] != 200:
+                    raise aiohttp.ClientResponseError(status=data["code"], request_info=response.request_info, history=response.history)
+            logger.tsuccess("storage.success.alist.check")
+        except Exception as e:
+            logger.terror("storage.error.alist.check", e=e)
+
+    async def getMissingFiles(self, files: FileList, pbar: tqdm) -> FileList:
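+        # Walk the 256 two-hex-digit subdirectories (00..ff) on AList, collect the
+        # (hash, size) pairs that already exist, and return only the files still missing.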
+        existing_files: List[FileInfo] = []
+        async with aiohttp.ClientSession(self.url, headers=self.headers) as session:
+            async def getFileList(dir: str, pbar: tqdm) -> List[FileInfo]:
+                file_path = self.path + dir
+                response = await session.post("/api/fs/list", headers=self.headers, json={"path": file_path})
+                response.raise_for_status()
+                data = await response.json()
+                pbar.update(1)
+                if data["code"] != 200:
+                    return []
+                return [FileInfo(size=content["size"], hash=content["name"], path="", mtime=-1) for content in data["data"]["content"] if not content["is_dir"]]
+
+            with tqdm(desc=locale.t("storage.tqdm.alist.get_filelist"), total=256) as pbar:
+                for i in range(256):
+                    dir = f"/{i:02x}"
+                    existing_files += await getFileList(dir, pbar)
+
+        existing_info: Set[Tuple[str, int]] = {(file.hash, file.size) for file in existing_files}
+        missing_files = [file for file in files.files if (file.hash, file.size) not in existing_info]
+
+        return FileList(files=missing_files)
+
+    async def measure(self, size: int, request: web.Request, response):
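+        # Bandwidth-measure endpoint: make sure a dummy file named ".{size}" exists on
+        # AList (uploading a 1 MiB byte pattern if it does not), then 302-redirect the
+        # client to its raw_url.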
+        file_path = f"{self.path}/.{size}"
+        try:
+            async with aiohttp.ClientSession(self.url, headers=self.headers) as session:
+                response = await session.post("/api/fs/get", json={"path": file_path, "password": self.password})
+                response.raise_for_status()
+                data = await response.json()
+
+                if data["code"] == 200:
+                    response = web.HTTPFound(data["data"]["raw_url"])
+                    await response.prepare(request)
+                    return
+
+                if data["code"] != 200:
+                    try:
+                        buffer = b"\x00\x66\xcc\xff" * 256 * 1024
+                        response = await session.put("/api/fs/put", data=buffer, headers={**self.headers, "File-Path": file_path, "Content-Type": "application/octet-stream"})
+                        response.raise_for_status()
+                        data = await response.json()
+                        if data["code"] != 200:
+                            raise aiohttp.ClientResponseError(status=500, request_info=response.request_info, history=response.history)
+                    except Exception as e:
+                        logger.terror("storage.error.alist.upload", e=e)
+                        raise
+
+                    response = await session.post("/api/fs/get", json={"path": file_path, "password": self.password})
+                    response.raise_for_status()
+                    data = await response.json()
+
+                    response = web.HTTPFound(data["data"]["raw_url"])
+                    await response.prepare(request)
+                    return
+        except Exception as e:
+            logger.terror("storage.error.alist.measure", e=e)
+            return
+
+    async def express(self, hash: str, request: web.Request, response) -> Dict[str, Any]:
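+        # Serve a file identified by its hash: look it up under "<path>/<hash[:2]>/<hash>"
+        # and redirect the client to its raw_url, returning the bytes served and hit count.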
+        path = f"{self.path}/{hash[:2]}/{hash}"
+        async with aiohttp.ClientSession(self.url, headers=self.headers) as session:
+            res = await session.post("/api/fs/get", json={"path": path, "password": self.password})
+            data = await res.json()
+            if data["code"] != 200:
+                response = web.HTTPNotFound()
+                await response.prepare(request)
+                return {"bytes": 0, "hits": 0}
+            try:
+                response = web.HTTPFound(data["data"]["raw_url"])
+                response.headers["x-bmclapi-hash"] = hash
+                await response.prepare(request)
+                return {"bytes": data["data"]["size"], "hits": 1}
+            except Exception as e:
+                response = web.HTTPInternalServerError(reason=e)
+                await response.prepare(request)
+                logger.debug(e)
+                return {"bytes": 0, "hits": 0}
+
+    async def writeFile(self, file: FileInfo, content: io.BytesIO, delay: int, retry: int) -> bool:
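+        # Upload one file to "<path>/<hash[:2]>/<hash>", retrying up to `retry` times with
+        # `delay` seconds between attempts, and verify the stored size against the expected size.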
+        file_path = f"{self.path}/{file.hash[:2]}/{file.hash}"
+        async def getSize() -> int:
+            async with aiohttp.ClientSession(self.url, headers=self.headers) as session:
+                response = await session.post("/api/fs/get", json={"path": file_path, "password": self.password})
+                response.raise_for_status()
+                data = await response.json()
+                if data["code"] != 200:
+                    raise aiohttp.ClientResponseError(status=data["code"], request_info=response.request_info, history=response.history)
+                return data["data"]["size"]
+
+        for _ in range(retry):
+            try:
+                async with aiohttp.ClientSession(self.url, headers={**self.headers, "File-Path": file_path, "Content-Type": "application/octet-stream"}) as session:
+                    response = await session.put("/api/fs/put", data=content.getvalue())
+                    response.raise_for_status()
+                    data = await response.json()
+                    if data["code"] != 200:
+                        raise aiohttp.ClientResponseError(status=data["code"], request_info=response.request_info, history=response.history)
+                size = await getSize()
+                if size == file.size:
+                    return True
+                else:
+                    logger.terror("storage.error.alist.write_file.size_mismatch", file=file.hash, file_size=humanize.naturalsize(file.size, binary=True), actual_file_size=humanize.naturalsize(size, binary=True))
+                    return False
+            except Exception as e:
+                logger.terror("storage.error.alist.write_file.retry", file=file.hash, e=e, retry=delay)
+                await asyncio.sleep(delay)
+        return False
+
+    async def recycleFiles(self, files):
         pass