Skip to content

Commit

Permalink
取消上传时间限制
Browse files Browse the repository at this point in the history
  • Loading branch information
Hsury committed Nov 30, 2019
1 parent f8df70a commit 834e466
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
if: 'tag IS blank'
env:
global:
- TRAVIS_TAG=v1.4
- TRAVIS_TAG=v1.5
jobs:
include:
-
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
<h4 align="center">☁️ 哔哩哔哩云,支持任意文件的全速上传与下载 ☁️</h4>

<p align="center">
<img src="https://img.shields.io/badge/version-2019.11.11-green.svg?longCache=true&style=for-the-badge">
<img src="https://img.shields.io/badge/version-2019.11.30-green.svg?longCache=true&style=for-the-badge">
<img src="https://img.shields.io/badge/license-SATA-blue.svg?longCache=true&style=for-the-badge">
<img src="https://img.shields.io/travis/com/Hsury/BiliDrive?style=for-the-badge">
</p>
Expand Down
26 changes: 13 additions & 13 deletions drive.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def image_upload(data, cookies):
'category': "daily",
}
try:
response = requests.post(url, headers=headers, cookies=cookies, files=files, timeout=10).json()
response = requests.post(url, headers=headers, cookies=cookies, files=files).json()
except:
response = None
return response
Expand Down Expand Up @@ -110,7 +110,7 @@ def read_history():
history = {}
return history

def read_in_chunks(file_name, chunk_size=16 * 1024 * 1024, chunk_number=-1):
def read_in_chunk(file_name, chunk_size=16 * 1024 * 1024, chunk_number=-1):
chunk_counter = 0
with open(file_name, "rb") as f:
while True:
Expand Down Expand Up @@ -157,7 +157,7 @@ def core(index, block):
block_sha1 = calc_sha1(block, hexdigest=True)
full_block = bmp_header(block) + block
full_block_sha1 = calc_sha1(full_block, hexdigest=True)
url = skippable(full_block_sha1)
url = is_skippable(full_block_sha1)
if url:
# log(f"分块{index} ({len(block) / 1024 / 1024:.2f} MB) 已存在于服务器")
block_dict[index] = {
Expand Down Expand Up @@ -194,7 +194,7 @@ def core(index, block):
finally:
done_flag.release()

def skippable(sha1):
def is_skippable(sha1):
url = default_url(sha1)
headers = {
'Referer': "http://t.bilibili.com/",
Expand Down Expand Up @@ -224,7 +224,7 @@ def write_history(first_4mb_sha1, meta_dict, url):
log("不支持上传文件夹")
return None
log(f"上传: {os.path.basename(file_name)} ({os.path.getsize(file_name) / 1024 / 1024:.2f} MB)")
first_4mb_sha1 = calc_sha1(read_in_chunks(file_name, chunk_size=4 * 1024 * 1024, chunk_number=1), hexdigest=True)
first_4mb_sha1 = calc_sha1(read_in_chunk(file_name, chunk_size=4 * 1024 * 1024, chunk_number=1), hexdigest=True)
history = read_history()
if first_4mb_sha1 in history:
url = history[first_4mb_sha1]['url']
Expand All @@ -242,7 +242,7 @@ def write_history(first_4mb_sha1, meta_dict, url):
terminate_flag = threading.Event()
thread_pool = []
block_dict = {}
for index, block in enumerate(read_in_chunks(file_name, chunk_size=args.block_size * 1024 * 1024)):
for index, block in enumerate(read_in_chunk(file_name, chunk_size=args.block_size * 1024 * 1024)):
if len(thread_pool) >= args.thread:
done_flag.acquire()
if not terminate_flag.is_set():
Expand All @@ -255,7 +255,7 @@ def write_history(first_4mb_sha1, meta_dict, url):
thread.join()
if terminate_flag.is_set():
return None
sha1 = calc_sha1(read_in_chunks(file_name), hexdigest=True)
sha1 = calc_sha1(read_in_chunk(file_name), hexdigest=True)
meta_dict = {
'time': int(time.time()),
'filename': os.path.basename(file_name),
Expand Down Expand Up @@ -310,7 +310,7 @@ def core(index, block_dict):
def block_offset(index):
    """Return the byte offset at which block *index* begins in the assembled file.

    The offset is simply the accumulated size of every block that precedes
    this one in ``meta_dict['block']`` (closed over from the enclosing scope).
    """
    total = 0
    for block in meta_dict['block'][:index]:
        total += block['size']
    return total

def is_overwrite(file_name):
def is_overwritable(file_name):
if args.force:
return True
else:
Expand All @@ -327,10 +327,10 @@ def is_overwrite(file_name):
log(f"线程数: {args.thread}")
download_block_list = []
if os.path.exists(file_name):
if os.path.getsize(file_name) == meta_dict['size'] and calc_sha1(read_in_chunks(file_name), hexdigest=True) == meta_dict['sha1']:
if os.path.getsize(file_name) == meta_dict['size'] and calc_sha1(read_in_chunk(file_name), hexdigest=True) == meta_dict['sha1']:
log(f"{os.path.basename(file_name)}已存在于本地, 且与服务器端文件内容一致")
return file_name
elif is_overwrite(file_name):
elif is_overwritable(file_name):
with open(file_name, "rb") as f:
for index, block_dict in enumerate(meta_dict['block']):
f.seek(block_offset(index))
Expand All @@ -344,7 +344,7 @@ def is_overwrite(file_name):
else:
return None
else:
download_block_list = list(range(len(meta_dict['block'])))
download_block_list = list(range(len(meta_dict['block'])))
done_flag = threading.Semaphore(0)
terminate_flag = threading.Event()
file_lock = threading.Lock()
Expand All @@ -365,7 +365,7 @@ def is_overwrite(file_name):
return None
f.truncate(sum(block['size'] for block in meta_dict['block']))
log(f"{os.path.basename(file_name)}下载完毕, 用时{int(time.time() - start_time)}秒, 平均速度{meta_dict['size'] / 1024 / 1024 / (time.time() - start_time):.2f} MB/s")
sha1 = calc_sha1(read_in_chunks(file_name), hexdigest=True)
sha1 = calc_sha1(read_in_chunk(file_name), hexdigest=True)
if sha1 == meta_dict['sha1']:
log(f"{os.path.basename(file_name)}校验通过")
return file_name
Expand All @@ -375,7 +375,7 @@ def is_overwrite(file_name):

if __name__ == "__main__":
signal.signal(signal.SIGINT, lambda signum, frame: os.kill(os.getpid(), 9))
parser = argparse.ArgumentParser(description="BiliDrive", epilog="By Hsury, 2019/11/11")
parser = argparse.ArgumentParser(description="BiliDrive", epilog="By Hsury, 2019/11/30")
subparsers = parser.add_subparsers()
history_parser = subparsers.add_parser("history", help="view upload history")
history_parser.set_defaults(func=history_handle)
Expand Down

0 comments on commit 834e466

Please sign in to comment.