From a7d89700e8cc4b7a34df91427ec6f7c1c2a226bb Mon Sep 17 00:00:00 2001 From: RaSan147 <34002411+RaSan147@users.noreply.github.com> Date: Sun, 5 Mar 2023 00:33:28 +0600 Subject: [PATCH] bump to v0.7.0, see change log --- CHANGELOG.MD | 42 +- README.md | 16 +- VERSION | 2 +- {src => dev_src}/clone.py | 0 dev_src/html_vid.html | 2 + dev_src/local_server_pyrobox.py | 29 +- dev_src/pyroboxCore.py | 82 +- dev_src/te-st/test_pyrobox_module.py | 6 + run_setup.py | 4 +- setup.cfg | 2 +- setup.py | 2 - src/__init__.py | 4 +- src/__main__.py | 3 +- src/pyroboxCore.py | 1722 +++++++++++++++++ src/server.py | 2575 +++++--------------------- 15 files changed, 2315 insertions(+), 2176 deletions(-) rename {src => dev_src}/clone.py (100%) create mode 100644 dev_src/te-st/test_pyrobox_module.py create mode 100644 src/pyroboxCore.py diff --git a/CHANGELOG.MD b/CHANGELOG.MD index cbd581b..04d2762 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,28 @@ +# Version 0.7.0 + ## Client-side Changes: + * Video plater page shows title of video + * Video player page predicts whether video is playable or not by Browser + ## Server-side Changes: + * Added manual password, use `--password` or `-k` flag to set password + * Using pyroboxCore as External module + * all prints are now using `logger.log|info|warn|error|debug` instead of `print` + * `TODO`: To change log level, use `--log-level` or `-l` flag + * To change log level programmatically, use `pyroboxCore.logger.setLevel(logging.DEBUG|INFO|WARN|ERROR)` + * REMOVED `pyrobox.clone` module (stil available in `dev_src` folder) + * The Entire server is now using `@SimpleHTTPRequestHandler.on_req` decorator importing from pyroboxCore to handle requests + ### check v0.6.1 for more info + * added send_file function to pyroboxCore + * improved POST request handling + + ## Fixes: + * FIXED Python 3.7 support + * Fixed Video player text issue https://github.com/RaSan147/py_httpserver_Ult/issues/70 + * Fixed zip download not working + * Fixed many bugs +------------------------------------------------------------------------ +------------------------------------------------------------------------ + + # Version 0.6.9 ## Client-side Changes: * Nothing to notice @@ -6,6 +31,9 @@ ## Fixes: * Forgot to increment version number in `server.py` +------------------------------------------------------------------------ +------------------------------------------------------------------------ + # Version 0.6.8 ## Client-side Changes: * Nothing to notice @@ -14,6 +42,8 @@ ## Fixes: * Reload_server button now works +------------------------------------------------------------------------ +------------------------------------------------------------------------ # Version 0.6.7 ## Client-side Changes: @@ -23,6 +53,8 @@ ## Fixes: * Added requests in requirements +------------------------------------------------------------------------ +------------------------------------------------------------------------ # Version 0.6.6 ## Client-side Changes: @@ -32,6 +64,8 @@ ## Fixes: * Fixed where files had "/" at the end of their name +------------------------------------------------------------------------ +------------------------------------------------------------------------ # Version 0.6.5 ## Client-side Changes: @@ -45,9 +79,8 @@ ## Fixes: * Nothing to notice since 0.6.4 - - ------------------------------------------------------------------------ +------------------------------------------------------------------------ 
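A rough sketch of how the 0.7.0 pieces listed above fit together from a user script. The import paths follow this patch (`pyrobox.pyroboxCore`, `pyrobox.server` per `src/__init__.py` and `setup.py`), but whether a custom `on_req` route wins over pyrobox's own catch-all handler depends on registration order — treat the whole snippet as illustrative, not as the project's documented API:

```python
import logging

from pyrobox import pyroboxCore, server   # module names as exposed by src/__init__.py in this patch

# Programmatic log level, as the changelog entry above describes
pyroboxCore.logger.setLevel(logging.WARNING)

# A custom GET route using the same decorator the server now uses internally.
# Illustrative only: the built-in routes registered on import may match first.
@pyroboxCore.SimpleHTTPRequestHandler.on_req('GET', url='/ping')
def ping(self, *args, **kwargs):
    return self.send_txt(200, 'pong')

if __name__ == '__main__':
    server.run()   # CLI equivalent with the new flag: pyrobox -k "my new password"
```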
+------------------------------------------------------------------------ # Version 0.6.4 @@ -60,6 +93,7 @@ ------------------------------------------------------------------------ +------------------------------------------------------------------------ # Version 0.6.3 @@ -71,6 +105,7 @@ * fixed log_message not working +------------------------------------------------------------------------ ------------------------------------------------------------------------ # Version 0.6.2 @@ -82,6 +117,7 @@ * fixed upload form not working due to missing `action="?upload"` which is required for POST requests +------------------------------------------------------------------------ ------------------------------------------------------------------------ # Version 0.6.1 diff --git a/README.md b/README.md index fe157d7..ee15f57 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,9 @@ And also for the same reason, I'm punching too many updates (acually bug fixes) """ # pyrobox 🔥 -` Note ` UPLOAD PASSWORD: `SECret` +**`Note :`** DEFAULT UPLOAD PASSWORD: `SECret` +* you can change it by editing the code (see `config` class at top) +* to set password from command line, use `-k` or `--password` flag # Status [![Downloads](https://static.pepy.tech/badge/pyrobox)](https://pepy.tech/project/pyrobox) @@ -50,6 +52,7 @@ Server side requirement # Installation ---------------------------------------------------------------- * **Install Python 3.7 or higher** + * **Close older pyrobox process if already running** 1. Install using PIP ### On Windows: * Open `CMD` or `PowerShell` @@ -61,6 +64,7 @@ Server side requirement * Run `pip3 install pyrobox` * Run `pyrobox` to launch the server + CHECK [FAQ](#faq) FOR FUTURE HELP AND ISSUE FIX @@ -81,7 +85,14 @@ CHECK [FAQ](#faq) FOR FUTURE HELP AND ISSUE FIX - `pyrobox -d . 45678` # will run on port 45678 in current directory 1. To specify alternate bind address - - Add bind add `-bind {address}` # idk if it works and how it works, but it's there + - Add bind add `-bind {address}` # ie: `-bind 127.0.0.2` or `-bind 127.0.0.99` + +1. To change upload password + - i) or add `-k` or `--password` command line argument when launching the program + - `pyrobox -k "my new password"` to launch the server with new password + - `pyrobox -k ""` to launch the server without password + - `pyrobox` to launch the server with default password (SECret) + - ii) just edit the code for permanent change (see `config` class at top) TODO: -------------------------------------------------------------- @@ -91,7 +102,6 @@ CHECK [FAQ](#faq) FOR FUTURE HELP AND ISSUE FIX * https://github.com/RaSan147/py_httpserver_Ult/issues/36 Add side bar to do something 🤔 * check output ip and port accuracy on multiple os * https://github.com/RaSan147/py_httpserver_Ult/issues/37 Backup code if Reload causes unhandled issue and can't be accessed -* https://github.com/RaSan147/py_httpserver_Ult/issues/38 command line arg for passwords (vulnerable on reload) * https://github.com/RaSan147/py_httpserver_Ult/issues/39 User login and user based permission set. 🔑 # Support for more features diff --git a/VERSION b/VERSION index 1a5ac0d..faef31a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.6.9 +0.7.0 diff --git a/src/clone.py b/dev_src/clone.py similarity index 100% rename from src/clone.py rename to dev_src/clone.py diff --git a/dev_src/html_vid.html b/dev_src/html_vid.html index 1413fdd..71b8e37 100644 --- a/dev_src/html_vid.html +++ b/dev_src/html_vid.html @@ -1,6 +1,8 @@ +

Watching: ${PY_FILE_NAME}

+

${PY_UNSUPPORT_WARNING}

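The `${PY_*}` placeholders in this template are filled server-side with `string.Template.safe_substitute` (see the `send_video_page` hunk in the next file). A standalone illustration, using stand-in template text rather than the real `html_vid.html` markup:

```python
from string import Template

# Stand-in for the html_vid.html fragment above; the real page wraps these in full HTML.
page = Template("Watching: ${PY_FILE_NAME}\n${PY_UNSUPPORT_WARNING}")

print(page.safe_substitute(
    PY_FILE_NAME="clip.mkv",
    PY_UNSUPPORT_WARNING="It seems the HTML player may not be able to play "
                         "this video format. Try downloading it instead.",
))
# safe_substitute leaves any unknown ${...} names in place instead of raising,
# which is why the server can fill the placeholders in separate passes.
```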
diff --git a/dev_src/local_server_pyrobox.py b/dev_src/local_server_pyrobox.py index d863bcc..6ee49b3 100644 --- a/dev_src/local_server_pyrobox.py +++ b/dev_src/local_server_pyrobox.py @@ -38,7 +38,7 @@ false = F = False -config.parser.add_argument('--password', '-k', +config.parser.add_argument('--password', '-k', default=config.PASSWORD, type=str, help='Upload Password (default: %(default)s)') @@ -439,14 +439,21 @@ def get_dir_m_time(path): -def get_titles(path): +def get_titles(path, file=False): + """Make titles for the header directory + path: the path of the file or directory + file: if True, path is a file, else it's a directory + + output: `Viewing NAME`""" paths = path.split('/') + if file: + return 'Viewing ' + paths[-1] if paths[-2]=='': return 'Viewing 🏠 HOME' else: return 'Viewing ' + paths[-2] - + def dir_navigator(path): @@ -637,7 +644,7 @@ def list_directory(self:SH, path): encoded = '\n'.join(r).encode(enc, 'surrogateescape') - + return self.send_txt(HTTPStatus.OK, encoded) @@ -1089,7 +1096,7 @@ def send_video_page(self: SH, *args, **kwargs): - title = get_titles(displaypath) + title = get_titles(displaypath, file=True) r.append(directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, PY_PUBLIC_URL=config.address(), @@ -1101,8 +1108,9 @@ def send_video_page(self: SH, *args, **kwargs): if ctype not in ['video/mp4', 'video/ogg', 'video/webm']: warning = ('

It seems the HTML player may not be able to play this video format. Try downloading it instead.

') - + r.append(_video_script().safe_substitute(PY_VID_SOURCE=vid_source, + PY_FILE_NAME = displaypath.split("/")[-1], PY_CTYPE=ctype, PY_UNSUPPORT_WARNING=warning)) @@ -1241,7 +1249,7 @@ def upload(self: SH, *args, **kwargs): # PASSWORD SYSTEM password = post.get_part(verify_name='password', decode=T)[1] - + self.log_debug(f'post password: {[password]} by client') if password != config.PASSWORD: # readline returns password with \r\n at end self.log_info(f"Incorrect password by {uid}") @@ -1627,8 +1635,9 @@ def default_post(self: SH, *args, **kwargs): -def main(): - run_server(handler=SH) + +# proxy for old versions +run = run_server if __name__ == '__main__': - main() + run() diff --git a/dev_src/pyroboxCore.py b/dev_src/pyroboxCore.py index 84ce9d0..3d8e579 100644 --- a/dev_src/pyroboxCore.py +++ b/dev_src/pyroboxCore.py @@ -1,4 +1,4 @@ -__version__ = "0.6.5" +__version__ = "0.7.0" enc = "utf-8" __all__ = [ "HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", @@ -15,7 +15,7 @@ import argparse -logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s') +logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: \n%(message)s') logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) @@ -37,9 +37,10 @@ def __init__(self): self.IP = None # will be assigned by checking # DEFAULT PORT TO LAUNCH SERVER - self.port= 45454 # DEFAULT PORT TO LAUNCH SERVER + self.port= 6969 # DEFAULT PORT TO LAUNCH SERVER # UPLOAD PASSWORD SO THAT ANYONE RANDOM CAN'T UPLOAD + # CAN BE CHANGED BY USING --password NEW_PASSWORD self.PASSWORD= "SECret" # LOGGING @@ -64,7 +65,7 @@ def __init__(self): # RUNNING SERVER STATS self.ftp_dir = self.get_default_dir() - self.dev_mode = True + self.dev_mode = False self.ASSETS = False # if you want to use assets folder, set this to True self.ASSETS_dir = os.path.join(self.MAIN_FILE_dir, "/../assets/") self.reload = False @@ -232,11 +233,11 @@ def __call__(self, *key): def reload_server(): """reload the server process from file""" file = config.MAIN_FILE - print("Reloading...") - # print(sys.executable, config.MAIN_FILE, *sys.argv[1:]) + logger.info("Reloading...\n"+ + " ".join( + ["RE-RUNNING: ", sys.executable, sys.executable, file, *sys.argv[1:]] + )) try: - logger.debug(" ".join(["RE-RUNNING: ", sys.executable, sys.executable, file, *sys.argv[1:]])) - os.execl(sys.executable, sys.executable, file, *sys.argv[1:]) except: traceback.print_exc() @@ -639,14 +640,16 @@ def handle_one_request(self): _w = tools.term_width() w = _w - len(str(self.req_hash)) -2 w = w//2 - print('='*w + f' {self.req_hash} ' + '='*w) - print( f'{self.req_hash}|=>\t request\t: {self.command}', - f'{self.req_hash}|=>\t url \t: {url_path}', - f'{self.req_hash}|=>\t query \t: {query}', - f'{self.req_hash}|=>\t fragment\t: {fragment}', - f'{self.req_hash}|=>\t full url \t: {self.path}', - sep=f'\n') - print('+'*w + f' {self.req_hash} ' + '+'*w) + logger.info('='*w + f' {self.req_hash} ' + '='*w + '\n'+ + '\n'.join( + [f'{self.req_hash}|=>\t request\t: {self.command}', + f'{self.req_hash}|=>\t url \t: {url_path}', + f'{self.req_hash}|=>\t query \t: {query}', + f'{self.req_hash}|=>\t fragment\t: {fragment}', + f'{self.req_hash}|=>\t full url \t: {self.path}', + ]) + '\n'+ + '+'*w + f' {self.req_hash} ' + '+'*w + ) @@ -656,8 +659,9 @@ def handle_one_request(self): except Exception: traceback.print_exc() - print('-'*w + f' {self.req_hash} ' + '-'*w) - print('#'*_w) + logger.info('-'*w + f' {self.req_hash} ' + '-'*w + '\n'+ + '#'*_w + ) self.wfile.flush() #actually send 
the response if not already done. except (TimeoutError, socket.timeout) as e: #a read or a write timed out. Discard this connection @@ -789,7 +793,7 @@ def log_request(self, code='-', size='-'): self.log_message(f'"{self.requestline}"', code, size) def log_error(self, *args): - """Log an error. + """Log an error. [ERROR PRIORITY] This is called when a request cannot be fulfilled. By default it passes the message on to log_message(). @@ -802,16 +806,16 @@ def log_error(self, *args): self.log_message(args, error = True) def log_warning(self, *args): - """Log a warning""" + """Log a warning message [HIGH PRIORITY]""" self.log_message(args, warning = True) def log_debug(self, *args, write = True): - """Log a debug message""" + """Log a debug message [LOWEST PRIORITY]""" self.log_message(args, debug = True, write = write) - def log_info(self, *args): - """Default log""" - self.log_message(args) + def log_info(self, *args, write = False): + """Default log message [MEDIUM PRIORITY]""" + self.log_message(args, write = write) def _log_writer(self, message): os.makedirs(config.log_location, exist_ok=True) @@ -971,7 +975,7 @@ def do_GET(self): try: self.copyfile(f, self.wfile) except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) + self.log_info(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) finally: f.close() @@ -1030,10 +1034,6 @@ def test_req(self, url='', hasQ=(), QV={}, fragent='', url_regex=''): ''' - # print("^"+url, hasQ, QV, fragent) - # print(self.url_path, self.query, self.fragment) - # print(self.url_path != url, self.query(*hasQ), self.query, self.fragment != fragent) - if url_regex: if not re.search("^"+url_regex+'$', self.url_path): return False elif url and url!=self.url_path: return False @@ -1074,7 +1074,6 @@ def do_POST(self): url_path, query, fragment = self.url_path, self.query, self.fragment spathsplit = self.url_path.split("/") - # print(f'url: {url_path}\nquery: {query}\nfragment: {fragment}') try: for case, func in self.handlers['POST']: @@ -1089,7 +1088,7 @@ def do_POST(self): try: self.copyfile(f, self.wfile) except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) + logger.info(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) finally: f.close() return @@ -1099,7 +1098,7 @@ def do_POST(self): return self.send_error(HTTPStatus.BAD_REQUEST, "Invalid request.") except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) + logger.info(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) return except Exception as e: traceback.print_exc() @@ -1113,10 +1112,10 @@ def redirect(self, location): self.send_header("Location", location) self.end_headers() - def return_txt(self, code, msg, content_type="text/html; charset=utf-8", write_log=False): + def return_txt(self, code, msg, content_type="text/html; charset=utf-8"): '''returns only the head to client and returns a file object to be used by copyfile''' - self.log_debug(f'[RETURNED] {code} {msg} to client', write=write_log) + self.log_debug(f'[RETURNED] {code} to client') if not isinstance(msg, bytes): encoded = msg.encode('utf-8', 'surrogateescape') else: @@ -1132,9 +1131,9 @@ def return_txt(self, code, msg, content_type="text/html; charset=utf-8", write_l 
self.end_headers() return f - def send_txt(self, code, msg, content_type="text/html; charset=utf-8", write_log=False): + def send_txt(self, code, msg, content_type="text/html; charset=utf-8"): '''sends the head and file to client''' - f = self.return_txt(code, msg, content_type, write_log) + f = self.return_txt(code, msg, content_type) if self.command == "HEAD": return # to avoid sending file on get request self.copyfile(f, self.wfile) @@ -1379,7 +1378,6 @@ def copyfile(self, source, outputfile): source.read(1) except: traceback.print_exc() - print(source) source.seek(0) shutil.copyfileobj(source, outputfile) @@ -1633,7 +1631,7 @@ def test(HandlerClass=BaseHTTPRequestHandler, on_network = local_ip!=(device_ip) - print(tools.text_box( + logger.info(tools.text_box( f"Serving HTTP on {host} port {port} \n", #TODO: need to check since the output is "Serving HTTP on :: port 6969" f"(http://{url_host}:{port}/) ...\n", #TODO: need to check since the output is "(http://[::]:6969/) ..." f"Server is probably running on\n", @@ -1645,10 +1643,10 @@ def test(HandlerClass=BaseHTTPRequestHandler, try: httpd.serve_forever(poll_interval=0.1) except KeyboardInterrupt: - print("\nKeyboard interrupt received, exiting.") + logger.info("\nKeyboard interrupt received, exiting.") except OSError: - print("\nOSError received, exiting.") + logger.info("\nOSError received, exiting.") finally: if not config.reload: sys.exit(0) @@ -1684,11 +1682,11 @@ def run(port = None, directory = None, bind = None, arg_parse= True, handler = S - print(tools.text_box("Running pyroboxCore: ", config.MAIN_FILE, "Version: ", __version__)) + logger.info(tools.text_box("Running pyroboxCore: ", config.MAIN_FILE, "Version: ", __version__)) if directory == config.ftp_dir and not os.path.isdir(config.ftp_dir): - print(config.ftp_dir, "not found!\nReseting directory to current directory") + logger.warning(config.ftp_dir, "not found!\nReseting directory to current directory") directory = "." handler_class = partial(handler, diff --git a/dev_src/te-st/test_pyrobox_module.py b/dev_src/te-st/test_pyrobox_module.py new file mode 100644 index 0000000..e16ab80 --- /dev/null +++ b/dev_src/te-st/test_pyrobox_module.py @@ -0,0 +1,6 @@ +import pyrobox + +print(dir(pyrobox)) + +print(pyrobox.__file__) +pyrobox.server.run_server(port=999) diff --git a/run_setup.py b/run_setup.py index cbd6085..d531b11 100644 --- a/run_setup.py +++ b/run_setup.py @@ -8,8 +8,8 @@ with open('VERSION', 'r') as f: version = f.read().strip() -os.system(f"pip uninstall pyrobox") -os.system(f"pip install --user ./dist/pyrobox-{version}.tar.gz") +os.system(f"pip uninstall pyrobox -y") +os.system(f"pip install ./dist/pyrobox-{version}.tar.gz") os.system("pyrobox 45454") # post to pypi diff --git a/setup.cfg b/setup.cfg index 11788ca..2b916b8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = pyrobox -version = 0.6.9 +version = 0.7.0 authors = Rasan author_email= wwwqweasd147@gmail.com description = Personal DropBox for Private Network diff --git a/setup.py b/setup.py index 0ae38c4..c3205aa 100644 --- a/setup.py +++ b/setup.py @@ -5,11 +5,9 @@ install_requires=[ 'natsort', "send2trash", - "requests", ], entry_points=''' [console_scripts] pyrobox=pyrobox:server.run - # pyrobox-clone=pyrobox:clone ''', ) \ No newline at end of file diff --git a/src/__init__.py b/src/__init__.py index 7ec468d..c18108d 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1,4 +1,4 @@ -__all__ = ["server", "clone"] +__all__ = ["server", "pyroboxCore"] from . import server -from . 
import clone \ No newline at end of file +from . import pyroboxCore \ No newline at end of file diff --git a/src/__main__.py b/src/__main__.py index a1824c4..472d97d 100644 --- a/src/__main__.py +++ b/src/__main__.py @@ -1,2 +1 @@ -from . import server -from . import clone \ No newline at end of file +from . import server \ No newline at end of file diff --git a/src/pyroboxCore.py b/src/pyroboxCore.py new file mode 100644 index 0000000..8461d6d --- /dev/null +++ b/src/pyroboxCore.py @@ -0,0 +1,1722 @@ +__version__ = "0.7.0" +enc = "utf-8" +__all__ = [ + "HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", + "SimpleHTTPRequestHandler", + +] + +import os +import atexit +import logging +from queue import Queue +from typing import Union + +import argparse + + +logging.basicConfig(level=logging.INFO, format='%(levelname)s: \n%(message)s') + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +# set INFO to see all the requests +# set WARNING to see only the requests that made change to the server +# set ERROR to see only the requests that made the errors + + + +endl = "\n" +T = t = true = True # too lazy to type +F = f = false = False # too lazy to type + +class Config: + def __init__(self): + # DEFAULT DIRECTORY TO LAUNCH SERVER + self.ftp_dir = "." # DEFAULT DIRECTORY TO LAUNCH SERVER + + self.IP = None # will be assigned by checking + + # DEFAULT PORT TO LAUNCH SERVER + self.port= 6969 # DEFAULT PORT TO LAUNCH SERVER + + # UPLOAD PASSWORD SO THAT ANYONE RANDOM CAN'T UPLOAD + # CAN BE CHANGED BY USING --password NEW_PASSWORD + self.PASSWORD= "SECret" + + # LOGGING + self.log_location = "./" # fallback log_location = "./" + self.allow_web_log = True # if you want to see some important LOG in browser, may contain your important information + self.write_log = False # if you want to write log to file + + # ZIP FEATURES + self.default_zip = "zipfile" # or "zipfile" to use python built in zip module + + # CHECK FOR MISSING REQUEIREMENTS + self.run_req_check = True + + # FILE INFO + self.MAIN_FILE = os.path.realpath(__file__) + self.MAIN_FILE_dir = os.path.dirname(self.MAIN_FILE) + + + # OS DETECTION + self.OS = self.get_os() + + + # RUNNING SERVER STATS + self.ftp_dir = self.get_default_dir() + self.dev_mode = False + self.ASSETS = False # if you want to use assets folder, set this to True + self.ASSETS_dir = os.path.join(self.MAIN_FILE_dir, "/../assets/") + self.reload = False + + + self.disabled_func = { + "reload": False, + } + + # TEMP FILE MAPPING + self.temp_file = set() + + # CLEAN TEMP FILES ON EXIT + atexit.register(self.clear_temp) + + + # ASSET MAPPING + self.file_list = {} + + # COMMANDLINE ARGUMENTS PARSER + self.parser = argparse.ArgumentParser(add_help=False) + + + def clear_temp(self): + for i in self.temp_file: + try: + os.remove(i) + except: + pass + + + + def get_os(self): + from platform import system as platform_system + + out = platform_system() + if out=="Linux": + if hasattr(sys, 'getandroidapilevel'): + #self.IP = "192.168.43.1" + return 'Android' + + return out + + def get_default_dir(self): + return './' + + + def address(self): + return "http://%s:%i"%(self.IP, self.port) + + def parse_default_args(self, port = None, directory = None, bind = None, ): + if port is None: + port = self.port + if directory is None: + directory = self.ftp_dir + if bind is None: + bind = None + + parser = self.parser + + parser.add_argument('--bind', '-b', + metavar='ADDRESS', default=bind, + help='Specify alternate bind address ' + '[default: all interfaces]') + 
parser.add_argument('--directory', '-d', default=directory, + help='Specify alternative directory ' + '[default: current directory]') + parser.add_argument('port', action='store', + default=port, type=int, + nargs='?', + help='Specify alternate port [default: 8000]') + parser.add_argument('--version', '-v', action='version', + version=__version__) + + self.parser.add_argument('-h', '--help', action='help', + default='==SUPPRESS==', + help=('show this help message and exit')) + + args = parser.parse_known_args()[0] + + return args + + + + + + + + + +import datetime +import email.utils +import html +import http.client +import io +import mimetypes +import posixpath +import shutil +import socket # For gethostbyaddr() +import socketserver +import sys +import time +import urllib.parse +import urllib.request +import contextlib +from functools import partial +from http import HTTPStatus + +import re +import base64 + +import random +import string +import json +import traceback + + + + +class Tools: + def __init__(self): + self.styles = { + "equal" : "=", + "star" : "*", + "hash" : "#", + "dash" : "-", + "udash": "_" + } + + def term_width(self): + return shutil.get_terminal_size()[0] + + def text_box(self, *text, style = "equal", sep=" "): + """ + Returns a string of text with a border around it. + """ + text = sep.join(map(str, text)) + term_col = shutil.get_terminal_size()[0] + + s = self.styles[style] if style in self.styles else style + tt = "" + for i in text.split('\n'): + tt += i.center(term_col) + '\n' + return (f"\n\n{s*term_col}\n{tt}{s*term_col}\n\n") + + def random_string(self, length=10): + letters = string.ascii_lowercase + return ''.join(random.choice(letters) for i in range(length)) + +tools = Tools() +config = Config() + + +class Callable_dict(dict): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.__dict__ = self + + def __call__(self, *key): + return all([i in self for i in key]) + + + + +def reload_server(): + """reload the server process from file""" + file = config.MAIN_FILE + logger.info("Reloading...\n"+ + " ".join( + ["RE-RUNNING: ", sys.executable, sys.executable, file, *sys.argv[1:]] + )) + try: + os.execl(sys.executable, sys.executable, file, *sys.argv[1:]) + except: + traceback.print_exc() + sys.exit(0) + +def null(*args, **kwargs): + pass + + + + +class Zfunc(object): + """Thread safe sequncial printing/queue task handler class""" + + __all__ = ["new", "update"] + def __init__(self, caller, store_return=False): + super().__init__() + + self.queue = Queue() + # stores [args, kwargs], ... + self.store_return = store_return + self.returner = Queue() + # queue to store return value if store_return enabled + + self.BUSY = False + + self.caller = caller + + def next(self): + """ check if any item in queje and call, if already running or queue empty, returns """ + if self.queue.empty() or self.BUSY: + return None + + self.BUSY = True + args, kwargs = self.queue.get() + + x = self.caller(*args, **kwargs) + if self.store_return: + self.returner.put(x) + + self.BUSY = False + + if not self.queue.empty(): + # will make the loop continue running + return True + + + def update(self, *args, **kwargs): + """ Uses xprint and parse string""" + + self.queue.put((args, kwargs)) + while self.next() is True: + # use while instead of recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion.... 
error + pass + + + + def new(self, caller, store_return=False): + self.__init__(caller=caller, store_return=store_return) + + + + +"""HTTP server classes. + +Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, +and CGIHTTPRequestHandler for CGI scripts. + +It does, however, optionally implement HTTP/1.1 persistent connections, +as of version 0.3. + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" + + + + +############################################## +# PAUSE AND RESUME FEATURE # +############################################## + +def copy_byte_range(infile, outfile, start=None, stop=None, bufsize=16*1024): + ''' + TO SUPPORT PAUSE AND RESUME FEATURE + Like shutil.copyfileobj, but only copy a range of the streams. + Both start and stop are inclusive. + ''' + if start is not None: infile.seek(start) + while 1: + to_read = min(bufsize, stop + 1 - infile.tell() if stop else bufsize) + buf = infile.read(to_read) + if not buf: + break + outfile.write(buf) + + +BYTE_RANGE_RE = re.compile(r'bytes=(\d+)-(\d+)?$') +def parse_byte_range(byte_range): + '''Returns the two numbers in 'bytes=123-456' or throws ValueError. + The last number or both numbers may be None. + ''' + if byte_range.strip() == '': + return None, None + + m = BYTE_RANGE_RE.match(byte_range) + if not m: + raise ValueError('Invalid byte range %s' % byte_range) + + #first, last = [x and int(x) for x in m.groups()] # + + first, last = map((lambda x: int(x) if x else None), m.groups()) + + if last and last < first: + raise ValueError('Invalid byte range %s' % byte_range) + return first, last + +#---------------------------x-------------------------------- + + + + +def URL_MANAGER(url:str): + """ + returns a tuple of (`path`, `query_dict`, `fragment`)\n + + `url` = `'/store?page=10&limit=15&price=ASC#dskjfhs'`\n + `path` = `'/store'`\n + `query_dict` = `{'page': ['10'], 'limit': ['15'], 'price': ['ASC']}`\n + `fragment` = `dskjfhs`\n + """ + + # url = '/store?page=10&limit=15&price#dskjfhs' + parse_result = urllib.parse.urlparse(url) + + + dict_result = Callable_dict(urllib.parse.parse_qs(parse_result.query, keep_blank_values=True)) + + return (parse_result.path, dict_result, parse_result.fragment) + + + +# Default error message template +DEFAULT_ERROR_MESSAGE = """ + + + + + + Error response + + +

Error response

+

Error code: %(code)d

+

Message: %(message)s.

+

Error code explanation: %(code)s - %(explain)s.

+ + +""" + +DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" + +class HTTPServer(socketserver.TCPServer): + + allow_reuse_address = True # Seems to make sense in testing environment + + def server_bind(self): + """Override server_bind to store the server name.""" + socketserver.TCPServer.server_bind(self) + host, port = self.server_address[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port + + +class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): + daemon_threads = True + + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + + """HTTP request handler base class. + + The various request details are stored in instance variables: + + - client_address is the client IP address in the form (host, + port); + + - command, path and version are the broken-down request line; + + - headers is an instance of email.message.Message (or a derived + class) containing the header information; + + - rfile is a file object open for reading positioned at the + start of the optional input data part; + + - wfile is a file object open for writing. + + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form + + Content-type: / + + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ + + # The Python system version, truncated to its first component. + sys_version = "Python/" + sys.version.split()[0] + + # The server software version. You may want to override this. + # The format is multiple whitespace-separated strings, + # where each string is of the form name[/version]. + server_version = "BaseHTTP/" + __version__ + + error_message_format = DEFAULT_ERROR_MESSAGE + error_content_type = DEFAULT_ERROR_CONTENT_TYPE + + # The default request version. This only affects responses up until + # the point where the request line is parsed, so it mainly decides what + # the client gets back when sending a malformed request line. + # Most web servers default to HTTP 0.9, i.e. don't send a status line. + default_request_version = "HTTP/0.9" + + def parse_request(self): + """Parse a request (internal). + + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. + + Return True for success, False for failure; on failure, any relevant + error response has already been sent back. + + """ + self.command = '' # set in case of error on the first line + self.request_version = version = self.default_request_version + self.close_connection = True + requestline = str(self.raw_requestline, 'iso-8859-1') + requestline = requestline.rstrip('\r\n') + self.requestline = requestline + words = requestline.split() + if len(words) == 0: + return False + + if len(words) >= 3: # Enough to determine protocol version + version = words[-1] + try: + if not version.startswith('HTTP/'): + raise ValueError + base_version_number = version.split('/', 1)[1] + version_number = base_version_number.split(".") + # RFC 2145 section 3.1 says there can be only one "." 
and + # - major and minor numbers MUST be treated as + # separate integers; + # - HTTP/2.4 is a lower version than HTTP/2.13, which in + # turn is lower than HTTP/12.3; + # - Leading zeros MUST be ignored by recipients. + if len(version_number) != 2: + raise ValueError + version_number = int(version_number[0]), int(version_number[1]) + except (ValueError, IndexError): + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad request version (%r)" % version) + return False + if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": + self.close_connection = False + if version_number >= (2, 0): + self.send_error( + HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, + "Invalid HTTP version (%s)" % base_version_number) + return False + self.request_version = version + + if not 2 <= len(words) <= 3: + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad request syntax (%r)" % requestline) + return False + command, path = words[:2] + if len(words) == 2: + self.close_connection = True + if command != 'GET': + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad HTTP/0.9 request type (%r)" % command) + return False + self.command, self.path = command, path + + + # gh-87389: The purpose of replacing '//' with '/' is to protect + # against open redirect attacks possibly triggered if the path starts + # with '//' because http clients treat //path as an absolute URI + # without scheme (similar to http://path) rather than a path. + if self.path.startswith('//'): + self.path = '/' + self.path.lstrip('/') # Reduce to a single / + + # Examine the headers and look for a Connection directive. + try: + self.headers = http.client.parse_headers(self.rfile, + _class=self.MessageClass) + except http.client.LineTooLong as err: + self.send_error( + HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, + "Line too long", + str(err)) + return False + except http.client.HTTPException as err: + self.send_error( + HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, + "Too many headers", + str(err) + ) + return False + + conntype = self.headers.get('Connection', "") + if conntype.lower() == 'close': + self.close_connection = True + elif (conntype.lower() == 'keep-alive' and + self.protocol_version >= "HTTP/1.1"): + self.close_connection = False + # Examine the headers and look for an Expect directive + expect = self.headers.get('Expect', "") + if (expect.lower() == "100-continue" and + self.protocol_version >= "HTTP/1.1" and + self.request_version >= "HTTP/1.1"): + if not self.handle_expect_100(): + return False + return True + + def handle_expect_100(self): + """Decide what to do with an "Expect: 100-continue" header. + + If the client is expecting a 100 Continue response, we must + respond with either a 100 Continue or a final response before + waiting for the request body. The default is to always respond + with a 100 Continue. You can behave differently (for example, + reject unauthorized requests) by overriding this method. + + This method should either return True (possibly after sending + a 100 Continue response) or send an error response and return + False. + + """ + self.send_response_only(HTTPStatus.CONTINUE) + self.end_headers() + return True + + def handle_one_request(self): + """Handle a single HTTP request. + + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. 
+ + """ + try: + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = '' + self.request_version = '' + self.command = '' + self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG) + return + if not self.raw_requestline: + self.close_connection = True + return + if not self.parse_request(): + # An error code has been sent, just exit + return + mname = 'do_' + self.command + if not hasattr(self, mname): + self.send_error( + HTTPStatus.NOT_IMPLEMENTED, + "Unsupported method (%r)" % self.command) + return + method = getattr(self, mname) + + url_path, query, fragment = URL_MANAGER(self.path) + self.url_path = url_path + self.query = query + self.fragment = fragment + + self.use_range = False + + + _hash = abs(hash((self.raw_requestline, tools.random_string(10)))) + self.req_hash = base64.b64encode(str(_hash).encode('ascii')).decode()[:10] + + _w = tools.term_width() + w = _w - len(str(self.req_hash)) -2 + w = w//2 + logger.info('='*w + f' {self.req_hash} ' + '='*w + '\n'+ + '\n'.join( + [f'{self.req_hash}|=>\t request\t: {self.command}', + f'{self.req_hash}|=>\t url \t: {url_path}', + f'{self.req_hash}|=>\t query \t: {query}', + f'{self.req_hash}|=>\t fragment\t: {fragment}', + f'{self.req_hash}|=>\t full url \t: {self.path}', + ]) + '\n'+ + '+'*w + f' {self.req_hash} ' + '+'*w + ) + + + + + try: + method() + except Exception: + traceback.print_exc() + + logger.info('-'*w + f' {self.req_hash} ' + '-'*w + '\n'+ + '#'*_w + ) + self.wfile.flush() #actually send the response if not already done. + except (TimeoutError, socket.timeout) as e: + #a read or a write timed out. Discard this connection + self.log_error("Request timed out:", e) + self.close_connection = True + return + + def handle(self): + """Handle multiple requests if necessary.""" + self.close_connection = True + + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + + def send_error(self, code, message=None, explain=None): + """Send and log an error reply. + + Arguments are + * code: an HTTP error code + 3 digits + * message: a simple optional 1 line reason phrase. + *( HTAB / SP / VCHAR / %x80-FF ) + defaults to short entry matching the response code + * explain: a detailed message defaults to the long entry + matching the response code. + + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ + + try: + shortmsg, longmsg = self.responses[code] + except KeyError: + shortmsg, longmsg = '???', '???' + if message is None: + message = shortmsg + if explain is None: + explain = longmsg + self.log_error("code", code, "message", message) + self.send_response(code, message) + self.send_header('Connection', 'close') + + # Message body is omitted for cases described in: + # - RFC7230: 3.3. 1xx, 204(No Content), 304(Not Modified) + # - RFC7231: 6.3.6. 
205(Reset Content) + body = None + if (code >= 200 and + code not in (HTTPStatus.NO_CONTENT, + HTTPStatus.RESET_CONTENT, + HTTPStatus.NOT_MODIFIED)): + # HTML encode to prevent Cross Site Scripting attacks + # (see bug #1100201) + content = (self.error_message_format % { + 'code': code, + 'message': html.escape(message, quote=False), + 'explain': html.escape(explain, quote=False) + }) + body = content.encode('UTF-8', 'replace') + self.send_header("Content-Type", self.error_content_type) + self.send_header('Content-Length', str(len(body))) + self.end_headers() + + if self.command != 'HEAD' and body: + self.wfile.write(body) + + def send_response(self, code, message=None): + """Add the response header to the headers buffer and log the + response code. + + Also send two standard headers with the server software + version and the current date. + + """ + self.log_request(code) + self.send_response_only(code, message) + self.send_header('Server', self.version_string()) + self.send_header('Date', self.date_time_string()) + + def send_response_only(self, code, message=None): + """Send the response header only.""" + if self.request_version != 'HTTP/0.9': + if message is None: + if code in self.responses: + message = self.responses[code][0] + else: + message = '' + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append(("%s %d %s\r\n" % + (self.protocol_version, code, message)).encode( + 'utf-8', 'strict')) + + def send_header(self, keyword, value): + """Send a MIME header to the headers buffer.""" + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append( + ("%s: %s\r\n" % (keyword, value)).encode('utf-8', 'strict')) + + if keyword.lower() == 'connection': + if value.lower() == 'close': + self.close_connection = True + elif value.lower() == 'keep-alive': + self.close_connection = False + + def end_headers(self): + """Send the blank line ending the MIME headers.""" + if self.request_version != 'HTTP/0.9': + self._headers_buffer.append(b"\r\n") + self.flush_headers() + + def flush_headers(self): + if hasattr(self, '_headers_buffer'): + self.wfile.write(b"".join(self._headers_buffer)) + self._headers_buffer = [] + + def log_request(self, code='-', size='-'): + """Log an accepted request. + + This is called by send_response(). + + """ + if isinstance(code, HTTPStatus): + code = code.value + self.log_message(f'"{self.requestline}"', code, size) + + def log_error(self, *args): + """Log an error. [ERROR PRIORITY] + + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). + + Arguments are the same as for log_message(). + + XXX This should go to the separate error log. 
+ + """ + self.log_message(args, error = True) + + def log_warning(self, *args): + """Log a warning message [HIGH PRIORITY]""" + self.log_message(args, warning = True) + + def log_debug(self, *args, write = True): + """Log a debug message [LOWEST PRIORITY]""" + self.log_message(args, debug = True, write = write) + + def log_info(self, *args, write = False): + """Default log message [MEDIUM PRIORITY]""" + self.log_message(args, write = write) + + def _log_writer(self, message): + os.makedirs(config.log_location, exist_ok=True) + with open(config.log_location + 'log.txt','a+') as f: + f.write((f"#{self.req_hash} by [{self.address_string()}] at [{self.log_date_time_string()}]|=> {message}\n")) + + + + def log_message(self, *args, error = False, warning = False, debug = False, write = True): + """Log an arbitrary message. + + This is used by all other logging functions. Override + it if you have specific logging wishes. + + The client ip and current date/time are prefixed to + every message. + + """ + + message = ' '.join(map(str, args)) + + message = ("# %s by [%s] at [%s] %s\n" % + (self.req_hash, self.address_string(), + self.log_date_time_string(), + message)) + if error: + logger.error(message) + elif warning: + logger.warning(message) + elif debug: + logger.debug(message) + else: + logger.info(message) + + + if not config.write_log: + return + + if not hasattr(self, "Zlog_writer"): + self.Zlog_writer = Zfunc(self._log_writer) + + try: + self.Zlog_writer.update(message) + except Exception: + traceback.print_exc() + + + def version_string(self): + """Return the server software version string.""" + return self.server_version + ' ' + self.sys_version + + def date_time_string(self, timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + return email.utils.formatdate(timestamp, usegmt=True) + + def log_date_time_string(self): + """Return the current time formatted for logging.""" + now = time.time() + year, month, day, hh, mm, ss, x, y, z = time.localtime(now) + s = "%02d/%3s/%04d %02d:%02d:%02d" % ( + day, self.monthname[month], year, hh, mm, ss) + return s + + weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + + monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + + def address_string(self): + """Return the client address.""" + + return self.client_address[0] + + # Essentially static class variables + + # The version of the HTTP protocol we support. + # Set this to HTTP/1.1 to enable automatic keepalive + protocol_version = "HTTP/1.0" + + # MessageClass used to parse headers + MessageClass = http.client.HTTPMessage + + # hack to maintain backwards compatibility + responses = { + v: (v.phrase, v.description) + for v in HTTPStatus.__members__.values() + } + + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + + """Simple HTTP request handler with GET and HEAD commands. + + This serves files from the current directory and any of its + subdirectories. The MIME type for files is determined by + calling the .guess_type() method. + + The GET and HEAD requests are identical except that the HEAD + request omits the actual contents of the file. 
+ + """ + + server_version = "SimpleHTTP/" + __version__ + + if not mimetypes.inited: + mimetypes.init() # try to read system mime.types + extensions_map = mimetypes.types_map.copy() + extensions_map.update({ + '': 'application/octet-stream', # Default + '.py': 'text/plain', + '.c': 'text/plain', + '.h': 'text/plain', + '.css': 'text/css', + + '.gz': 'application/gzip', + '.Z': 'application/octet-stream', + '.bz2': 'application/x-bzip2', + '.xz': 'application/x-xz', + + '.webp': 'image/webp', + + 'opus': 'audio/opus', + '.oga': 'audio/ogg', + '.wav': 'audio/wav', + + '.ogv': 'video/ogg', + '.ogg': 'application/ogg', + 'm4a': 'audio/mp4', + }) + + handlers = { + 'HEAD': [], + 'POST': [], + } + + def __init__(self, *args, directory=None, **kwargs): + if directory is None: + directory = os.getcwd() + self.directory = os.fspath(directory) # same as directory, but str, new in 3.6 + super().__init__(*args, **kwargs) + self.query = Callable_dict() + + + def do_GET(self): + """Serve a GET request.""" + try: + f = self.send_head() + except Exception as e: + traceback.print_exc() + self.send_error(500, str(e)) + return + + if f: + try: + self.copyfile(f, self.wfile) + except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: + self.log_info(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) + finally: + f.close() + + def do_(self): + '''incase of errored request''' + self.send_error(HTTPStatus.BAD_REQUEST, "Bad request.") + + + @staticmethod + def on_req(type='', url='', hasQ=(), QV={}, fragent='', url_regex = '', func=null): + '''called when request is received + type: GET, POST, HEAD, ... + url: url (must start with /) + hasQ: if url has query + QV: match query value + fragent: fragent of request + url_regex: url regex (must start with /) url regex, the url must start and end with this regex + + if query is tuple, it will only check existence of key + if query is dict, it will check value of key + ''' + self = __class__ + + type = type.upper() + if type == 'GET': + type = 'HEAD' + + + if type not in self.handlers: + self.handlers[type] = [] + + # FIXING TYPE ISSUE + if isinstance(hasQ, str): + hasQ = (hasQ,) + + if url=='' and url_regex=='': + url_regex = '.*' + + + to_check = (url, hasQ, QV, fragent, url_regex) + + def decorator(func): + self.handlers[type].append((to_check, func)) + return func + return decorator + + def test_req(self, url='', hasQ=(), QV={}, fragent='', url_regex=''): + '''test if request is matched' + + args: + url: url relative path (must start with /) + hasQ: if url has query + QV: match query value + fragent: fragent of request + url_regex: url regex, the url must start and end with this regex + + + ''' + if url_regex: + if not re.search("^"+url_regex+'$', self.url_path): return False + elif url and url!=self.url_path: return False + + if isinstance(hasQ, str): + hasQ = (hasQ,) + + if hasQ and self.query(*hasQ)==False: return False + if QV: + for k, v in QV.items(): + if not self.query(k): return False + if self.query[k] != v: return False + + if fragent and self.fragment != fragent: return False + + return True + + def do_HEAD(self): + """Serve a HEAD request.""" + try: + f = self.send_head() + except Exception as e: + traceback.print_exc() + self.send_error(500, str(e)) + return + + if f: + f.close() + + def do_POST(self): + """Serve a POST request.""" + self.range = None, None + + + path = self.translate_path(self.path) + # DIRECTORY DONT CONTAIN SLASH / AT END + + url_path, query, fragment = self.url_path, self.query, 
self.fragment + spathsplit = self.url_path.split("/") + + + try: + for case, func in self.handlers['POST']: + if self.test_req(*case): + try: + f = func(self, url_path=url_path, query=query, fragment=fragment, path=path, spathsplit=spathsplit) + except PostError: + traceback.print_exc() + break # break if error is raised and send BAD_REQUEST (at end of loop) + + if f: + try: + self.copyfile(f, self.wfile) + except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: + logger.info(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) + finally: + f.close() + return + + + + return self.send_error(HTTPStatus.BAD_REQUEST, "Invalid request.") + + except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: + logger.info(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) + return + except Exception as e: + traceback.print_exc() + self.send_error(500, str(e)) + return + + + def redirect(self, location): + '''redirect to location''' + self.send_response(HTTPStatus.FOUND) + self.send_header("Location", location) + self.end_headers() + + def return_txt(self, code, msg, content_type="text/html; charset=utf-8"): + '''returns only the head to client + and returns a file object to be used by copyfile''' + self.log_debug(f'[RETURNED] {code} to client') + if not isinstance(msg, bytes): + encoded = msg.encode('utf-8', 'surrogateescape') + else: + encoded = msg + + f = io.BytesIO() + f.write(encoded) + f.seek(0) + + self.send_response(code) + self.send_header("Content-type", content_type) + self.send_header("Content-Length", str(len(encoded))) + self.end_headers() + return f + + def send_txt(self, code, msg, content_type="text/html; charset=utf-8"): + '''sends the head and file to client''' + f = self.return_txt(code, msg, content_type) + if self.command == "HEAD": + return # to avoid sending file on get request + self.copyfile(f, self.wfile) + f.close() + + def send_json(self, obj): + """send object as json + obj: json-able object or json.dumps() string""" + if not isinstance(obj, str): + obj = json.dumps(obj, indent=1) + f = self.return_txt(200, obj, content_type="application/json") + if self.command == "HEAD": + return # to avoid sending file on get request + self.copyfile(f, self.wfile) + f.close() + + def return_file(self, path, filename=None, download=False): + f = None + is_attachment = "attachment;" if (self.query("dl") or download) else "" + + first, last = 0, None + + try: + ctype = self.guess_type(path) + + f = open(path, 'rb') + fs = os.fstat(f.fileno()) + + file_len = fs[6] + # Use browser cache if possible + if ("If-Modified-Since" in self.headers + and "If-None-Match" not in self.headers): + # compare If-Modified-Since and time of last file modification + try: + ims = email.utils.parsedate_to_datetime( + self.headers["If-Modified-Since"]) + except (TypeError, IndexError, OverflowError, ValueError): + # ignore ill-formed values + pass + else: + if ims.tzinfo is None: + # obsolete format with no timezone, cf. 
+ # https://tools.ietf.org/html/rfc7231#section-7.1.1.1 + ims = ims.replace(tzinfo=datetime.timezone.utc) + if ims.tzinfo is datetime.timezone.utc: + # compare to UTC datetime of last modification + last_modif = datetime.datetime.fromtimestamp( + fs.st_mtime, datetime.timezone.utc) + # remove microseconds, like in If-Modified-Since + last_modif = last_modif.replace(microsecond=0) + + if last_modif <= ims: + self.send_response(HTTPStatus.NOT_MODIFIED) + self.end_headers() + f.close() + + return None + + if self.use_range: + first = self.range[0] + if first is None: + first = 0 + last = self.range[1] + if last is None or last >= file_len: + last = file_len - 1 + + if first >= file_len: # PAUSE AND RESUME SUPPORT + self.send_error(416, 'Requested Range Not Satisfiable') + return None + + self.send_response(206) + self.send_header('Content-Type', ctype) + self.send_header('Accept-Ranges', 'bytes') + + + response_length = last - first + 1 + + self.send_header('Content-Range', + 'bytes %s-%s/%s' % (first, last, file_len)) + self.send_header('Content-Length', str(response_length)) + + + + else: + self.send_response(HTTPStatus.OK) + self.send_header("Content-Type", ctype) + self.send_header("Content-Length", str(file_len)) + + self.send_header("Last-Modified", + self.date_time_string(fs.st_mtime)) + self.send_header("Content-Disposition", is_attachment+'filename="%s"' % (os.path.basename(path) if filename is None else filename)) + self.end_headers() + + return f + + except PermissionError: + self.send_error(HTTPStatus.FORBIDDEN, "Permission denied") + return None + + except OSError: + self.send_error(HTTPStatus.NOT_FOUND, "File not found") + return None + + + except Exception: + traceback.print_exc() + + # if f and not f.closed(): f.close() + raise + + def send_file(self, path, filename=None, download=False): + '''sends the head and file to client''' + f = self.return_file(path, filename, download) + if self.command == "HEAD": + return # to avoid sending file on get request + try: + self.copyfile(f, self.wfile) + finally: + f.close() + + + + def send_head(self): + """Common code for GET and HEAD commands. + + This sends the response code and MIME headers. + + Return value is either a file object (which has to be copied + to the outputfile by the caller unless the command was HEAD, + and must be closed by the caller under all circumstances), or + None, in which case the caller has nothing further to do. + + """ + + if 'Range' not in self.headers: + self.range = None, None + first, last = 0, 0 + + else: + try: + self.range = parse_byte_range(self.headers['Range']) + first, last = self.range + self.use_range = True + except ValueError as e: + self.send_error(400, 'Invalid byte range') + return None + + path = self.translate_path(self.path) + # DIRECTORY DONT CONTAIN SLASH / AT END + + + url_path, query, fragment = self.url_path, self.query, self.fragment + + spathsplit = self.url_path.split("/") + + + + for case, func in self.handlers['HEAD']: # GET WILL Also BE HANDLED BY HEAD + if self.test_req(*case): + return func(self, url_path=url_path, query=query, fragment=fragment, path=path, spathsplit=spathsplit) + + return self.send_error(HTTPStatus.NOT_FOUND, "File not found") + + + + + def get_displaypath(self, url_path): + """ + Helper to produce a display path for the directory listing. 
+ """ + + try: + displaypath = urllib.parse.unquote(url_path, errors='surrogatepass') + except UnicodeDecodeError: + displaypath = urllib.parse.unquote(url_path) + displaypath = html.escape(displaypath, quote=False) + + return displaypath + + + + + + + def get_rel_path(self, filename): + """Return the relative path to the file, FOR OS.""" + return urllib.parse.unquote(posixpath.join(self.url_path, filename), errors='surrogatepass') + + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = path.split('?',1)[0] + path = path.split('#',1)[0] + # Don't forget explicit trailing slash when normalizing. Issue17324 + trailing_slash = path.rstrip().endswith('/') + + try: + path = urllib.parse.unquote(path, errors='surrogatepass') + except UnicodeDecodeError: + path = urllib.parse.unquote(path) + path = posixpath.normpath(path) + words = path.split('/') + words = filter(None, words) + path = self.directory + + + for word in words: + if os.path.dirname(word) or word in (os.curdir, os.pardir): + # Ignore components that are not a simple file/directory name + continue + path = os.path.join(path, word) + if trailing_slash: + path += '/' + + return os.path.normpath(path) # fix OS based path issue + + def copyfile(self, source, outputfile): + """Copy all data between two file objects. + + The SOURCE argument is a file object open for reading + (or anything with a read() method) and the DESTINATION + argument is a file object open for writing (or + anything with a write() method). + + The only reason for overriding this would be to change + the block size or perhaps to replace newlines by CRLF + -- note however that this the default server uses this + to copy binary data as well. + + """ + + + if not self.range: + try: + source.read(1) + except: + traceback.print_exc() + source.seek(0) + shutil.copyfileobj(source, outputfile) + + else: + # SimpleHTTPRequestHandler uses shutil.copyfileobj, which doesn't let + # you stop the copying before the end of the file. + start, stop = self.range # set in send_head() + copy_byte_range(source, outputfile, start, stop) + + + def guess_type(self, path): + """Guess the type of a file. + + Argument is a PATH (a filename). + + Return value is a string of the form type/subtype, + usable for a MIME Content-type header. + + The default implementation looks the file's extension + up in the table self.extensions_map, using application/octet-stream + as a default; however it would be permissible (if + slow) to look inside the data to make a better guess. 
+ + """ + + base, ext = posixpath.splitext(path) + if ext in self.extensions_map: + return self.extensions_map[ext] + ext = ext.lower() + if ext in self.extensions_map: + return self.extensions_map[ext] + guess, _ = mimetypes.guess_type(path) + if guess: + return guess + + return self.extensions_map[''] #return 'application/octet-stream' + + + +class PostError(Exception): + pass + + + + +class DealPostData: + """do_login + +#get starting boundary +0: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' +1: b'Content-Disposition: form-data; name="post-type"\r\n' +2: b'\r\n' +3: b'login\r\n' +4: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' +5: b'Content-Disposition: form-data; name="username"\r\n' +6: b'\r\n' +7: b'xxx\r\n' +8: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' +9: b'Content-Disposition: form-data; name="password"\r\n' +10: b'\r\n' +11: b'ccc\r\n' +12: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa--\r\n' +""" + + + boundary = b'' + num = 0 + blank = 0 # blank is used to check if the post is empty or Connection Aborted + remainbytes = 0 + + def __init__(self, req:SimpleHTTPRequestHandler) -> None: + self.req = req + + + refresh = "

" + + + def get(self, show=F, strip=F): + """ + show: print line + strip: strip \r\n at end + """ + req = self.req + line = req.rfile.readline() + + if line == b'': + self.blank += 1 + else: + self.blank = 0 + if self.blank>=20: # allow 20 loss packets + req.send_error(408, "Request Timeout") + time.sleep(1) # wait for the client to close the connection + + raise ConnectionAbortedError + if show: + self.num+=1 + print(f"{self.num}: {line}") + self.remainbytes -= len(line) + + if strip and line.endswith(b"\r\n"): + line = line.rpartition(b"\r\n")[0] + + return line + + def pass_bound(self): + line = self.get() + if not self.boundary in line: + self.req.log_error("Content NOT begin with boundary\n", [line, self.boundary]) + + def get_name(self, line=None, ): + if not line: + line = self.get() + try: + return re.findall(r'Content-Disposition.*name="(.*?)"', line.decode())[0] + except: return None + + def match_name(self, field_name:Union[None, str]=None): + """ + field_name: name of the field (str) + * if None, skip checking field name + * if `empty string`, field name must be empty too + """ + line = self.get() + + if field_name is not None and self.get_name(line)!=field_name: + raise PostError(f"Invalid request: Expected {field_name} but got {self.get_name(line)}") + + return line + + + def skip(self,): + self.get() + + def start(self): + '''reads upto line 0''' + req = self.req + content_type = req.headers['content-type'] + + if not content_type: + raise PostError("Content-Type header doesn't contain boundary") + self.boundary = content_type.split("=")[1].encode() + + self.remainbytes = int(req.headers['content-length']) + + + self.pass_bound()# LINE 0 + + + def get_part(self, verify_name:Union[None, bytes, str] =None, verify_msg:Union[None, bytes, str] =None, decode=F): + '''read a form field + ends at boundary + verify_name: name of the field (str|bytes|None) + verify_msg: message to verify (str|bytes) + decode: decode the message + * if None, skip checking field name + * if `empty string`, field name must be empty too''' + decoded = False + + if isinstance(verify_name, bytes): + verify_name = verify_name.decode() + + field_name = self.match_name(verify_name) # LINE 1 (field name) + # if not verified, raise PostError + + self.skip() # LINE 2 (blank line) + + line = b'' + while 1: + _line = self.get() # from LINE 4 till boundary (form field value) + if self.boundary in _line: # boundary + break + line += _line + + line = line.rpartition(b"\r\n")[0] # remove \r\n at end + if decode: + line = line.decode() + decoded = True + if verify_msg is not None: + if not decoded: + if isinstance(verify_msg, str): + verify_msg = verify_msg.encode() + + if line != verify_msg: + raise PostError(f"Invalid post request.\n Expected: {[verify_msg]}\n Got: {[line]}") + + # self.pass_bound() # LINE 5 (boundary) + + return field_name, line + + + + + + +def _get_best_family(*address): + infos = socket.getaddrinfo( + *address, + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE + ) + family, type, proto, canonname, sockaddr = next(iter(infos)) + return family, sockaddr + +def get_ip(bind=None): + IP = bind # or "127.0.0.1" + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.settimeout(0) + try: + # doesn't even have to be reachable + s.connect(('10.255.255.255', 1)) + IP = s.getsockname()[0] + except: + try: + if config.OS=="Android": + IP = s.connect(("192.168.43.1", 1)) + IP = s.getsockname()[0] + # Assigning this variable because Android does't return actual IP when hosting a hotspot + except 
(socket.herror, OSError): + pass + finally: + s.close() + return IP + + +def test(HandlerClass=BaseHTTPRequestHandler, + ServerClass=ThreadingHTTPServer, + protocol="HTTP/1.0", port=8000, bind=None): + """Test the HTTP request handler class. + + This runs an HTTP server on port 8000 (or the port argument). + + """ + + global httpd + if sys.version_info>=(3,8): # BACKWARD COMPATIBILITY + ServerClass.address_family, addr = _get_best_family(bind, port) + else: + addr =(bind if bind!=None else '', port) + + device_ip = bind or "127.0.0.1" + # bind can be None (=> 127.0.0.1) or a string (=> 127.0.0.DDD) + + HandlerClass.protocol_version = protocol + httpd = ServerClass(addr, HandlerClass) + host, port = httpd.socket.getsockname()[:2] + url_host = f'[{host}]' if ':' in host else host + hostname = socket.gethostname() + local_ip = config.IP if config.IP else get_ip(device_ip) + config.IP= local_ip + + + on_network = local_ip!=(device_ip) + + logger.info(tools.text_box( + f"Serving HTTP on {host} port {port} \n", #TODO: need to check since the output is "Serving HTTP on :: port 6969" + f"(http://{url_host}:{port}/) ...\n", #TODO: need to check since the output is "(http://[::]:6969/) ..." + f"Server is probably running on\n", + (f"[over NETWORK] {config.address()}\n" if on_network else ""), + f"[on DEVICE] http://localhost:{config.port} & http://127.0.0.1:{config.port}" + , style="star", sep="" + ) + ) + try: + httpd.serve_forever(poll_interval=0.1) + except KeyboardInterrupt: + logger.info("\nKeyboard interrupt received, exiting.") + + except OSError: + logger.info("\nOSError received, exiting.") + finally: + if not config.reload: + sys.exit(0) + + +class DualStackServer(ThreadingHTTPServer): # UNSUPPORTED IN PYTHON 3.7 + + def handle_error(self, request, client_address): + pass + + def server_bind(self): + # suppress exception when protocol is IPv4 + with contextlib.suppress(Exception): + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) + return super().server_bind() + + def finish_request(self, request, client_address): + self.RequestHandlerClass(request, client_address, self, + directory=config.ftp_dir) + + + + +def run(port = None, directory = None, bind = None, arg_parse= True, handler = SimpleHTTPRequestHandler): + + if arg_parse: + args = config.parse_default_args(port=port, directory=directory, bind=bind) + + port = args.port + directory = args.directory + bind = args.bind + + + + logger.info(tools.text_box("Running pyroboxCore: ", config.MAIN_FILE, "Version: ", __version__)) + + + if directory == config.ftp_dir and not os.path.isdir(config.ftp_dir): + logger.warning(config.ftp_dir, "not found!\nReseting directory to current directory") + directory = "." 
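For context, a hedged usage sketch of this run() entry point; the import path is assumed from the `from .pyroboxCore import ...` line later in this patch and may differ in a local checkout:

# launch pyroboxCore programmatically instead of through the CLI
from pyrobox.pyroboxCore import SimpleHTTPRequestHandler, run

# arg_parse=False keeps run() from re-parsing sys.argv, so the explicit
# port/directory/bind values below are exactly what the server uses
run(port=8080, directory='.', bind=None, arg_parse=False,
    handler=SimpleHTTPRequestHandler)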
+ + handler_class = partial(handler, + directory=directory) + + config.port = port + config.ftp_dir = directory + + if not config.reload: + if sys.version_info>(3,8): + test( + HandlerClass=handler_class, + ServerClass=DualStackServer, + port=port, + bind=bind, + ) + else: # BACKWARD COMPATIBILITY + test( + HandlerClass=handler_class, + ServerClass=ThreadingHTTPServer, + port=port, + bind=bind, + ) + + + if config.reload == True: + reload_server() + + + + +if __name__ == '__main__': + run() diff --git a/src/server.py b/src/server.py index a72f511..de90731 100644 --- a/src/server.py +++ b/src/server.py @@ -1,79 +1,56 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -__version__ = "0.6.9" +__version__ = "0.7.0" enc = "utf-8" -__all__ = [ - "HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", - "SimpleHTTPRequestHandler", - -] +import html +import io import os -import atexit -import logging -from queue import Queue - -logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s') - -logger = logging.getLogger(__name__) -logger.setLevel(logging.WARNING) -# set INFO to see all the requests -# set WARNING to see only the requests that made change to the server -# set ERROR to see only the requests that made the errors - - - -endl = "\n" -T = t = true = True # too lazy to type -F = f = false = False # too lazy to type +import sys +import posixpath +import shutil -class Config: - def __init__(self): - # DEFAULT DIRECTORY TO LAUNCH SERVER - self.ftp_dir = "." # DEFAULT DIRECTORY TO LAUNCH SERVER - # some common default dir, but not used - self.ANDROID_ftp_dir = "/storage/emulated/0/" - self.LINUX_ftp_dir = "~/" - self.WIN_ftp_dir= 'D:\\' +import time +import datetime - self.IP = None # will be assigned by checking +from queue import Queue +import importlib.util +import re - # DEFAULT PORT TO LAUNCH SERVER - self.port= 6969 # DEFAULT PORT TO LAUNCH SERVER +import urllib.parse +import urllib.request - # UPLOAD PASSWORD SO THAT ANYONE RANDOM CAN'T UPLOAD - self.PASSWORD= "SECret" +from string import Template as _Template # using this because js also use {$var} and {var} syntax and py .format is often unsafe +import threading - # LOGGING - self.log_location = "./" # fallback log_location = "./" - self.allow_web_log = True # if you want to see some important LOG in browser, may contain your important information - self.write_log = False # if you want to write log to file +import subprocess +import tempfile +import random +import string +import json +from http import HTTPStatus - # ZIP FEATURES - self.default_zip = "zipfile" # or "zipfile" to use python built in zip module +import traceback +import atexit - # CHECK FOR MISSING REQUEIREMENTS - self.run_req_check = True +from .pyroboxCore import config, logger, SimpleHTTPRequestHandler as SH, DealPostData as DPD, run as run_server, tools, Callable_dict, reload_server - # FILE INFO - self.MAIN_FILE = os.path.realpath(__file__) - self.MAIN_FILE_dir = os.path.dirname(self.MAIN_FILE) +true = T = True +false = F = False - # OS DETECTION - self.OS = self.get_os() +config.parser.add_argument('--password', '-k', + default=config.PASSWORD, + type=str, + help='Upload Password (default: %(default)s)') - # RUNNING SERVER STATS - self.ftp_dir = self.get_default_dir() - self.dev_mode = False - self.ASSETS = False # if you want to use assets folder, set this to True - self.ASSETS_dir = os.path.join(self.MAIN_FILE_dir, "/../assets/") - self.reload = False +args = config.parser.parse_known_args()[0] +config.PASSWORD = args.password - 
self.disabled_func = { +config.disabled_func.update({ "send2trash": False, "natsort": False, "zip": False, @@ -83,137 +60,7 @@ def __init__(self): "upload": False, "new_folder": False, "rename": False, - "reload": False, - } - - # TEMP FILE MAPPING - self.temp_file = set() - - # CLEAN TEMP FILES ON EXIT - atexit.register(self.clear_temp) - - - # ASSET MAPPING - self.file_list = {} - - def clear_temp(self): - for i in self.temp_file: - try: - os.remove(i) - except: - pass - - - - def get_os(self): - from platform import system as platform_system - - out = platform_system() - if out=="Linux": - if hasattr(sys, 'getandroidapilevel'): - #self.IP = "192.168.43.1" - return 'Android' - - return out - - def get_default_dir(self): - return os.getcwd() # ignoring OS based default dir - - # OS = self.OS - # if OS=='Windows': - # return self.WIN_ftp_dir - # elif OS=='Linux': - # return self.LINUX_ftp_dir - # elif OS=='Android': - # return self.ANDROID_ftp_dir - - - # return './' - - - def address(self): - return "http://%s:%i"%(self.IP, self.port) - - - - - -import datetime -import email.utils -import html -import http.client -import io -import mimetypes -import posixpath -import shutil -import socket # For gethostbyaddr() -import socketserver -import sys -import time -import urllib.parse -import urllib.request -import contextlib -from functools import partial -from http import HTTPStatus - -import importlib.util -import re -import base64 - -from string import Template as _Template # using this because js also use {$var} and {var} syntax and py .format is often unsafe -import threading - -import subprocess -import tempfile, random, string, json - - -import traceback - - - - -class Tools: - def __init__(self): - self.styles = { - "equal" : "=", - "star" : "*", - "hash" : "#", - "dash" : "-", - "udash": "_" - } - - def term_width(self): - return shutil.get_terminal_size()[0] - - def text_box(self, *text, style = "equal", sep=" "): - """ - Returns a string of text with a border around it. 
- """ - text = sep.join(map(str, text)) - term_col = shutil.get_terminal_size()[0] - - s = self.styles[style] if style in self.styles else style - tt = "" - for i in text.split('\n'): - tt += i.center(term_col) + '\n' - return (f"\n\n{s*term_col}\n{tt}{s*term_col}\n\n") - - def random_string(self, length=10): - letters = string.ascii_lowercase - return ''.join(random.choice(letters) for i in range(length)) - -tools = Tools() -config = Config() - - -class Callable_dict(dict): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.__dict__ = self - - def __call__(self, *key): - return all([i in self for i in key]) - +}) class LimitExceed(Exception): def __init__(self, *args, **kwargs): @@ -289,22 +136,6 @@ def run_update(): return False - -def reload_server(): - """reload the server process from file""" - file = config.MAIN_FILE - print("Reloading...") - # print(sys.executable, config.MAIN_FILE, *sys.argv[1:]) - try: - os.execl(sys.executable, sys.executable, file, *sys.argv[1:]) - except: - traceback.print_exc() - sys.exit(0) - -def null(*args, **kwargs): - pass - - ############################################# # FILE HANDLER # ############################################# @@ -609,25 +440,217 @@ def get_dir_m_time(path): -def list_dir(start_path = '.', full_dir=True, both=False): - b =[] - p =[] - for dirpath, dirnames, filenames in os.walk(start_path, onerror= print): - for f in filenames: - fp = os.path.join(dirpath, f) - if both: - b.append((fp, fp.replace(start_path, "", 1))) - elif full_dir: - p.append(fp) +def get_titles(path, file=False): + """Make titles for the header directory + path: the path of the file or directory + file: if True, path is a file, else it's a directory + + output: `Viewing NAME`""" + + paths = path.split('/') + if file: + return 'Viewing ' + paths[-1] + if paths[-2]=='': + return 'Viewing 🏠 HOME' + else: + return 'Viewing ' + paths[-2] + + + +def dir_navigator(path): + """Makes each part of the header directory accessible like links + just like file manager, but with less CSS""" + + dirs = re.sub("/{2,}", "/", path).split('/') + urls = ['/'] + names = ['🏠 HOME'] + r = [] + + for i in range(1, len(dirs)-1): + dir = dirs[i] + urls.append(urls[i-1] + urllib.parse.quote(dir, errors='surrogatepass' )+ '/' if not dir.endswith('/') else "") + names.append(dir) + + for i in range(len(names)): + tag = "" + names[i] + "" + r.append(tag) + + return ''.join(r) + + + +def list_directory_json(self:SH, path=None): + """Helper to produce a directory listing (JSON). 
+ Return json file of available files and folders""" + if path == None: + path = self.translate_path(self.path) + + try: + dir_list = scansort(os.scandir(path)) + except OSError: + self.send_error( + HTTPStatus.NOT_FOUND, + "No permission to list directory") + return None + dir_dict = [] + + + for file in dir_list: + name = file.name + displayname = linkname = name + + + if file.is_dir(): + displayname = name + "/" + linkname = name + "/" + elif file.is_symlink(): + displayname = name + "@" + + dir_dict.append([urllib.parse.quote(linkname, errors='surrogatepass'), + html.escape(displayname, quote=False)]) + + encoded = json.dumps(dir_dict).encode("utf-8", 'surrogateescape') + f = io.BytesIO() + f.write(encoded) + f.seek(0) + self.send_response(HTTPStatus.OK) + self.send_header("Content-type", "application/json; charset=%s" % "utf-8") + self.send_header("Content-Length", str(len(encoded))) + self.end_headers() + return f + + + +def list_directory(self:SH, path): + """Helper to produce a directory listing (absent index.html). + + Return value is either a file object, or None (indicating an + error). In either case, the headers are sent, making the + interface the same as for send_head(). + + """ + + try: + dir_list = scansort(os.scandir(path)) + except OSError: + self.send_error( + HTTPStatus.NOT_FOUND, + "No permission to list directory") + return None + r = [] + + displaypath = self.get_displaypath(self.url_path) + + + title = get_titles(displaypath) + + + r.append(directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, + PY_PUBLIC_URL=config.address(), + PY_DIR_TREE_NO_JS=dir_navigator(displaypath))) + + r_li= [] # type + file_link + # f : File + # d : Directory + # v : Video + # h : HTML + f_li = [] # file_names + s_li = [] # size list + + r_folders = [] # no js + r_files = [] # no js + + + LIST_STRING = '
  • %s

  • ' + + r.append(""" +
    +
      + + """) + + + # r.append("""🔙 {Prev folder}""") + for file in dir_list: + #fullname = os.path.join(path, name) + name = file.name + displayname = linkname = name + size=0 + # Append / for directories or @ for symbolic links + _is_dir_ = True + if file.is_dir(): + displayname = name + "/" + linkname = name + "/" + elif file.is_symlink(): + displayname = name + "@" + else: + _is_dir_ =False + size = fmbytes(file.stat().st_size) + __, ext = posixpath.splitext(name) + if ext=='.html': + r_files.append(LIST_STRING % ("link", urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname, quote=False))) + + r_li.append('h'+ urllib.parse.quote(linkname, errors='surrogatepass')) + f_li.append(html.escape(displayname, quote=False)) + + elif self.guess_type(linkname).startswith('video/'): + r_files.append(LIST_STRING % ("vid", urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname, quote=False))) + + r_li.append('v'+ urllib.parse.quote(linkname, errors='surrogatepass')) + f_li.append(html.escape(displayname, quote=False)) + + elif self.guess_type(linkname).startswith('image/'): + r_files.append(LIST_STRING % ("file", urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname, quote=False))) + + r_li.append('i'+ urllib.parse.quote(linkname, errors='surrogatepass')) + f_li.append(html.escape(displayname, quote=False)) + else: - p.append(fp.replace(start_path, "", 1)) + r_files.append(LIST_STRING % ("file", urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname, quote=False))) + + r_li.append('f'+ urllib.parse.quote(linkname, errors='surrogatepass')) + f_li.append(html.escape(displayname, quote=False)) + if _is_dir_: + r_folders.append(LIST_STRING % ("", urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname, quote=False))) + + r_li.append('d' + urllib.parse.quote(linkname, errors='surrogatepass')) + f_li.append(html.escape(displayname, quote=False)) + + s_li.append(size) + + + + r.extend(r_folders) + r.extend(r_files) + + r.append("""
    +
    + +
    + """) + + r.append(_js_script().safe_substitute(PY_LINK_LIST=str(r_li), + PY_FILE_LIST=str(f_li), + PY_FILE_SIZE =str(s_li))) + + + encoded = '\n'.join(r).encode(enc, 'surrogateescape') + + return self.send_txt(HTTPStatus.OK, encoded) - if both: - return b - return p ############################################# @@ -671,14 +694,14 @@ def size(self): class ZipFly: def __init__(self, - mode = 'w', - paths = [], - chunksize = 0x8000, - compression = zipfile.ZIP_STORED, - allowZip64 = True, - compresslevel = None, - storesize = 0, - encode = 'utf-8',): + mode = 'w', + paths = [], + chunksize = 0x8000, + compression = zipfile.ZIP_STORED, + allowZip64 = True, + compresslevel = None, + storesize = 0, + encode = 'utf-8',): """ @param store size : int : size of all files @@ -689,7 +712,7 @@ def __init__(self, - self.comment = f'Written using Zipfly v{zf__version__}' + self.comment = 'Written using Zipfly v' + zf__version__ self.mode = mode self.paths = paths self.filesystem = 'fs' @@ -721,7 +744,8 @@ def generator(self): z_info = zipfile.ZipInfo.from_file( path[self.filesystem], - path[self.arcname] + path[self.arcname], + strict_timestamps=False ) with open( path[self.filesystem], 'rb' ) as e: @@ -741,8 +765,7 @@ def generator(self): def get_size(self): return self._buffer_size - -except Exception: +except ImportError: config.disabled_func["zip"] = True logger.warning("Failed to initialize zipfly, ZIP feature is disabled.") @@ -782,1557 +805,198 @@ def get_id(self, path, size=None): elif self.assigend_zid(path): prev_zid, prev_size, prev_m_time = self.assigend_zid[path] - else: - exist=0 - - - if exist and prev_m_time == source_m_time and prev_size == source_size: - return prev_zid - - - id = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))+'_'+ str(time.time()) - id += '0'*(25-len(id)) - - - self.assigend_zid[path] = (id, source_size, source_m_time) - return id - - - - - def archive(self, path, zid, size=None): - """ - archive the folder - - `path`: path to archive - `zid`: id of the folder - `size`: size of the folder (optional) - """ - def err(msg): - self.zip_in_progress.pop(zid, None) - self.assigend_zid.pop(path, None) - self.zip_id_status[zid] = "ERROR: " + msg - return False - if config.disabled_func["zip"]: - return err("ZIP FUNTION DISABLED") - - - - - # run zipfly - self.zip_in_progress[zid] = 0 - - source_size, fm = size if size else get_dir_size(path, return_list=True, both=True, must_read=True) - - if len(fm)==0: - return err("FOLDER HAS NO FILES") - - source_m_time = get_dir_m_time(path) - - - dir_name = os.path.basename(path) - - - - zfile_name = os.path.join(self.zip_temp_dir, "{dir_name}({zid})".format(dir_name=dir_name, zid=zid) + ".zip") - - self.init_dir() - - - paths = [] - for i,j in fm: - paths.append({"fs": i, "n":j}) - - zfly = ZipFly(paths = paths, chunksize=0x80000) - - - - archived_size = 0 - - self.zip_id_status[zid] = "ARCHIVING" - - try: - with open(zfile_name, "wb") as zf: - for chunk, c_size in zfly.generator(): - zf.write(chunk) - archived_size += c_size - if source_size==0: - source_size+=1 # prevent division by 0 - self.zip_in_progress[zid] = (archived_size/source_size)*100 - except Exception as e: - traceback.print_exc() - return err(e) - self.zip_in_progress.pop(zid, None) - self.assigend_zid.pop(path, None) - self.zip_id_status[zid] = "DONE" - - - - self.zip_path_ids[path] = zid, source_size, source_m_time - self.zip_ids[zid] = zfile_name - # zip_ids are never cleared in runtime due to the fact if someones downloading a 
zip, the folder content changed, other person asked for zip, new zip created and this id got removed, the 1st user wont be able to resume - - - return zid - - def archive_thread(self, path, zid, size=None): - return threading.Thread(target=self.archive, args=(path, zid, size)) - -zip_manager = ZIP_Manager() - -#---------------------------x-------------------------------- - - -if not os.path.isdir(config.log_location): - try: - os.mkdir(path=config.log_location) - except Exception: - config.log_location ="./" - - - - -if not config.disabled_func["send2trash"]: - try: - from send2trash import send2trash, TrashPermissionError - except Exception: - config.disabled_func["send2trash"] = True - logger.warning("send2trash module not found, send2trash function disabled") - -if not config.disabled_func["natsort"]: - try: - import natsort - except Exception: - config.disabled_func["natsort"] = True - logger.warning("natsort module not found, natsort function disabled") - -def humansorted(li): - if not config.disabled_func["natsort"]: - return natsort.humansorted(li) - - return sorted(li, key=lambda x: x.lower()) - -def scansort(li): - if not config.disabled_func["natsort"]: - return natsort.humansorted(li, key=lambda x:x.name) - - return sorted(li, key=lambda x: x.name.lower()) - -def listsort(li): - return humansorted(li) - - -class Template(_Template): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def __add__(self, other): - if isinstance(other, _Template): - return Template(self.template + other.template) - return Template(self.template + str(other)) - - -def _get_template(path): - if config.dev_mode: - with open(path, encoding=enc) as f: - return Template(f.read()) - - return Template(config.file_list[path]) - -def directory_explorer_header(): - return _get_template("html_page.html") - -def _global_script(): - return _get_template("global_script.html") - -def _js_script(): - return _global_script() + _get_template("html_script.html") - -def _video_script(): - return _global_script() + _get_template("html_vid.html") - -def _zip_script(): - return _global_script() + _get_template("html_zip_page.html") - -def _admin_page(): - return _global_script() + _get_template("html_admin.html") - - - - -class Zfunc(object): - """Thread safe sequncial printing/queue task handler class""" - - __all__ = ["new", "update"] - def __init__(self, caller, store_return=False): - super().__init__() - - self.queue = Queue() - # stores [args, kwargs], ... - self.store_return = store_return - self.returner = Queue() - # queue to store return value if store_return enabled - - self.BUSY = False - - self.caller = caller - - def next(self): - """ check if any item in queje and call, if already running or queue empty, returns """ - if self.queue.empty() or self.BUSY: - return None - - self.BUSY = True - args, kwargs = self.queue.get() - - x = self.caller(*args, **kwargs) - if self.store_return: - self.returner.put(x) - - self.BUSY = False - - if not self.queue.empty(): - # will make the loop continue running - return True - - - def update(self, *args, **kwargs): - """ Uses xprint and parse string""" - - self.queue.put((args, kwargs)) - while self.next() is True: - # use while instead of recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion to avoid recursion.... error - pass - - - - def new(self, caller, store_return=False): - self.__init__(caller=caller, store_return=store_return) - - - - -"""HTTP server classes. 
- -Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see -SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and CGIHTTPRequestHandler for CGI scripts. - -It does, however, optionally implement HTTP/1.1 persistent connections, -as of version 0.3. - -XXX To do: - -- log requests even later (to capture byte count) -- log user-agent header and other interesting goodies -- send error log to separate file -""" - - - - -############################################## -# PAUSE AND RESUME FEATURE # -############################################## - -def copy_byte_range(infile, outfile, start=None, stop=None, bufsize=16*1024): - ''' - TO SUPPORT PAUSE AND RESUME FEATURE - Like shutil.copyfileobj, but only copy a range of the streams. - Both start and stop are inclusive. - ''' - if start is not None: infile.seek(start) - while 1: - to_read = min(bufsize, stop + 1 - infile.tell() if stop else bufsize) - buf = infile.read(to_read) - if not buf: - break - outfile.write(buf) - - -BYTE_RANGE_RE = re.compile(r'bytes=(\d+)-(\d+)?$') -def parse_byte_range(byte_range): - '''Returns the two numbers in 'bytes=123-456' or throws ValueError. - The last number or both numbers may be None. - ''' - if byte_range.strip() == '': - return None, None - - m = BYTE_RANGE_RE.match(byte_range) - if not m: - raise ValueError('Invalid byte range %s' % byte_range) - - #first, last = [x and int(x) for x in m.groups()] # - - first, last = map((lambda x: int(x) if x else None), m.groups()) - - if last and last < first: - raise ValueError('Invalid byte range %s' % byte_range) - return first, last - -#---------------------------x-------------------------------- - - - -# download file from url using urllib -def fetch_url(url, file = None): - try: - with urllib.request.urlopen(url) as response: - data = response.read() # a `bytes` object - if not file: - return data - - with open(file, 'wb') as f: - f.write(data) - return True - except Exception: - traceback.print_exc() - return None - - -def URL_MANAGER(url:str): - """ - returns a tuple of (`path`, `query_dict`, `fragment`)\n - - `url` = `'/store?page=10&limit=15&price=ASC#dskjfhs'`\n - `path` = `'/store'`\n - `query_dict` = `{'page': ['10'], 'limit': ['15'], 'price': ['ASC']}`\n - `fragment` = `dskjfhs`\n - """ - - # url = '/store?page=10&limit=15&price#dskjfhs' - parse_result = urllib.parse.urlparse(url) - - - dict_result = Callable_dict(urllib.parse.parse_qs(parse_result.query, keep_blank_values=True)) - - return (parse_result.path, dict_result, parse_result.fragment) - - - -# Default error message template -DEFAULT_ERROR_MESSAGE = """ - - - - - - Error response - - -

-        <h1>Error response</h1>
-        <p>Error code: %(code)d</p>
-        <p>Message: %(message)s.</p>
-        <p>Error code explanation: %(code)s - %(explain)s.</p>

    - - -""" - -DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" - -class HTTPServer(socketserver.TCPServer): - - allow_reuse_address = True # Seems to make sense in testing environment - - def server_bind(self): - """Override server_bind to store the server name.""" - socketserver.TCPServer.server_bind(self) - host, port = self.server_address[:2] - self.server_name = socket.getfqdn(host) - self.server_port = port - - -class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): - daemon_threads = True - - -class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): - - """HTTP request handler base class. - - The various request details are stored in instance variables: - - - client_address is the client IP address in the form (host, - port); - - - command, path and version are the broken-down request line; - - - headers is an instance of email.message.Message (or a derived - class) containing the header information; - - - rfile is a file object open for reading positioned at the - start of the optional input data part; - - - wfile is a file object open for writing. - - IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! - - The first thing to be written must be the response line. Then - follow 0 or more header lines, then a blank line, and then the - actual data (if any). The meaning of the header lines depends on - the command executed by the server; in most cases, when data is - returned, there should be at least one header line of the form - - Content-type: / - - where and should be registered MIME types, - e.g. "text/html" or "text/plain". - - """ - - # The Python system version, truncated to its first component. - sys_version = "Python/" + sys.version.split()[0] - - # The server software version. You may want to override this. - # The format is multiple whitespace-separated strings, - # where each string is of the form name[/version]. - server_version = "BaseHTTP/" + __version__ - - error_message_format = DEFAULT_ERROR_MESSAGE - error_content_type = DEFAULT_ERROR_CONTENT_TYPE - - # The default request version. This only affects responses up until - # the point where the request line is parsed, so it mainly decides what - # the client gets back when sending a malformed request line. - # Most web servers default to HTTP 0.9, i.e. don't send a status line. - default_request_version = "HTTP/0.9" - - def parse_request(self): - """Parse a request (internal). - - The request should be stored in self.raw_requestline; the results - are in self.command, self.path, self.request_version and - self.headers. - - Return True for success, False for failure; on failure, any relevant - error response has already been sent back. - - """ - self.command = '' # set in case of error on the first line - self.request_version = version = self.default_request_version - self.close_connection = True - requestline = str(self.raw_requestline, 'iso-8859-1') - requestline = requestline.rstrip('\r\n') - self.requestline = requestline - words = requestline.split() - if len(words) == 0: - return False - - if len(words) >= 3: # Enough to determine protocol version - version = words[-1] - try: - if not version.startswith('HTTP/'): - raise ValueError - base_version_number = version.split('/', 1)[1] - version_number = base_version_number.split(".") - # RFC 2145 section 3.1 says there can be only one "." 
and - # - major and minor numbers MUST be treated as - # separate integers; - # - HTTP/2.4 is a lower version than HTTP/2.13, which in - # turn is lower than HTTP/12.3; - # - Leading zeros MUST be ignored by recipients. - if len(version_number) != 2: - raise ValueError - version_number = int(version_number[0]), int(version_number[1]) - except (ValueError, IndexError): - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad request version (%r)" % version) - return False - if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": - self.close_connection = False - if version_number >= (2, 0): - self.send_error( - HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, - "Invalid HTTP version (%s)" % base_version_number) - return False - self.request_version = version - - if not 2 <= len(words) <= 3: - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad request syntax (%r)" % requestline) - return False - command, path = words[:2] - if len(words) == 2: - self.close_connection = True - if command != 'GET': - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad HTTP/0.9 request type (%r)" % command) - return False - self.command, self.path = command, path - - - # gh-87389: The purpose of replacing '//' with '/' is to protect - # against open redirect attacks possibly triggered if the path starts - # with '//' because http clients treat //path as an absolute URI - # without scheme (similar to http://path) rather than a path. - if self.path.startswith('//'): - self.path = '/' + self.path.lstrip('/') # Reduce to a single / - - # Examine the headers and look for a Connection directive. - try: - self.headers = http.client.parse_headers(self.rfile, - _class=self.MessageClass) - except http.client.LineTooLong as err: - self.send_error( - HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, - "Line too long", - str(err)) - return False - except http.client.HTTPException as err: - self.send_error( - HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, - "Too many headers", - str(err) - ) - return False - - conntype = self.headers.get('Connection', "") - if conntype.lower() == 'close': - self.close_connection = True - elif (conntype.lower() == 'keep-alive' and - self.protocol_version >= "HTTP/1.1"): - self.close_connection = False - # Examine the headers and look for an Expect directive - expect = self.headers.get('Expect', "") - if (expect.lower() == "100-continue" and - self.protocol_version >= "HTTP/1.1" and - self.request_version >= "HTTP/1.1"): - if not self.handle_expect_100(): - return False - return True - - def handle_expect_100(self): - """Decide what to do with an "Expect: 100-continue" header. - - If the client is expecting a 100 Continue response, we must - respond with either a 100 Continue or a final response before - waiting for the request body. The default is to always respond - with a 100 Continue. You can behave differently (for example, - reject unauthorized requests) by overriding this method. - - This method should either return True (possibly after sending - a 100 Continue response) or send an error response and return - False. - - """ - self.send_response_only(HTTPStatus.CONTINUE) - self.end_headers() - return True - - def handle_one_request(self): - """Handle a single HTTP request. - - You normally don't need to override this method; see the class - __doc__ string for information on how to handle specific HTTP - commands such as GET and POST. 
- - """ - try: - self.raw_requestline = self.rfile.readline(65537) - if len(self.raw_requestline) > 65536: - self.requestline = '' - self.request_version = '' - self.command = '' - self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG) - return - if not self.raw_requestline: - self.close_connection = True - return - if not self.parse_request(): - # An error code has been sent, just exit - return - mname = 'do_' + self.command - if not hasattr(self, mname): - self.send_error( - HTTPStatus.NOT_IMPLEMENTED, - "Unsupported method (%r)" % self.command) - return - method = getattr(self, mname) - - url_path, query, fragment = URL_MANAGER(self.path) - self.url_path = url_path - self.query = query - self.fragment = fragment - - self.use_range = False - - - _hash = abs(hash((self.raw_requestline, tools.random_string(10)))) - self.req_hash = base64.b64encode(str(_hash).encode('ascii')).decode()[:10] - - _w = tools.term_width() - w = _w - len(str(self.req_hash)) -2 - w = w//2 - print('='*w + f' {self.req_hash} ' + '='*w) - print( f'{self.req_hash}|=>\t request\t: {self.command}', - f'{self.req_hash}|=>\t url \t: {url_path}', - f'{self.req_hash}|=>\t query \t: {query}', - f'{self.req_hash}|=>\t fragment\t: {fragment}' - , sep=f'\n') - print('+'*w + f' {self.req_hash} ' + '+'*w) - - - - - try: - method() - except Exception: - traceback.print_exc() - - print('-'*w + f' {self.req_hash} ' + '-'*w) - print('#'*_w) - self.wfile.flush() #actually send the response if not already done. - except (TimeoutError, socket.timeout) as e: - #a read or a write timed out. Discard this connection - self.log_error("Request timed out:", e) - self.close_connection = True - return - - def handle(self): - """Handle multiple requests if necessary.""" - self.close_connection = True - - self.handle_one_request() - while not self.close_connection: - self.handle_one_request() - - def send_error(self, code, message=None, explain=None): - """Send and log an error reply. - - Arguments are - * code: an HTTP error code - 3 digits - * message: a simple optional 1 line reason phrase. - *( HTAB / SP / VCHAR / %x80-FF ) - defaults to short entry matching the response code - * explain: a detailed message defaults to the long entry - matching the response code. - - This sends an error response (so it must be called before any - output has been generated), logs the error, and finally sends - a piece of HTML explaining the error to the user. - - """ - - try: - shortmsg, longmsg = self.responses[code] - except KeyError: - shortmsg, longmsg = '???', '???' - if message is None: - message = shortmsg - if explain is None: - explain = longmsg - self.log_error("code", code, "message", message) - self.send_response(code, message) - self.send_header('Connection', 'close') - - # Message body is omitted for cases described in: - # - RFC7230: 3.3. 1xx, 204(No Content), 304(Not Modified) - # - RFC7231: 6.3.6. 
205(Reset Content) - body = None - if (code >= 200 and - code not in (HTTPStatus.NO_CONTENT, - HTTPStatus.RESET_CONTENT, - HTTPStatus.NOT_MODIFIED)): - # HTML encode to prevent Cross Site Scripting attacks - # (see bug #1100201) - content = (self.error_message_format % { - 'code': code, - 'message': html.escape(message, quote=False), - 'explain': html.escape(explain, quote=False) - }) - body = content.encode('UTF-8', 'replace') - self.send_header("Content-Type", self.error_content_type) - self.send_header('Content-Length', str(len(body))) - self.end_headers() - - if self.command != 'HEAD' and body: - self.wfile.write(body) - - def send_response(self, code, message=None): - """Add the response header to the headers buffer and log the - response code. - - Also send two standard headers with the server software - version and the current date. - - """ - self.log_request(code) - self.send_response_only(code, message) - self.send_header('Server', self.version_string()) - self.send_header('Date', self.date_time_string()) - - def send_response_only(self, code, message=None): - """Send the response header only.""" - if self.request_version != 'HTTP/0.9': - if message is None: - if code in self.responses: - message = self.responses[code][0] - else: - message = '' - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append(("%s %d %s\r\n" % - (self.protocol_version, code, message)).encode( - 'utf-8', 'strict')) - - def send_header(self, keyword, value): - """Send a MIME header to the headers buffer.""" - if self.request_version != 'HTTP/0.9': - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append( - ("%s: %s\r\n" % (keyword, value)).encode('utf-8', 'strict')) - - if keyword.lower() == 'connection': - if value.lower() == 'close': - self.close_connection = True - elif value.lower() == 'keep-alive': - self.close_connection = False - - def end_headers(self): - """Send the blank line ending the MIME headers.""" - if self.request_version != 'HTTP/0.9': - self._headers_buffer.append(b"\r\n") - self.flush_headers() - - def flush_headers(self): - if hasattr(self, '_headers_buffer'): - self.wfile.write(b"".join(self._headers_buffer)) - self._headers_buffer = [] - - def log_request(self, code='-', size='-'): - """Log an accepted request. - - This is called by send_response(). - - """ - if isinstance(code, HTTPStatus): - code = code.value - self.log_message(f'"{self.requestline}"', code, size) - - def log_error(self, *args): - """Log an error. - - This is called when a request cannot be fulfilled. By - default it passes the message on to log_message(). - - Arguments are the same as for log_message(). - - XXX This should go to the separate error log. - - """ - self.log_message(args, error = True) - - def log_warning(self, *args): - """Log a warning""" - self.log_message(args, warning = True) - - def log_debug(self, *args, write = True): - """Log a debug message""" - self.log_message(args, debug = True, write = write) - - def log_info(self, *args): - """Default log""" - self.log_message(args) - - def _log_writer(self, message): - os.makedirs(config.log_location, exist_ok=True) - with open(config.log_location + 'log.txt','a+') as f: - f.write((f"#{self.req_hash} by [{self.address_string()}] at [{self.log_date_time_string()}]|=> {message}\n")) - - - - def log_message(self, *args, error = False, warning = False, debug = False, write = True): - """Log an arbitrary message. - - This is used by all other logging functions. 
Override - it if you have specific logging wishes. - - The client ip and current date/time are prefixed to - every message. - - """ - - message = ' '.join(map(str, args)) - - message = ("# %s by [%s] at [%s] %s\n" % - (self.req_hash, self.address_string(), - self.log_date_time_string(), - message)) - if error: - logger.error(message) - elif warning: - logger.warning(message) - elif debug: - logger.debug(message) - else: - logger.info(message) - - - if not config.write_log: - return - - if not hasattr(self, "Zlog_writer"): - self.Zlog_writer = Zfunc(self._log_writer) - - try: - self.Zlog_writer.update(message) - except Exception: - traceback.print_exc() - - - def version_string(self): - """Return the server software version string.""" - return self.server_version + ' ' + self.sys_version - - def date_time_string(self, timestamp=None): - """Return the current date and time formatted for a message header.""" - if timestamp is None: - timestamp = time.time() - return email.utils.formatdate(timestamp, usegmt=True) - - def log_date_time_string(self): - """Return the current time formatted for logging.""" - now = time.time() - year, month, day, hh, mm, ss, x, y, z = time.localtime(now) - s = "%02d/%3s/%04d %02d:%02d:%02d" % ( - day, self.monthname[month], year, hh, mm, ss) - return s - - weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - - monthname = [None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - - def address_string(self): - """Return the client address.""" - - return self.client_address[0] - - # Essentially static class variables - - # The version of the HTTP protocol we support. - # Set this to HTTP/1.1 to enable automatic keepalive - protocol_version = "HTTP/1.0" - - # MessageClass used to parse headers - MessageClass = http.client.HTTPMessage - - # hack to maintain backwards compatibility - responses = { - v: (v.phrase, v.description) - for v in HTTPStatus.__members__.values() - } - - -class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): - - """Simple HTTP request handler with GET and HEAD commands. - - This serves files from the current directory and any of its - subdirectories. The MIME type for files is determined by - calling the .guess_type() method. - - The GET and HEAD requests are identical except that the HEAD - request omits the actual contents of the file. 
- - """ - - server_version = "SimpleHTTP/" + __version__ - - if not mimetypes.inited: - mimetypes.init() # try to read system mime.types - extensions_map = mimetypes.types_map.copy() - extensions_map.update({ - '': 'application/octet-stream', # Default - '.py': 'text/plain', - '.c': 'text/plain', - '.h': 'text/plain', - '.css': 'text/css', - - '.gz': 'application/gzip', - '.Z': 'application/octet-stream', - '.bz2': 'application/x-bzip2', - '.xz': 'application/x-xz', - - '.webp': 'image/webp', - - 'opus': 'audio/opus', - '.oga': 'audio/ogg', - '.wav': 'audio/wav', - - '.ogv': 'video/ogg', - '.ogg': 'application/ogg', - 'm4a': 'audio/mp4', - }) - - handlers = { - 'HEAD': [], - 'POST': [], - } - - def __init__(self, *args, directory=None, **kwargs): - if directory is None: - directory = os.getcwd() - self.directory = os.fspath(directory) # same as directory, but str, new in 3.6 - super().__init__(*args, **kwargs) - self.query = Callable_dict() - - - def do_GET(self): - """Serve a GET request.""" - try: - f = self.send_head() - except Exception as e: - traceback.print_exc() - self.send_error(500, str(e)) - return - - if f: - try: - self.copyfile(f, self.wfile) - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) - finally: - f.close() - - def do_(self): - '''incase of errored request''' - self.send_error(HTTPStatus.BAD_REQUEST, "Bad request.") - - - @staticmethod - def on_req(type='', url='', hasQ=(), QV={}, fragent='', url_regex = '', func=null): - '''called when request is received - type: GET, POST, HEAD, ... - url: url (must start with /) - hasQ: if url has query - QV: match query value - fragent: fragent of request - url_regex: url regex (must start with /) url regex, the url must start and end with this regex - - if query is tuple, it will only check existence of key - if query is dict, it will check value of key - ''' - self = __class__ - - type = type.upper() - if type == 'GET': - type = 'HEAD' - - - if type not in self.handlers: - self.handlers[type] = [] - - # FIXING TYPE ISSUE - if isinstance(hasQ, str): - hasQ = (hasQ,) - - if url=='' and url_regex=='': - url_regex = '.*' - - - to_check = (url, hasQ, QV, fragent, url_regex) - - def decorator(func): - self.handlers[type].append((to_check, func)) - return func - return decorator - - def test_req(self, url='', hasQ=(), QV={}, fragent='', url_regex=''): - '''test if request is matched' - - args: - url: url relative path (must start with /) - hasQ: if url has query - QV: match query value - fragent: fragent of request - url_regex: url regex, the url must start and end with this regex - - - ''' - # print("^"+url, hasQ, QV, fragent) - # print(self.url_path, self.query, self.fragment) - # print(self.url_path != url, self.query(*hasQ), self.query, self.fragment != fragent) - - if url_regex: - if not re.search("^"+url_regex+'$', self.url_path): return False - elif url and url!=self.url_path: return False - - if isinstance(hasQ, str): - hasQ = (hasQ,) - - if hasQ and self.query(*hasQ)==False: return False - if QV: - for k, v in QV.items(): - if not self.query(k): return False - if self.query[k] != v: return False - - if fragent and self.fragment != fragent: return False - - return True - - def do_HEAD(self): - """Serve a HEAD request.""" - try: - f = self.send_head() - except Exception as e: - traceback.print_exc() - self.send_error(500, str(e)) - return - - if f: - f.close() - - def do_POST(self): - """Serve a POST request.""" - 
self.range = None, None - - - path = self.translate_path(self.path) - # DIRECTORY DONT CONTAIN SLASH / AT END - - url_path, query, fragment = self.url_path, self.query, self.fragment - spathsplit = self.url_path.split("/") - - # print(f'url: {url_path}\nquery: {query}\nfragment: {fragment}') - - try: - for case, func in self.handlers['POST']: - if self.test_req(*case): - try: - f = func(self, url_path=url_path, query=query, fragment=fragment, path=path, spathsplit=spathsplit) - except PostError: - traceback.print_exc() - break # break if error is raised and send BAD_REQUEST (at end of loop) - - if f: - try: - self.copyfile(f, self.wfile) - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", self.address_string())) - finally: - f.close() - return - - - - return self.send_error(HTTPStatus.BAD_REQUEST, "Invalid request.") - - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as e: - print(tools.text_box(e.__class__.__name__, e,"\nby ", [self.address_string()])) - return - except Exception as e: - traceback.print_exc() - self.send_error(500, str(e)) - return - - - - - def return_txt(self, code, msg, content_type="text/html; charset=utf-8", write_log=False): - '''returns only the head to client - and returns a file object to be used by copyfile''' - self.log_debug(f'[RETURNED] {code} {msg} to client', write=write_log) - if not isinstance(msg, bytes): - encoded = msg.encode('utf-8', 'surrogateescape') - else: - encoded = msg - - f = io.BytesIO() - f.write(encoded) - f.seek(0) - - self.send_response(code) - self.send_header("Content-type", content_type) - self.send_header("Content-Length", str(len(encoded))) - self.end_headers() - return f - - def send_txt(self, code, msg, write_log=True): - '''sends the head and file to client''' - f = self.return_txt(code, msg, write_log=write_log) - if self.command == "HEAD": - return # to avoid sending file on get request - self.copyfile(f, self.wfile) - f.close() - - def send_json(self, obj): - """send object as json - obj: json-able object or json.dumps() string""" - if not isinstance(obj, str): - obj = json.dumps(obj, indent=1) - f = self.return_txt(200, obj, content_type="application/json") - if self.command == "HEAD": - return # to avoid sending file on get request - self.copyfile(f, self.wfile) - f.close() - - def return_file(self, path, filename=None, download=False): - f = None - is_attachment = "attachment;" if (self.query("dl") or download) else "" - - first, last = 0, None - - try: - ctype = self.guess_type(path) - - f = open(path, 'rb') - fs = os.fstat(f.fileno()) - - file_len = fs[6] - # Use browser cache if possible - if ("If-Modified-Since" in self.headers - and "If-None-Match" not in self.headers): - # compare If-Modified-Since and time of last file modification - try: - ims = email.utils.parsedate_to_datetime( - self.headers["If-Modified-Since"]) - except (TypeError, IndexError, OverflowError, ValueError): - # ignore ill-formed values - pass - else: - if ims.tzinfo is None: - # obsolete format with no timezone, cf. 
- # https://tools.ietf.org/html/rfc7231#section-7.1.1.1 - ims = ims.replace(tzinfo=datetime.timezone.utc) - if ims.tzinfo is datetime.timezone.utc: - # compare to UTC datetime of last modification - last_modif = datetime.datetime.fromtimestamp( - fs.st_mtime, datetime.timezone.utc) - # remove microseconds, like in If-Modified-Since - last_modif = last_modif.replace(microsecond=0) - - if last_modif <= ims: - self.send_response(HTTPStatus.NOT_MODIFIED) - self.end_headers() - f.close() - - return None - - if self.use_range: - first = self.range[0] - if first is None: - first = 0 - last = self.range[1] - if last is None or last >= file_len: - last = file_len - 1 - - if first >= file_len: # PAUSE AND RESUME SUPPORT - self.send_error(416, 'Requested Range Not Satisfiable') - return None - - self.send_response(206) - self.send_header('Content-Type', ctype) - self.send_header('Accept-Ranges', 'bytes') - - - response_length = last - first + 1 - - self.send_header('Content-Range', - 'bytes %s-%s/%s' % (first, last, file_len)) - self.send_header('Content-Length', str(response_length)) - - - - else: - self.send_response(HTTPStatus.OK) - self.send_header("Content-Type", ctype) - self.send_header("Content-Length", str(file_len)) - - self.send_header("Last-Modified", - self.date_time_string(fs.st_mtime)) - self.send_header("Content-Disposition", is_attachment+'filename="%s"' % (os.path.basename(path) if filename is None else filename)) - self.end_headers() - - return f - - except PermissionError: - self.send_error(HTTPStatus.FORBIDDEN, "Permission denied") - return None - - except OSError: - self.send_error(HTTPStatus.NOT_FOUND, "File not found") - return None - - - except Exception: - traceback.print_exc() - - # if f and not f.closed(): f.close() - raise - - - - def send_head(self): - """Common code for GET and HEAD commands. - - This sends the response code and MIME headers. - - Return value is either a file object (which has to be copied - to the outputfile by the caller unless the command was HEAD, - and must be closed by the caller under all circumstances), or - None, in which case the caller has nothing further to do. - - """ - - if 'Range' not in self.headers: - self.range = None, None - first, last = 0, 0 - - else: - try: - self.range = parse_byte_range(self.headers['Range']) - first, last = self.range - self.use_range = True - except ValueError as e: - self.send_error(400, 'Invalid byte range') - return None - - path = self.translate_path(self.path) - # DIRECTORY DONT CONTAIN SLASH / AT END - - - url_path, query, fragment = self.url_path, self.query, self.fragment - - spathsplit = self.url_path.split("/") - - - - for case, func in self.handlers['HEAD']: # GET WILL Also BE HANDLED BY HEAD - if self.test_req(*case): - return func(self, url_path=url_path, query=query, fragment=fragment, path=path, spathsplit=spathsplit) - - return self.send_error(HTTPStatus.NOT_FOUND, "File not found") - - - - - def get_displaypath(self, url_path): - """ - Helper to produce a display path for the directory listing. - """ - - try: - displaypath = urllib.parse.unquote(url_path, errors='surrogatepass') - except UnicodeDecodeError: - displaypath = urllib.parse.unquote(url_path) - displaypath = html.escape(displaypath, quote=False) - - return displaypath - + else: + exist=0 + if exist and prev_m_time == source_m_time and prev_size == source_size: + return prev_zid - def list_directory_json(self, path=None): - """Helper to produce a directory listing (JSON). 
- Return json file of available files and folders""" - if path == None: - path = self.translate_path(self.path) + id = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))+'_'+ str(time.time()) + id += '0'*(25-len(id)) - try: - dir_list = scansort(os.scandir(path)) - except OSError: - self.send_error( - HTTPStatus.NOT_FOUND, - "No permission to list directory") - return None - dir_dict = [] + self.assigend_zid[path] = (id, source_size, source_m_time) + return id - for file in dir_list: - name = file.name - displayname = linkname = name - if file.is_dir(): - displayname = name + "/" - linkname = name + "/" - elif file.is_symlink(): - displayname = name + "@" - dir_dict.append([urllib.parse.quote(linkname, errors='surrogatepass'), - html.escape(displayname, quote=False)]) + def archive(self, path, zid, size=None): + """ + archive the folder - encoded = json.dumps(dir_dict).encode("utf-8", 'surrogateescape') - f = io.BytesIO() - f.write(encoded) - f.seek(0) - self.send_response(HTTPStatus.OK) - self.send_header("Content-type", "application/json; charset=%s" % "utf-8") - self.send_header("Content-Length", str(len(encoded))) - self.end_headers() - return f + `path`: path to archive + `zid`: id of the folder + `size`: size of the folder (optional) + """ + def err(msg): + self.zip_in_progress.pop(zid, None) + self.assigend_zid.pop(path, None) + self.zip_id_status[zid] = "ERROR: " + msg + return False + if config.disabled_func["zip"]: + return err("ZIP FUNTION DISABLED") - def list_directory(self, path): - """Helper to produce a directory listing (absent index.html). - Return value is either a file object, or None (indicating an - error). In either case, the headers are sent, making the - interface the same as for send_head(). + # run zipfly + self.zip_in_progress[zid] = 0 - """ + source_size, fm = size if size else get_dir_size(path, return_list=True, both=True, must_read=True) - try: - dir_list = scansort(os.scandir(path)) - except OSError: - self.send_error( - HTTPStatus.NOT_FOUND, - "No permission to list directory") - return None - r = [] + if len(fm)==0: + return err("FOLDER HAS NO FILES") - displaypath = self.get_displaypath(self.url_path) + source_m_time = get_dir_m_time(path) - title = self.get_titles(displaypath) + dir_name = os.path.basename(path) - r.append(directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, - PY_PUBLIC_URL=config.address(), - PY_DIR_TREE_NO_JS=self.dir_navigator(displaypath))) - r_li= [] # type + file_link - # f : File - # d : Directory - # v : Video - # h : HTML - f_li = [] # file_names - s_li = [] # size list + zfile_name = os.path.join(self.zip_temp_dir, "{dir_name}({zid})".format(dir_name=dir_name, zid=zid) + ".zip") - r_folders = [] # no js - r_files = [] # no js + self.init_dir() - LIST_STRING = '
  • %s

  • ' + paths = [] + for i,j in fm: + paths.append({"fs": i, "n":j}) - r.append(""" -
    -
      - - """) + zfly = ZipFly(paths = paths, chunksize=0x80000) - # r.append("""🔙 {Prev folder}""") - for file in dir_list: - #fullname = os.path.join(path, name) - name = file.name - displayname = linkname = name - size=0 - # Append / for directories or @ for symbolic links - _is_dir_ = True - if file.is_dir(): - displayname = name + "/" - linkname = name + "/" - elif file.is_symlink(): - displayname = name + "@" - else: - _is_dir_ =False - size = fmbytes(file.stat().st_size) - __, ext = posixpath.splitext(name) - if ext=='.html': - r_files.append(LIST_STRING % ("link", urllib.parse.quote(linkname, - errors='surrogatepass'), - html.escape(displayname, quote=False))) - - r_li.append('h'+ urllib.parse.quote(linkname, errors='surrogatepass')) - f_li.append(html.escape(displayname, quote=False)) - - elif self.guess_type(linkname).startswith('video/'): - r_files.append(LIST_STRING % ("vid", urllib.parse.quote(linkname, - errors='surrogatepass'), - html.escape(displayname, quote=False))) - - r_li.append('v'+ urllib.parse.quote(linkname, errors='surrogatepass')) - f_li.append(html.escape(displayname, quote=False)) - - elif self.guess_type(linkname).startswith('image/'): - r_files.append(LIST_STRING % ("file", urllib.parse.quote(linkname, - errors='surrogatepass'), - html.escape(displayname, quote=False))) - - r_li.append('i'+ urllib.parse.quote(linkname, errors='surrogatepass')) - f_li.append(html.escape(displayname, quote=False)) - - else: - r_files.append(LIST_STRING % ("file", urllib.parse.quote(linkname, - errors='surrogatepass'), - html.escape(displayname, quote=False))) - - r_li.append('f'+ urllib.parse.quote(linkname, errors='surrogatepass')) - f_li.append(html.escape(displayname, quote=False)) - if _is_dir_: - r_folders.append(LIST_STRING % ("", urllib.parse.quote(linkname, - errors='surrogatepass'), - html.escape(displayname, quote=False))) - - r_li.append('d' + urllib.parse.quote(linkname, errors='surrogatepass')) - f_li.append(html.escape(displayname, quote=False)) - s_li.append(size) + archived_size = 0 + self.zip_id_status[zid] = "ARCHIVING" + try: + with open(zfile_name, "wb") as zf: + for chunk, c_size in zfly.generator(): + zf.write(chunk) + archived_size += c_size + if source_size==0: + source_size+=1 # prevent division by 0 + self.zip_in_progress[zid] = (archived_size/source_size)*100 + except Exception as e: + traceback.print_exc() + return err(e) + self.zip_in_progress.pop(zid, None) + self.assigend_zid.pop(path, None) + self.zip_id_status[zid] = "DONE" - r.extend(r_folders) - r.extend(r_files) - r.append("""
    -
    - -
    - """) - r.append(_js_script().safe_substitute(PY_LINK_LIST=str(r_li), - PY_FILE_LIST=str(f_li), - PY_FILE_SIZE =str(s_li))) + self.zip_path_ids[path] = zid, source_size, source_m_time + self.zip_ids[zid] = zfile_name + # zip_ids are never cleared in runtime due to the fact if someones downloading a zip, the folder content changed, other person asked for zip, new zip created and this id got removed, the 1st user wont be able to resume - encoded = '\n'.join(r).encode(enc, 'surrogateescape') - f = io.BytesIO() - f.write(encoded) - f.seek(0) - self.send_response(HTTPStatus.OK) - self.send_header("Content-type", "text/html; charset=%s" % enc) - self.send_header("Content-Length", str(len(encoded))) - self.end_headers() - return f + return zid - def get_titles(self, path): + def archive_thread(self, path, zid, size=None): + return threading.Thread(target=self.archive, args=(path, zid, size)) - paths = path.split('/') - if paths[-2]=='': - return 'Viewing 🏠 HOME' - else: - return 'Viewing ' + paths[-2] +zip_manager = ZIP_Manager() +#---------------------------x-------------------------------- - def get_rel_path(self, filename): - """Return the relative path to the file, FOR OS.""" - return urllib.parse.unquote(posixpath.join(self.url_path, filename), errors='surrogatepass') +if not os.path.isdir(config.log_location): + try: + os.mkdir(path=config.log_location) + except Exception: + config.log_location ="./" - def dir_navigator(self, path): - """Makes each part of the header directory accessible like links - just like file manager, but with less CSS""" - dirs = re.sub("/{2,}", "/", path).split('/') - urls = ['/'] - names = ['🏠 HOME'] - r = [] - for i in range(1, len(dirs)-1): - dir = dirs[i] - urls.append(urls[i-1] + urllib.parse.quote(dir, errors='surrogatepass' )+ '/' if not dir.endswith('/') else "") - names.append(dir) - for i in range(len(names)): - tag = "" + names[i] + "" - r.append(tag) +if not config.disabled_func["send2trash"]: + try: + from send2trash import send2trash, TrashPermissionError + except Exception: + config.disabled_func["send2trash"] = True + logger.warning("send2trash module not found, send2trash function disabled") - return ''.join(r) +if not config.disabled_func["natsort"]: + try: + import natsort + except Exception: + config.disabled_func["natsort"] = True + logger.warning("natsort module not found, natsort function disabled") +def humansorted(li): + if not config.disabled_func["natsort"]: + return natsort.humansorted(li) + return sorted(li, key=lambda x: x.lower()) - def translate_path(self, path): - """Translate a /-separated PATH to the local filename syntax. +def scansort(li): + if not config.disabled_func["natsort"]: + return natsort.humansorted(li, key=lambda x:x.name) - Components that mean special things to the local file system - (e.g. drive or directory names) are ignored. (XXX They should - probably be diagnosed.) + return sorted(li, key=lambda x: x.name.lower()) - """ - # abandon query parameters - path = path.split('?',1)[0] - path = path.split('#',1)[0] - # Don't forget explicit trailing slash when normalizing. 
Issue17324 - trailing_slash = path.rstrip().endswith('/') +def listsort(li): + return humansorted(li) - try: - path = urllib.parse.unquote(path, errors='surrogatepass') - except UnicodeDecodeError: - path = urllib.parse.unquote(path) - path = posixpath.normpath(path) - words = path.split('/') - words = filter(None, words) - path = self.directory - - - for word in words: - if os.path.dirname(word) or word in (os.curdir, os.pardir): - # Ignore components that are not a simple file/directory name - continue - path = os.path.join(path, word) - if trailing_slash: - path += '/' - return os.path.normpath(path) # fix OS based path issue +class Template(_Template): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) - def copyfile(self, source, outputfile): - """Copy all data between two file objects. + def __add__(self, other): + if isinstance(other, _Template): + return Template(self.template + other.template) + return Template(self.template + str(other)) - The SOURCE argument is a file object open for reading - (or anything with a read() method) and the DESTINATION - argument is a file object open for writing (or - anything with a write() method). - The only reason for overriding this would be to change - the block size or perhaps to replace newlines by CRLF - -- note however that this the default server uses this - to copy binary data as well. +def _get_template(path): + if config.dev_mode: + with open(path, encoding=enc) as f: + return Template(f.read()) - """ + return Template(config.file_list[path]) +def directory_explorer_header(): + return _get_template("html_page.html") - if not self.range: - try: - source.read(1) - except: - traceback.print_exc() - print(source) - source.seek(0) - shutil.copyfileobj(source, outputfile) +def _global_script(): + return _get_template("global_script.html") - else: - # SimpleHTTPRequestHandler uses shutil.copyfileobj, which doesn't let - # you stop the copying before the end of the file. - start, stop = self.range # set in send_head() - copy_byte_range(source, outputfile, start, stop) +def _js_script(): + return _global_script() + _get_template("html_script.html") +def _video_script(): + return _global_script() + _get_template("html_vid.html") - def guess_type(self, path): - """Guess the type of a file. +def _zip_script(): + return _global_script() + _get_template("html_zip_page.html") - Argument is a PATH (a filename). +def _admin_page(): + return _global_script() + _get_template("html_admin.html") - Return value is a string of the form type/subtype, - usable for a MIME Content-type header. - The default implementation looks the file's extension - up in the table self.extensions_map, using application/octet-stream - as a default; however it would be permissible (if - slow) to look inside the data to make a better guess. 
- """ +# download file from url using urllib +def fetch_url(url, file = None): + try: + with urllib.request.urlopen(url) as response: + data = response.read() # a `bytes` object + if not file: + return data - base, ext = posixpath.splitext(path) - if ext in self.extensions_map: - return self.extensions_map[ext] - ext = ext.lower() - if ext in self.extensions_map: - return self.extensions_map[ext] - guess, _ = mimetypes.guess_type(path) - if guess: - return guess + with open(file, 'wb') as f: + f.write(data) + return data + except Exception: + traceback.print_exc() + return None - return self.extensions_map[''] #return 'application/octet-stream' @@ -2340,36 +1004,32 @@ class PostError(Exception): pass -@SimpleHTTPRequestHandler.on_req('HEAD', '/favicon.ico') -def send_favicon(self: SimpleHTTPRequestHandler, *args, **kwargs): - self.send_response(301) - self.send_header('Location','https://cdn.jsdelivr.net/gh/RaSan147/py_httpserver_Ult@main/assets/favicon.ico') - self.end_headers() - return None +@SH.on_req('HEAD', '/favicon.ico') +def send_favicon(self: SH, *args, **kwargs): + self.redirect('https://cdn.jsdelivr.net/gh/RaSan147/py_httpserver_Ult@main/assets/favicon.ico') -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="reload") -def reload(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="reload") +def reload(self: SH, *args, **kwargs): # RELOADS THE SERVER BY RE-READING THE FILE, BEST FOR TESTING REMOTELY. VULNERABLE config.reload = True - httpd.server_close() - httpd.shutdown() + reload_server() -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="admin") -def admin_page(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="admin") +def admin_page(self: SH, *args, **kwargs): title = "ADMIN PAGE" url_path = kwargs.get('url_path', '') displaypath = self.get_displaypath(url_path) head = directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, PY_PUBLIC_URL=config.address(), - PY_DIR_TREE_NO_JS=self.dir_navigator(displaypath)) + PY_DIR_TREE_NO_JS=dir_navigator(displaypath)) tail = _admin_page().template return self.return_txt(HTTPStatus.OK, f"{head}{tail}", write_log=False) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="update") -def update(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="update") +def update(self: SH, *args, **kwargs): """Check for update and return the latest version""" data = fetch_url("https://raw.githubusercontent.com/RaSan147/py_httpserver_Ult/main/VERSION") if data: @@ -2379,8 +1039,8 @@ def update(self: SimpleHTTPRequestHandler, *args, **kwargs): else: return self.return_txt(HTTPStatus.INTERNAL_SERVER_ERROR, "Failed to fetch latest version") -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="update_now") -def update_now(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="update_now") +def update_now(self: SH, *args, **kwargs): """Run update""" if config.disabled_func["update"]: return self.return_txt(HTTPStatus.OK, json.dumps({"status": 0, "message": "UPDATE FEATURE IS UNAVAILABLE !"})) @@ -2392,8 +1052,8 @@ def update_now(self: SimpleHTTPRequestHandler, *args, **kwargs): else: return self.return_txt(HTTPStatus.OK, json.dumps({"status": 0, "message": "UPDATE FAILED !"})) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="size") -def get_size(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="size") +def get_size(self: SH, *args, **kwargs): """Return size of the file""" url_path = kwargs.get('url_path', '') @@ -2414,8 +1074,8 @@ def get_size(self: 
SimpleHTTPRequestHandler, *args, **kwargs): "humanbyte": humanbyte, "fmbyte": fmbyte})) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="size_n_count") -def get_size_n_count(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="size_n_count") +def get_size_n_count(self: SH, *args, **kwargs): """Return size of the file""" url_path = kwargs.get('url_path', '') @@ -2438,8 +1098,8 @@ def get_size_n_count(self: SimpleHTTPRequestHandler, *args, **kwargs): "count": count})) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="czip") -def create_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="czip") +def create_zip(self: SH, *args, **kwargs): """Create ZIP task and return ID""" path = kwargs.get('path', '') url_path = kwargs.get('url_path', '') @@ -2464,7 +1124,7 @@ def create_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): head = directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, PY_PUBLIC_URL=config.address(), - PY_DIR_TREE_NO_JS=self.dir_navigator(displaypath)) + PY_DIR_TREE_NO_JS=dir_navigator(displaypath)) tail = _zip_script().safe_substitute(PY_ZIP_ID = zid, PY_ZIP_NAME = filename) @@ -2474,8 +1134,8 @@ def create_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): self.log_error(traceback.format_exc()) return self.return_txt(HTTPStatus.OK, "ERROR") -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="zip") -def get_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="zip") +def get_zip(self: SH, *args, **kwargs): """Return ZIP file if available Else return progress of the task""" path = kwargs.get('path', '') @@ -2509,7 +1169,7 @@ def get_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): if query("download"): path = zip_manager.zip_ids[id] - return self.return_file(path, first, last, filename) + return self.return_file(path, filename, True) if query("progress"): @@ -2523,13 +1183,13 @@ def get_zip(self: SimpleHTTPRequestHandler, *args, **kwargs): if zip_manager.zip_id_status[id].startswith("ERROR"): return self.return_txt(HTTPStatus.OK, zip_manager.zip_id_status[id]) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="json") -def send_ls_json(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="json") +def send_ls_json(self: SH, *args, **kwargs): """Send directory listing in JSON format""" - return self.list_directory_json() + return list_directory_json(self) -@SimpleHTTPRequestHandler.on_req('HEAD', hasQ="vid") -def send_video_page(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', hasQ="vid") +def send_video_page(self: SH, *args, **kwargs): # SEND VIDEO PLAYER path = kwargs.get('path', '') url_path = kwargs.get('url_path', '') @@ -2545,35 +1205,33 @@ def send_video_page(self: SimpleHTTPRequestHandler, *args, **kwargs): - title = self.get_titles(displaypath) + title = get_titles(displaypath, file=True) r.append(directory_explorer_header().safe_substitute(PY_PAGE_TITLE=title, PY_PUBLIC_URL=config.address(), - PY_DIR_TREE_NO_JS=self.dir_navigator(displaypath))) + PY_DIR_TREE_NO_JS= dir_navigator(displaypath))) + ctype = self.guess_type(path) + warning = "" - r.append("
    ") + if ctype not in ['video/mp4', 'video/ogg', 'video/webm']: + warning = ('

    It seems the HTML player may not be able to play this video format. Try downloading it instead.

    ') - if self.guess_type(path) not in ['video/mp4', 'video/ogg', 'video/webm']: - r.append('

    It seems HTML player can\'t play this Video format, Try Downloading

    ') - else: - ctype = self.guess_type(path) - r.append(_video_script().safe_substitute(PY_VID_SOURCE=vid_source, - PY_CTYPE=ctype)) - - r.append(f'
    Download') + r.append(_video_script().safe_substitute(PY_VID_SOURCE=vid_source, + PY_FILE_NAME = displaypath.split("/")[-1], + PY_CTYPE=ctype, + PY_UNSUPPORT_WARNING=warning)) - r.append('\n
    \n\n\n') encoded = '\n'.join(r).encode(enc, 'surrogateescape') return self.return_txt(HTTPStatus.OK, encoded, write_log=False) -@SimpleHTTPRequestHandler.on_req('HEAD', url_regex="/@assets/.*") -def send_assets(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('HEAD', url_regex="/@assets/.*") +def send_assets(self: SH, *args, **kwargs): """Send assets""" if not config.ASSETS: self.send_error(HTTPStatus.NOT_FOUND, "Assets not available") @@ -2581,10 +1239,7 @@ def send_assets(self: SimpleHTTPRequestHandler, *args, **kwargs): path = kwargs.get('path', '') - url_path = kwargs.get('url_path', '') spathsplit = kwargs.get('spathsplit', '') - first = kwargs.get('first', '') - last = kwargs.get('last', '') path = config.ASSETS_dir + "/".join(spathsplit[2:]) # print("USING ASSETS", path) @@ -2593,19 +1248,14 @@ def send_assets(self: SimpleHTTPRequestHandler, *args, **kwargs): self.send_error(HTTPStatus.NOT_FOUND, "File not found") return None - return self.return_file(path, first, last) + return self.return_file(path) -@SimpleHTTPRequestHandler.on_req('HEAD') -def default_get(self: SimpleHTTPRequestHandler, filename=None, *args, **kwargs): +@SH.on_req('HEAD') +def default_get(self: SH, filename=None, *args, **kwargs): """Serve a GET request.""" path = kwargs.get('path', '') - url_path = kwargs.get('url_path', '') - spathsplit = kwargs.get('spathsplit', '') - first = kwargs.get('first', '') - last = kwargs.get('last', '') - if os.path.isdir(path): parts = urllib.parse.urlsplit(self.path) @@ -2625,13 +1275,14 @@ def default_get(self: SimpleHTTPRequestHandler, filename=None, *args, **kwargs): path = index break else: - return self.list_directory(path) + return list_directory(self, path) # check for trailing "/" which should return 404. See Issue17324 # The test for this was added in test_httpserver.py # However, some OS platforms accept a trailingSlash as a filename # See discussion on python-dev and Issue34711 regarding # parseing and rejection of filenames with a trailing slash + if path.endswith("/"): self.send_error(HTTPStatus.NOT_FOUND, "File not found") return None @@ -2654,144 +1305,7 @@ def default_get(self: SimpleHTTPRequestHandler, filename=None, *args, **kwargs): - -class DealPostData: - """do_login - -#get starting boundary -0: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' -1: b'Content-Disposition: form-data; name="post-type"\r\n' -2: b'\r\n' -3: b'login\r\n' -4: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' -5: b'Content-Disposition: form-data; name="username"\r\n' -6: b'\r\n' -7: b'xxx\r\n' -8: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa\r\n' -9: b'Content-Disposition: form-data; name="password"\r\n' -10: b'\r\n' -11: b'ccc\r\n' -12: b'------WebKitFormBoundary7RGDIyjMpWhLXcZa--\r\n' -""" - - - boundary = b'' - num = 0 - blank = 0 # blank is used to check if the post is empty or Connection Aborted - remainbytes = 0 - - def __init__(self, req:SimpleHTTPRequestHandler) -> None: - self.req = req - - - refresh = "

    " - - - def get(self, show=F, strip=F): - """ - show: print line - strip: strip \r\n at end - """ - req = self.req - line = req.rfile.readline() - - if line == b'': - self.blank += 1 - else: - self.blank = 0 - if self.blank>=20: # allow 20 loss packets - req.send_error(408, "Request Timeout") - time.sleep(1) # wait for the client to close the connection - - raise ConnectionAbortedError - if show: - self.num+=1 - print(f"{self.num}: {line}") - self.remainbytes -= len(line) - - if strip and line.endswith(b"\r\n"): - line = line.rpartition(b"\r\n")[0] - - return line - - def pass_bound(self): - line = self.get() - if not self.boundary in line: - self.req.log_error("Content NOT begin with boundary\n", [line, self.boundary]) - - def get_name(self, line=None, ): - if not line: - line = self.get() - try: - return re.findall(r'Content-Disposition.*name="(.*?)"', line.decode())[0] - except: return None - - def match_name(self, field_name=''): - """ - field_name: name of the field (str) - """ - line = self.get() - if field_name and self.get_name(line)!=field_name: - raise PostError(f"Invalid request: Expected {field_name} but got {self.get_name(line)}") - - return line - - - def skip(self,): - self.get() - - def start(self): - '''reads upto line 0''' - req = self.req - content_type = req.headers['content-type'] - - if not content_type: - raise PostError("Content-Type header doesn't contain boundary") - self.boundary = content_type.split("=")[1].encode() - - self.remainbytes = int(req.headers['content-length']) - - - self.pass_bound()# LINE 0 - - - def get_part(self, verify_name='', verify_msg='', decode=F): - '''read a form field - ends at boundary''' - decoded = False - field_name = self.match_name(verify_name) # LINE 1 (field name) - # if not verified, raise PostError - - self.skip() # LINE 2 (blank line) - - line = b'' - while 1: - _line = self.get() # from LINE 4 till boundary (form field value) - if self.boundary in _line: # boundary - break - line += _line - - line = line.rpartition(b"\r\n")[0] # remove \r\n at end - if decode: - line = line.decode() - decoded = True - if verify_msg: - _line = line - if not decoded: - _line = line.decode() - if _line != verify_msg: - raise PostError(f"Invalid post request Expected: {[verify_msg]} Got: {[_line]}") - - # self.pass_bound() # LINE 5 (boundary) - - return field_name, line - - - - - - -def AUTHORIZE_POST(req: SimpleHTTPRequestHandler, post:DealPostData, post_type=''): +def AUTHORIZE_POST(req: SH, post:DPD, post_type=''): """Check if the user is authorized to post""" # START @@ -2822,14 +1336,14 @@ def AUTHORIZE_POST(req: SimpleHTTPRequestHandler, post:DealPostData, post_type=' -@SimpleHTTPRequestHandler.on_req('POST', hasQ="upload") -def upload(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="upload") +def upload(self: SH, *args, **kwargs): """GET Uploaded files""" path = kwargs.get('path') url_path = kwargs.get('url_path') - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'upload') @@ -2844,11 +1358,7 @@ def upload(self: SimpleHTTPRequestHandler, *args, **kwargs): # PASSWORD SYSTEM password = post.get_part(verify_name='password', decode=T)[1] - # if post.get_type()!="password": - # return self.send_txt(HTTPStatus.BAD_REQUEST, "Invalid request") - # post.skip() - # password= post.get(F) self.log_debug(f'post password: {[password]} by client') if password != config.PASSWORD: # readline returns password with \r\n at end self.log_info(f"Incorrect password by {uid}") @@ -2924,14 
+1434,14 @@ def upload(self: SimpleHTTPRequestHandler, *args, **kwargs): -@SimpleHTTPRequestHandler.on_req('POST', hasQ="del-f") -def del_2_recycle(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="del-f") +def del_2_recycle(self: SH, *args, **kwargs): """Move 2 recycle bin""" path = kwargs.get('path') url_path = kwargs.get('url_path') - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'del-f') @@ -2966,14 +1476,14 @@ def del_2_recycle(self: SimpleHTTPRequestHandler, *args, **kwargs): -@SimpleHTTPRequestHandler.on_req('POST', hasQ="del-p") -def del_permanently(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="del-p") +def del_permanently(self: SH, *args, **kwargs): """DELETE files permanently""" path = kwargs.get('path') url_path = kwargs.get('url_path') - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'del-p') @@ -3004,14 +1514,14 @@ def del_permanently(self: SimpleHTTPRequestHandler, *args, **kwargs): -@SimpleHTTPRequestHandler.on_req('POST', hasQ="rename") -def rename_content(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="rename") +def rename_content(self: SH, *args, **kwargs): """Rename files""" path = kwargs.get('path') url_path = kwargs.get('url_path') - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'rename') @@ -3044,8 +1554,8 @@ def rename_content(self: SimpleHTTPRequestHandler, *args, **kwargs): -@SimpleHTTPRequestHandler.on_req('POST', hasQ="info") -def get_info(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="info") +def get_info(self: SH, *args, **kwargs): """Get files permanently""" path = kwargs.get('path') url_path = kwargs.get('url_path') @@ -3053,7 +1563,7 @@ def get_info(self: SimpleHTTPRequestHandler, *args, **kwargs): script = None - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'info') @@ -3156,13 +1666,13 @@ def get_dt(time): -@SimpleHTTPRequestHandler.on_req('POST', hasQ="new_folder") -def new_folder(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST', hasQ="new_folder") +def new_folder(self: SH, *args, **kwargs): """Create new folder""" path = kwargs.get('path') url_path = kwargs.get('url_path') - post = DealPostData(self) + post = DPD(self) # AUTHORIZE uid = AUTHORIZE_POST(self, post, 'new_folder') @@ -3196,8 +1706,8 @@ def new_folder(self: SimpleHTTPRequestHandler, *args, **kwargs): -@SimpleHTTPRequestHandler.on_req('POST') -def default_post(self: SimpleHTTPRequestHandler, *args, **kwargs): +@SH.on_req('POST') +def default_post(self: SH, *args, **kwargs): raise PostError("Bad Request") @@ -3232,104 +1742,6 @@ def default_post(self: SimpleHTTPRequestHandler, *args, **kwargs): - - - - - - -def _get_best_family(*address): - infos = socket.getaddrinfo( - *address, - type=socket.SOCK_STREAM, - flags=socket.AI_PASSIVE - ) - family, type, proto, canonname, sockaddr = next(iter(infos)) - return family, sockaddr - -def get_ip(): - IP = '127.0.0.1' - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.settimeout(0) - try: - # doesn't even have to be reachable - s.connect(('10.255.255.255', 1)) - IP = s.getsockname()[0] - except: - try: - if config.OS=="Android": - IP = s.connect(("192.168.43.1", 1)) - IP = s.getsockname()[0] - # Assigning this variable because Android does't return actual IP when hosting a hotspot - except (socket.herror, OSError): - pass - finally: - s.close() 
- return IP - - -def test(HandlerClass=BaseHTTPRequestHandler, - ServerClass=ThreadingHTTPServer, - protocol="HTTP/1.0", port=8000, bind=None): - """Test the HTTP request handler class. - - This runs an HTTP server on port 8000 (or the port argument). - - """ - - global httpd - if sys.version_info>(3,7,2): # BACKWARD COMPATIBILITY - ServerClass.address_family, addr = _get_best_family(bind, port) - else: - addr =(bind if bind!=None else '', port) - - HandlerClass.protocol_version = protocol - httpd = ServerClass(addr, HandlerClass) - host, port = httpd.socket.getsockname()[:2] - url_host = f'[{host}]' if ':' in host else host - hostname = socket.gethostname() - local_ip = config.IP if config.IP else get_ip() - config.IP= local_ip - - - on_network = local_ip!="127.0.0.1" - - print(tools.text_box( - f"Serving HTTP on {host} port {port} \n", #TODO: need to check since the output is "Serving HTTP on :: port 6969" - f"(http://{url_host}:{port}/) ...\n", #TODO: need to check since the output is "(http://[::]:6969/) ..." - f"Server is probably running on\n", - (f"[over NETWORK] {config.address()}\n" if on_network else ""), - f"[on DEVICE] http://localhost:{config.port} & http://127.0.0.1:{config.port}" - , style="star", sep="" - ) - ) - try: - httpd.serve_forever() - except KeyboardInterrupt: - print("\nKeyboard interrupt received, exiting.") - - except OSError: - print("\nOSError received, exiting.") - finally: - if not config.reload: - sys.exit(0) - - -class DualStackServer(ThreadingHTTPServer): # UNSUPPORTED IN PYTHON 3.7 - - def handle_error(self, request, client_address): - pass - - def server_bind(self): - # suppress exception when protocol is IPv4 - with contextlib.suppress(Exception): - self.socket.setsockopt( - socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) - return super().server_bind() - - def finish_request(self, request, client_address): - self.RequestHandlerClass(request, client_address, self, - directory=config.ftp_dir) @@ -3345,6 +1757,7 @@ def finish_request(self, request, client_address): ${PY_PAGE_TITLE} + @@ -3584,13 +1997,16 @@ def finish_request(self, request, client_address): .file-name, .link_name { display: inline-block; word-wrap: break-all; + overflow-wrap: anywhere; width: 70%; } .link_name { width: calc(100% - 57px); + } + .file-remove { padding: 5px 7px; margin: 0 5px; @@ -3881,9 +2297,7 @@ def finish_request(self, request, client_address): - - @@ -4030,7 +2444,12 @@ class Tools { } download(dataurl, filename = null, new_tab=false) { const link = createElement("a"); - link.href = dataurl+"?dl"; + var Q = "?dl" + // if ? in URL as Query, then use & to add dl + if(dataurl.indexOf("?") > -1){ + Q = "&dl" + } + link.href = dataurl+Q; link.download = filename; if(new_tab){ link.target = "_blank"; @@ -4265,7 +2684,8 @@ class Toaster { -""" + +""" ##################################################### @@ -4892,6 +3312,10 @@ class ContextMenu { +

    Watching: ${PY_FILE_NAME}

    + +

    ${PY_UNSUPPORT_WARNING}

    +
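
Note for readers skimming the diff: the video page above is assembled with the `Template` subclass introduced earlier in this patch (a `string.Template` that supports `+` concatenation) and `safe_substitute` filling the `${PY_...}` placeholders. The sketch below only illustrates that pattern with made-up fragment strings and values; it is not the shipped `html_vid.html` / `global_script.html` content.

```python
from string import Template as _Template

class Template(_Template):
    """string.Template that can be concatenated, mirroring the subclass in server.py."""
    def __add__(self, other):
        if isinstance(other, _Template):
            return Template(self.template + other.template)
        return Template(self.template + str(other))

# Hypothetical stand-ins for global_script.html and html_vid.html
_global_script = Template("<script>/* shared page helpers */</script>\n")
_video_page = Template(
    "<h2>Watching: ${PY_FILE_NAME}</h2>\n"
    "${PY_UNSUPPORT_WARNING}\n"
    '<video controls><source src="${PY_VID_SOURCE}" type="${PY_CTYPE}"></video>'
)

page = (_global_script + _video_page).safe_substitute(
    PY_FILE_NAME="clip.mkv",
    PY_VID_SOURCE="/videos/clip.mkv",
    PY_CTYPE="video/x-matroska",
    # only set for formats the browser probably can't play (anything outside mp4/ogg/webm)
    PY_UNSUPPORT_WARNING="It seems the HTML player may not be able to play this video format. Try downloading it instead.",
)
print(page)  # safe_substitute leaves unknown ${...} placeholders intact instead of raising KeyError
```

Using `safe_substitute` rather than `substitute` lets a template fragment mention placeholders that a given handler does not fill without raising, which is why the handlers above can share one header/script template across pages.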