update the connect protocol and the submodules
wwqgtxx committed Jan 1, 2018
1 parent 03113b7 commit c2ae65a
Showing 9 changed files with 61 additions and 55 deletions.
1 change: 1 addition & 0 deletions .idea/dictionaries/Administrator.xml


15 changes: 14 additions & 1 deletion wwqLyParse/lib/lib_wwqLyParse.py
@@ -24,8 +24,11 @@ def init_lib():

init_lib()

get_uuid = lib_wwqLyParse.get_uuid
get_name = lib_wwqLyParse.get_name

def lib_parse(byte_str: bytes):

def _lib_parse(byte_str: bytes):
length = len(byte_str)
result_length = ctypes.c_int()
result_p = ctypes.POINTER(ctypes.c_char)()
@@ -36,3 +39,13 @@ def lib_parse(byte_str: bytes):
result = b''.join(result_arr)
lib_wwqLyParse.free_str(result_p)
return result


if POOL_TYPE == "geventpool":
_pool = ThreadPool()


def lib_parse(byte_str: bytes):
return _pool.apply(_lib_parse, args=(byte_str,))
else:
lib_parse = _lib_parse
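
Note: the new lib_parse wrapper above routes the blocking ctypes call through a thread pool whenever POOL_TYPE == "geventpool", so a long parse cannot stall the cooperative event loop. Below is a minimal sketch of that pattern, assuming gevent's own ThreadPool; the pool class, pool size, and function names are illustrative, not the project's actual workerpool types.

    # Sketch only: offload a blocking native call onto gevent's ThreadPool so
    # other greenlets keep running while the call executes.
    import time

    from gevent.threadpool import ThreadPool

    _pool = ThreadPool(4)  # pool size is an arbitrary choice for this sketch


    def _blocking_parse(byte_str: bytes) -> bytes:
        # stand-in for the real ctypes call into the wwqLyParse DLL
        time.sleep(0.1)
        return byte_str


    def pooled_parse(byte_str: bytes) -> bytes:
        # apply() blocks only the calling greenlet, not the whole hub
        return _pool.apply(_blocking_parse, args=(byte_str,))
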
31 changes: 15 additions & 16 deletions wwqLyParse/main.py
@@ -132,7 +132,7 @@ def init_version():
ver = f.readline().strip()
version['version'] = ver

version['name'] = lib_wwqLyParse.get_name().decode() + version['version'] + "[Include "
version['name'] = get_name().decode() + version['version'] + "[Include "
try:
version['name'] = version['name'] + parser_class_map["YouGetParser"]().get_version() + "&"
except:
@@ -306,31 +306,29 @@ def exit():
# abort(404)

def _handle(data):
req_type = data["type"]
req_data = data["data"]
req_data = lib_parse(req_data)
req_data = lib_parse(data)
req_data = req_data.decode()
logging.debug("input json:" + req_data)
data = json.loads(req_data)
req_url = data["url"]
req_data = data["data"]
try:
result = ""
if req_type == "close":
if req_url == "close":
close()
elif req_type == "GetVersion":
elif req_url == "GetVersion":
result = get_version()
elif req_type == "Parse":
elif req_url == "Parse":
if req_data is not None:
logging.debug("input json:" + req_data)
j_json = req_data
j_json = json.loads(j_json)
logging.debug("load json:" + str(j_json))
result = parse(j_json["input_text"], j_json["types"], j_json["parsers_name"],
j_json["urlhandles_name"])
else:
raise Exception("can't get input json")
elif req_type == "ParseURL":
elif req_url == "ParseURL":
if req_data is not None:
logging.debug("input json:" + req_data)
j_json = req_data
j_json = json.loads(j_json)
logging.debug("load json:" + str(j_json))
result = parse_url(j_json["input_text"], j_json["label"], j_json["min"], j_json["max"],
j_json["urlhandles_name"])
@@ -340,6 +338,7 @@ def _handle(data):
info = traceback.format_exc()
logging.error(info)
result = {"type": "error", "error": info}
result = {"type": "result", "url": req_url, "data": result}
debug(result)
j_json = json.dumps(result)
byte_str = j_json.encode("utf-8")
@@ -352,12 +351,12 @@ def handle(conn: multiprocessing_connection.Connection):
with conn:
logging.debug("parse conn %s" % conn)
while not conn.closed:
data = conn.recv()
data = conn.recv_bytes()
if not data:
break
logging.debug(data)
# logging.debug(data)
result = _handle(data)
conn.send(result)
conn.send_bytes(result)
except EOFError:
pass
except BrokenPipeError:
@@ -366,7 +365,7 @@ def handle(conn: multiprocessing_connection.Connection):

def _run(address):
with WorkerPool(thread_name_prefix="HandlePool") as handle_pool:
with multiprocessing_connection.Listener(address, authkey=lib_wwqLyParse.get_uuid()) as listener:
with multiprocessing_connection.Listener(address, authkey=get_uuid()) as listener:
while True:
try:
conn = listener.accept()
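
Note: with this commit the handler stops exchanging pickled Python objects (conn.send / conn.recv) and moves to raw byte frames (conn.send_bytes / conn.recv_bytes), dispatching on the request's "url" field and wrapping every reply as {"type": "result", "url": ..., "data": ...}. Below is a stripped-down server-side sketch of that framing; it leaves out the lib_parse transform and uses a made-up address and authkey (the real server listens on a Windows named pipe and serves each connection from a worker pool).

    # Sketch of the reworked server loop: byte frames in, JSON envelope out.
    import json
    from multiprocessing.connection import Listener


    def serve_once(address=("localhost", 6000), authkey=b"demo-key"):
        with Listener(address, authkey=authkey) as listener:
            with listener.accept() as conn:
                raw = conn.recv_bytes()                # bytes, not a pickled object
                req = json.loads(raw.decode("utf-8"))  # {"type": "get", "url": ..., "data": ...}
                if req["url"] == "GetVersion":
                    data = {"version": "1.2.4"}        # illustrative payload
                else:
                    data = {}
                reply = {"type": "result", "url": req["url"], "data": data}
                conn.send_bytes(json.dumps(reply).encode("utf-8"))
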
63 changes: 28 additions & 35 deletions wwqLyParse/run.py
@@ -57,7 +57,7 @@ def get_real_path(abstract_path):
ver = f.readline().strip()
CONFIG['version'] = ver

address = r'\\.\pipe\%s@%s' % (CONFIG["pipe"],CONFIG["version"])
address = r'\\.\pipe\%s@%s' % (CONFIG["pipe"], CONFIG["version"])


def get_caller_info():
@@ -130,9 +130,12 @@ def init_lib():
lib_wwqLyParse.get_uuid.restype = ctypes.c_char_p
lib_wwqLyParse.get_name.restype = ctypes.c_char_p
assert lib_wwqLyParse.get_uuid().decode() == CONFIG["uuid"]
logging.debug("successful load lib_wwqLyParse %s" % lib_wwqLyParse)


init_lib()
get_uuid = lib_wwqLyParse.get_uuid
get_name = lib_wwqLyParse.get_name


def lib_parse(byte_str: bytes):
@@ -194,7 +197,7 @@ def is_open(addr):
# if _winapi and getattr(_winapi, "WaitNamedPipe", None):
# _winapi.WaitNamedPipe(addr, 1000)
# else:
with multiprocessing.connection.Client(addr, authkey=lib_wwqLyParse.get_uuid()) as conn:
with multiprocessing.connection.Client(addr, authkey=get_uuid()) as conn:
pass
logging.info(get_caller_info() + "'%s' is open" % addr)
return True
@@ -247,24 +250,23 @@ def init():
raise Exception("can't init server")


def process(url, data, will_refused=False, need_result=True, need_json=True, need_parse=True)->dict:
logging.info(data)
data = data.encode("utf-8")
data = lib_parse(data)
def process(url, data, will_refused=False, need_result=True) -> dict:
req = {"type": "get", "url": url, "data": data}
logging.debug(req)
req = json.dumps(req)
req = req.encode("utf-8")
req = lib_parse(req)
try:
with multiprocessing.connection.Client(address, authkey=lib_wwqLyParse.get_uuid()) as conn:
req = {"type": url, "data": data}
logging.debug(req)
conn.send(req)
with multiprocessing.connection.Client(address, authkey=get_uuid()) as conn:
conn.send_bytes(req)
if will_refused:
return {}
if need_result:
results = conn.recv()
if need_parse:
results = lib_parse(results)
results = conn.recv_bytes()
results = lib_parse(results)
results = results.decode('utf-8')
if need_json:
results = json.loads(results)
results = json.loads(results)
results = results["data"]
return results
else:
return {}
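
Note: on the client side, process() now builds the {"type": "get", "url": ..., "data": ...} envelope itself, JSON-encodes it once, runs it through lib_parse, and ships it with send_bytes; the reply comes back the same way and only its "data" field is returned. Below is a matching client-side sketch, with lib_parse stubbed as the identity and the same placeholder address/authkey as the server sketch above.

    # Sketch of the client-side round trip under the new connect protocol.
    import json
    from multiprocessing.connection import Client


    def lib_parse(byte_str: bytes) -> bytes:
        return byte_str  # placeholder for the real DLL transform


    def process(url, data, address=("localhost", 6000), authkey=b"demo-key") -> dict:
        req = {"type": "get", "url": url, "data": data}
        payload = lib_parse(json.dumps(req).encode("utf-8"))
        with Client(address, authkey=authkey) as conn:
            conn.send_bytes(payload)
            raw = lib_parse(conn.recv_bytes())
        reply = json.loads(raw.decode("utf-8"))  # {"type": "result", "url": ..., "data": ...}
        return reply["data"]


    # e.g. process("GetVersion", {}) returns the server's version dict
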
@@ -286,8 +288,7 @@ def close_server():
if is_open(address):
url = 'close'
values = {}
jjson = json.dumps(values)
process(url, jjson, will_refused=True)
process(url, values, will_refused=True)
for n in range(100):
if not is_open(address):
return
@@ -310,18 +311,18 @@ def get_version():
init()
url = 'GetVersion'
values = {}
jjson = json.dumps(values)
results = process(url, jjson)
results = process(url, values)
assert results["uuid"] == CONFIG["uuid"]
assert lib_wwqLyParse.get_name().decode() in results["name"]
assert get_name().decode() in results["name"]
version = results
logging.info(version)
return version
except AssertionError:
raise
except:
logging.exception("getVersion fail on '%s'" % address)
if need_close:
close_server()
CONFIG["port"] += 1


def Cleanup():
@@ -339,21 +340,17 @@ def GetVersion(debug=False):


def Parse(input_text, types=None, parsers_name=None, urlhandles_name=None):
if not version:
get_version()
error = None
for n in range(3):
try:
init()
url = 'Parse'
# user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
values = {"input_text": input_text,
"types": types,
"parsers_name": parsers_name,
"urlhandles_name": urlhandles_name
}
jjson = json.dumps(values)
results = process(url, jjson)
results = process(url, values)
return results
except Exception as e:
# logging.info(e)
@@ -364,22 +361,18 @@ def Parse(input_text, types=None, parsers_name=None, urlhandles_name=None):


def ParseURL(input_text, label, min=None, max=None, urlhandles_name=None):
if not version:
get_version()
error = None
for n in range(3):
try:
init()
url = 'ParseURL'
# user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
values = {"input_text": input_text,
"label": label,
"min": min,
"max": max,
"urlhandles_name": urlhandles_name
}
jjson = json.dumps(values)
results = process(url, jjson)
results = process(url, values)
return results
except Exception as e:
# logging.info(e)
@@ -405,7 +398,7 @@ def main():
# Cleanup()
# debug(Parse('http://www.iqiyi.com/lib/m_209445514.html?src=search'))
# debug(Parse('http://www.iqiyi.com/a_19rrhacdwt.html#vfrm=2-4-0-1'))
debug(Parse('http://www.iqiyi.com/v_19rrl8pmn8.html'))
# debug(Parse('http://www.iqiyi.com/v_19rrl8pmn8.html'))
# debug(Parse('http://www.iqiyi.com/a_19rrhaare5.html'))
# debug(Parse('http://www.iqiyi.com/a_19rrhbhf6d.html#vfrm=2-3-0-1'))
# debug(Parse('http://www.le.com'))
@@ -431,7 +424,7 @@ def main():
# debug(Parse('http://www.bilibili.com/video/av2557971/')) #don't support
# debug(Parse('http://v.baidu.com/link?url=dm_10tBNoD-LLAMb79CB_p0kxozuoJcW0SiN3eycdo6CdO3GZgQm26uOzZh9fqcNSWZmz9aU9YYCCfT0NmZoGfEMoznyHhz3st-QvlOeyArYdIbhzBbdIrmntA4h1HsSampAs4Z3c17r_exztVgUuHZqChPeZZQ4tlmM5&page=tvplaydetail&vfm=bdvtx&frp=v.baidu.com%2Ftv_intro%2F&bl=jp_video',"formats"))
# debug(Parse('https://www.mgtv.com/b/318221/4222532.html',parsers_name=["MgTVParser"]))
debug(ParseURL('https://www.mgtv.com/b/318221/4222532.html', "3@MgTVParser"))
# debug(ParseURL('https://www.mgtv.com/b/318221/4222532.html', "3@MgTVParser"))
# debug(Parse('http://v.youku.com/v_show/id_XMTYxODUxOTEyNA==.html?f=27502474'))
# debug(Parse('http://v.qq.com/cover/y/yxpn9yol52go2i6.html?vid=f0141icyptp'))
# debug(ParseURL('http://v.qq.com/cover/y/yxpn9yol52go2i6.html?vid=f0141icyptp','4_1080p____-1x-1_2521.9kbps_09:35.240_1_mp4_@LypPvParser'))
@@ -443,5 +436,5 @@ def main():
try:
main()
finally:
pass
# Cleanup()
# pass
Cleanup()
2 changes: 1 addition & 1 deletion wwqLyParse/version.txt
@@ -1 +1 @@
1.2.3
1.2.4
Binary file modified wwqLyParse/wwqLyParse32.dll
Binary file not shown.
Binary file modified wwqLyParse/wwqLyParse64.dll
Binary file not shown.
2 changes: 1 addition & 1 deletion wwqLyParse/ykdl
Submodule ykdl updated 2 files
+11 −0 CHANGELOG.rst
+1 −1 ykdl/version.py
2 changes: 1 addition & 1 deletion wwqLyParse/you-get
