Skip to content

Commit

Permalink
gpt_academic text mask implementation
Browse files — browse the repository at this point in the history
  • Loading branch information
binary-husky committed Jan 20, 2024
1 parent f2e73aa commit 142b516
Show file tree
Hide file tree
Showing 5 changed files with 191 additions and 84 deletions.
5 changes: 3 additions & 2 deletions core_functional.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# 'stop' 颜色对应 theme.py 中的 color_er
import importlib
from toolbox import clear_line_break
from toolbox import build_gpt_academic_masked_string
from textwrap import dedent

def get_core_functions():
Expand Down Expand Up @@ -32,12 +33,12 @@ def get_core_functions():
"Prefix": r"",
# 后缀,会被加在你的输入之后。例如,配合前缀可以把你的输入内容用引号圈起来
"Suffix":
dedent("\n"+r'''
dedent("\n"+f'''
==============================
使用mermaid flowchart对以上文本进行总结,概括上述段落的内容以及内在逻辑关系,例如:
以下是对以上文本的总结,以mermaid flowchart的形式展示:
```mermaid
```{build_gpt_academic_masked_string(text_show_llm="mermaid", text_show_render="")}
flowchart LR
A["节点名1"] --> B("节点名2")
B --> C{"节点名3"}
Expand Down
4 changes: 3 additions & 1 deletion request_llms/bridge_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import tiktoken, copy
from functools import lru_cache
from concurrent.futures import ThreadPoolExecutor
from toolbox import get_conf, trimmed_format_exc
from toolbox import get_conf, trimmed_format_exc, apply_gpt_academic_string_mask

from .bridge_chatgpt import predict_no_ui_long_connection as chatgpt_noui
from .bridge_chatgpt import predict as chatgpt_ui
Expand Down Expand Up @@ -668,6 +668,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
"""
import threading, time, copy

inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm")
model = llm_kwargs['llm_model']
n_model = 1
if '&' not in model:
Expand Down Expand Up @@ -741,6 +742,7 @@ def predict(inputs, llm_kwargs, *args, **kwargs):
additional_fn代表点击的哪个按钮,按钮见functional.py
"""

inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm")
method = model_info[llm_kwargs['llm_model']]["fn_with_ui"] # 如果这里报错,检查config中的AVAIL_LLM_MODELS选项
yield from method(inputs, llm_kwargs, *args, **kwargs)

0 comments on commit 142b516

Please sign in to comment.