
Commit

…emic into master
binary-sky committed Jun 30, 2023
2 parents ecb08e6 + 28c1e3f commit bf805cf
Showing 2 changed files with 13 additions and 4 deletions.
11 changes: 10 additions & 1 deletion request_llm/bridge_all.py
@@ -152,7 +152,7 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
     },
 
-    # chatglm
+    # chatglm (now aligned directly to chatglm2)
     "chatglm": {
         "fn_with_ui": chatglm_ui,
         "fn_without_ui": chatglm_noui,
@@ -161,6 +161,15 @@ def decode(self, *args, **kwargs):
         "tokenizer": tokenizer_gpt35,
         "token_cnt": get_token_num_gpt35,
     },
+    "chatglm2": {
+        "fn_with_ui": chatglm_ui,
+        "fn_without_ui": chatglm_noui,
+        "endpoint": None,
+        "max_token": 1024,
+        "tokenizer": tokenizer_gpt35,
+        "token_cnt": get_token_num_gpt35,
+    },
+
     # newbing
     "newbing": {
         "fn_with_ui": newbing_ui,
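Taken together, the bridge_all.py hunks register a new "chatglm2" key in the model routing table, reusing the existing chatglm bridge functions and the gpt-3.5 tokenizer for token counting. Below is a minimal, self-contained sketch of how a registry shaped like this can be used for dispatch; the fake_chatglm_noui and count_tokens helpers and the predict_no_ui wrapper are illustrative stand-ins, not the repository's actual code.

# Sketch only: a registry entry shaped like the one added in the diff,
# plus a tiny dispatcher. Helper names here are hypothetical placeholders.

def fake_chatglm_noui(inputs, history, sys_prompt):
    # Placeholder for the real fn_without_ui bridge function.
    return f"[chatglm2 reply to: {inputs}]"

def count_tokens(txt):
    # Placeholder token counter; the real table reuses the gpt-3.5 tokenizer.
    return len(txt.split())

model_info = {
    "chatglm2": {
        "fn_with_ui": None,               # streaming / UI variant omitted in this sketch
        "fn_without_ui": fake_chatglm_noui,
        "endpoint": None,                 # local model, no HTTP endpoint
        "max_token": 1024,
        "token_cnt": count_tokens,
    },
}

def predict_no_ui(model_name, inputs, history=None, sys_prompt=""):
    # Look up the requested model and dispatch to its non-UI bridge function.
    entry = model_info[model_name]
    if entry["token_cnt"](inputs) > entry["max_token"]:
        raise ValueError("prompt exceeds the model's max_token budget")
    return entry["fn_without_ui"](inputs, history or [], sys_prompt)

if __name__ == "__main__":
    print(predict_no_ui("chatglm2", "Hello"))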
6 changes: 3 additions & 3 deletions request_llm/bridge_chatglm.py
@@ -40,12 +40,12 @@ def run(self):
         while True:
             try:
                 if self.chatglm_model is None:
-                    self.chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
+                    self.chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True)
                     device, = get_conf('LOCAL_MODEL_DEVICE')
                     if device=='cpu':
-                        self.chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float()
+                        self.chatglm_model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).float()
                     else:
-                        self.chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
+                        self.chatglm_model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).half().cuda()
                     self.chatglm_model = self.chatglm_model.eval()
                     break
                 else:
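The bridge_chatglm.py hunk simply points the lazy loader at the THUDM/chatglm2-6b checkpoint: full precision on CPU, half precision on CUDA. A standalone sketch of the same loading pattern follows; it assumes transformers and torch are installed (the checkpoint is downloaded on first use), and the chat() call is the method exposed by the ChatGLM remote code pulled in via trust_remote_code.

# Sketch only: load chatglm2-6b the same way the bridge does,
# float32 on CPU and float16 on GPU, then run one query.
import torch
from transformers import AutoModel, AutoTokenizer

def load_chatglm2(device: str = "cpu"):
    tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True)
    if device == "cpu":
        model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).float()
    else:
        model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).half().cuda()
    return tokenizer, model.eval()

if __name__ == "__main__":
    device = "cuda" if torch.cuda.is_available() else "cpu"
    tokenizer, model = load_chatglm2(device)
    # chat() comes from the model's remote code loaded via trust_remote_code.
    response, history = model.chat(tokenizer, "Hello", history=[])
    print(response)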
