import logging

from ops import celery_app
19
19
20
20
21
def get_llm_model(model_id, model_params_setting=None):
    """Resolve and cache the LLM instance for *model_id*.

    :param model_id: primary key of the ``Model`` row to load.
    :param model_params_setting: optional dict of extra keyword arguments
        forwarded to ``get_model`` (e.g. temperature / max tokens); ``None``
        is treated as an empty mapping.
    :return: the model object produced by ``ModelManage.get_model``.
    """
    # Look up the model record first; the lambda below closes over it so the
    # (potentially expensive) instantiation only runs on a cache miss.
    record = QuerySet(Model).filter(id=model_id).first()
    params = model_params_setting or {}
    return ModelManage.get_model(model_id, lambda _id: get_model(record, **params))
24
24
25
25
26
26
def generate_problem_by_paragraph (paragraph , llm_model , prompt ):
@@ -64,18 +64,18 @@ def is_the_task_interrupted():
64
64
65
65
@celery_app.task(base=QueueOnce, once={'keys': ['knowledge_id']},
                 name='celery:generate_related_by_knowledge')
def generate_related_by_knowledge_id(knowledge_id, model_id, model_params_setting, prompt, state_list=None):
    """Fan out "generate related" work to every document in a knowledge base.

    For each ``Document`` belonging to *knowledge_id*, enqueue a
    ``generate_related_by_document_id`` Celery task. Dispatch is
    best-effort: a failure to enqueue one document does not stop the
    remaining documents from being scheduled.

    :param knowledge_id: knowledge base whose documents are processed.
    :param model_id: LLM model id forwarded to the per-document task.
    :param model_params_setting: model parameter overrides forwarded to the
        per-document task (may be ``None``).
    :param prompt: generation prompt forwarded to the per-document task.
    :param state_list: optional list of task states forwarded downstream.
    """
    document_list = QuerySet(Document).filter(knowledge_id=knowledge_id)
    for document in document_list:
        try:
            generate_related_by_document_id.delay(document.id, model_id, model_params_setting, prompt, state_list)
        except Exception:
            # Fix: the original `except Exception as e: pass` silently
            # swallowed dispatch failures. Keep best-effort semantics but
            # record the error so stuck documents are diagnosable.
            logging.getLogger(__name__).exception(
                'Failed to dispatch generate_related task for document %s', document.id)
74
74
75
75
76
76
@celery_app .task (base = QueueOnce , once = {'keys' : ['document_id' ]},
77
77
name = 'celery:generate_related_by_document' )
78
- def generate_related_by_document_id (document_id , model_id , prompt , state_list = None ):
78
+ def generate_related_by_document_id (document_id , model_id , model_params_setting , prompt , state_list = None ):
79
79
if state_list is None :
80
80
state_list = [State .PENDING .value , State .STARTED .value , State .SUCCESS .value , State .FAILURE .value ,
81
81
State .REVOKE .value ,
@@ -87,7 +87,7 @@ def generate_related_by_document_id(document_id, model_id, prompt, state_list=No
87
87
ListenerManagement .update_status (QuerySet (Document ).filter (id = document_id ),
88
88
TaskType .GENERATE_PROBLEM ,
89
89
State .STARTED )
90
- llm_model = get_llm_model (model_id )
90
+ llm_model = get_llm_model (model_id , model_params_setting )
91
91
92
92
# 生成问题函数
93
93
generate_problem = get_generate_problem (llm_model , prompt ,
@@ -110,7 +110,7 @@ def generate_related_by_document_id(document_id, model_id, prompt, state_list=No
110
110
111
111
@celery_app .task (base = QueueOnce , once = {'keys' : ['paragraph_id_list' ]},
112
112
name = 'celery:generate_related_by_paragraph_list' )
113
- def generate_related_by_paragraph_id_list (document_id , paragraph_id_list , model_id , prompt ):
113
+ def generate_related_by_paragraph_id_list (document_id , paragraph_id_list , model_id , model_params_setting , prompt ):
114
114
try :
115
115
is_the_task_interrupted = get_is_the_task_interrupted (document_id )
116
116
if is_the_task_interrupted ():
@@ -121,7 +121,7 @@ def generate_related_by_paragraph_id_list(document_id, paragraph_id_list, model_
121
121
ListenerManagement .update_status (QuerySet (Document ).filter (id = document_id ),
122
122
TaskType .GENERATE_PROBLEM ,
123
123
State .STARTED )
124
- llm_model = get_llm_model (model_id )
124
+ llm_model = get_llm_model (model_id , model_params_setting )
125
125
# 生成问题函数
126
126
generate_problem = get_generate_problem (llm_model , prompt , ListenerManagement .get_aggregation_document_status (
127
127
document_id ))
0 commit comments