Commit 121614f

chore: add model_params_setting to document, knowledge, and paragraph processing
--story=1018694 --user=刘瑞斌 [Phoenix] When choosing a model for knowledge base question generation, allow setting model parameters https://www.tapd.cn/62980211/s/1768601
1 parent 5ed5d27 commit 121614f

6 files changed, +49 −13 lines

apps/knowledge/serializers/document.py

Lines changed: 2 additions & 1 deletion
@@ -1305,6 +1305,7 @@ def batch_generate_related(self, instance: Dict, with_valid=True):
         document_id_list = instance.get("document_id_list")
         model_id = instance.get("model_id")
         prompt = instance.get("prompt")
+        model_params_setting = instance.get("model_params_setting")
         state_list = instance.get('state_list')
         ListenerManagement.update_status(
             QuerySet(Document).filter(id__in=document_id_list),
@@ -1327,7 +1328,7 @@ def batch_generate_related(self, instance: Dict, with_valid=True):
             QuerySet(Document).filter(id__in=document_id_list))()
         try:
             for document_id in document_id_list:
-                generate_related_by_document_id.delay(document_id, model_id, prompt, state_list)
+                generate_related_by_document_id.delay(document_id, model_id, model_params_setting, prompt, state_list)
         except AlreadyQueued as e:
             pass
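For context, below is a minimal, self-contained sketch (not project code) of the dispatch pattern this serializer change follows: the optional model_params_setting dict is read from the request instance and forwarded positionally before prompt, so each .delay() call must match the updated Celery task signature. The stand-in task, the payload values, and the example parameter keys (temperature, max_tokens) are illustrative assumptions, not values mandated by this commit.

```python
# Hedged sketch of the serializer-to-task hand-off; names and values are
# illustrative assumptions only.

def generate_related_by_document_id(document_id, model_id,
                                    model_params_setting, prompt,
                                    state_list=None):
    """Stand-in for the Celery task: shows the argument order it now expects."""
    print(document_id, model_id, model_params_setting, prompt, state_list)


instance = {
    "document_id_list": ["doc-1", "doc-2"],
    "model_id": "model-42",
    "prompt": "Generate questions for: {data}",
    "model_params_setting": {"temperature": 0.7, "max_tokens": 1024},
}

model_params_setting = instance.get("model_params_setting")
for document_id in instance["document_id_list"]:
    # The real serializer calls generate_related_by_document_id.delay(...) here.
    generate_related_by_document_id(
        document_id,
        instance["model_id"],
        model_params_setting,
        instance["prompt"],
        instance.get("state_list"),
    )
```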

apps/knowledge/serializers/knowledge.py

Lines changed: 2 additions & 1 deletion
@@ -267,6 +267,7 @@ def generate_related(self, instance: Dict, with_valid=True):
         knowledge_id = self.data.get('knowledge_id')
         model_id = instance.get("model_id")
         prompt = instance.get("prompt")
+        model_params_setting = instance.get("model_params_setting")
         state_list = instance.get('state_list')
         ListenerManagement.update_status(
             QuerySet(Document).filter(knowledge_id=knowledge_id),
@@ -285,7 +286,7 @@ def generate_related(self, instance: Dict, with_valid=True):
         )
         ListenerManagement.get_aggregation_document_status_by_knowledge_id(knowledge_id)()
         try:
-            generate_related_by_knowledge_id.delay(knowledge_id, model_id, prompt, state_list)
+            generate_related_by_knowledge_id.delay(knowledge_id, model_id, model_params_setting, prompt, state_list)
         except AlreadyQueued as e:
             raise AppApiException(500, _('Failed to send the vectorization task, please try again later!'))

apps/knowledge/serializers/paragraph.py

Lines changed: 2 additions & 1 deletion
@@ -480,6 +480,7 @@ def batch_generate_related(self, instance: Dict, with_valid=True):
         paragraph_id_list = instance.get("paragraph_id_list")
         model_id = instance.get("model_id")
         prompt = instance.get("prompt")
+        model_params_setting = instance.get("model_params_setting")
         document_id = self.data.get('document_id')
         ListenerManagement.update_status(
             QuerySet(Document).filter(id=document_id),
@@ -493,7 +494,7 @@ def batch_generate_related(self, instance: Dict, with_valid=True):
         )
         ListenerManagement.get_aggregation_document_status(document_id)()
         try:
-            generate_related_by_paragraph_id_list.delay(document_id, paragraph_id_list, model_id, prompt)
+            generate_related_by_paragraph_id_list.delay(document_id, paragraph_id_list, model_id, model_params_setting, prompt)
         except AlreadyQueued as e:
             raise AppApiException(500, _('The task is being executed, please do not send it again.'))

apps/knowledge/task/generate.py

Lines changed: 8 additions & 8 deletions
@@ -18,9 +18,9 @@
 from ops import celery_app
 
 
-def get_llm_model(model_id):
+def get_llm_model(model_id, model_params_setting=None):
     model = QuerySet(Model).filter(id=model_id).first()
-    return ModelManage.get_model(model_id, lambda _id: get_model(model))
+    return ModelManage.get_model(model_id, lambda _id: get_model(model, **(model_params_setting or {})))
 
 
 def generate_problem_by_paragraph(paragraph, llm_model, prompt):
@@ -64,18 +64,18 @@ def is_the_task_interrupted():
 
 @celery_app.task(base=QueueOnce, once={'keys': ['knowledge_id']},
                  name='celery:generate_related_by_knowledge')
-def generate_related_by_knowledge_id(knowledge_id, model_id, prompt, state_list=None):
+def generate_related_by_knowledge_id(knowledge_id, model_id, model_params_setting, prompt, state_list=None):
     document_list = QuerySet(Document).filter(knowledge_id=knowledge_id)
     for document in document_list:
         try:
-            generate_related_by_document_id.delay(document.id, model_id, prompt, state_list)
+            generate_related_by_document_id.delay(document.id, model_id, model_params_setting, prompt, state_list)
         except Exception as e:
             pass
 
 
 @celery_app.task(base=QueueOnce, once={'keys': ['document_id']},
                  name='celery:generate_related_by_document')
-def generate_related_by_document_id(document_id, model_id, prompt, state_list=None):
+def generate_related_by_document_id(document_id, model_id, model_params_setting, prompt, state_list=None):
     if state_list is None:
         state_list = [State.PENDING.value, State.STARTED.value, State.SUCCESS.value, State.FAILURE.value,
                       State.REVOKE.value,
@@ -87,7 +87,7 @@ def generate_related_by_document_id(document_id, model_id, prompt, state_list=No
         ListenerManagement.update_status(QuerySet(Document).filter(id=document_id),
                                          TaskType.GENERATE_PROBLEM,
                                          State.STARTED)
-        llm_model = get_llm_model(model_id)
+        llm_model = get_llm_model(model_id, model_params_setting)
 
         # Question generation function
         generate_problem = get_generate_problem(llm_model, prompt,
@@ -110,7 +110,7 @@ def generate_related_by_document_id(document_id, model_id, prompt, state_list=No
 
 @celery_app.task(base=QueueOnce, once={'keys': ['paragraph_id_list']},
                  name='celery:generate_related_by_paragraph_list')
-def generate_related_by_paragraph_id_list(document_id, paragraph_id_list, model_id, prompt):
+def generate_related_by_paragraph_id_list(document_id, paragraph_id_list, model_id, model_params_setting, prompt):
     try:
         is_the_task_interrupted = get_is_the_task_interrupted(document_id)
         if is_the_task_interrupted():
@@ -121,7 +121,7 @@ def generate_related_by_paragraph_id_list(document_id, paragraph_id_list, model_
         ListenerManagement.update_status(QuerySet(Document).filter(id=document_id),
                                          TaskType.GENERATE_PROBLEM,
                                          State.STARTED)
-        llm_model = get_llm_model(model_id)
+        llm_model = get_llm_model(model_id, model_params_setting)
         # Question generation function
         generate_problem = get_generate_problem(llm_model, prompt, ListenerManagement.get_aggregation_document_status(
             document_id))
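A minimal sketch of why get_llm_model unpacks the new argument as **(model_params_setting or {}): a missing or None setting degrades to an empty keyword-argument dict, while a populated dict overrides the factory's defaults. The get_model stand-in and its default values below are assumptions for illustration, not the project's real get_model/ModelManage implementation.

```python
# Hedged sketch of the kwargs-forwarding pattern used by get_llm_model.

def get_model(model_name, temperature=0.8, max_tokens=512, **kwargs):
    """Stand-in model factory: returns the effective configuration."""
    return {"model": model_name, "temperature": temperature,
            "max_tokens": max_tokens, **kwargs}


def get_llm_model(model_name, model_params_setting=None):
    # None (or an absent setting) collapses to {}, so the call stays safe
    # when no parameters were configured in the UI.
    return get_model(model_name, **(model_params_setting or {}))


print(get_llm_model("stand-in-model"))
# {'model': 'stand-in-model', 'temperature': 0.8, 'max_tokens': 512}
print(get_llm_model("stand-in-model", {"temperature": 0.2}))
# {'model': 'stand-in-model', 'temperature': 0.2, 'max_tokens': 512}
```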

ui/src/components/generate-related-dialog/index.vue

Lines changed: 33 additions & 1 deletion
@@ -28,7 +28,21 @@
           <p>{{ $t('views.document.generateQuestion.tip4') }}</p>
         </div>
       </div>
-      <el-form-item :label="$t('views.application.form.aiModel.label')" prop="model_id">
+      <el-form-item prop="model_id">
+        <template #label>
+          <div class="flex-between">
+            <span>{{ $t('views.application.form.aiModel.label') }}</span>
+            <el-button
+              type="primary"
+              link
+              @click="openAIParamSettingDialog"
+              :disabled="!form.model_id"
+            >
+              <AppIcon iconName="app-setting" class="mr-4"></AppIcon>
+              {{ $t('common.paramSetting') }}
+            </el-button>
+          </div>
+        </template>
         <ModelSelect
           v-model="form.model_id"
           :placeholder="$t('views.application.form.aiModel.placeholder')"
@@ -68,6 +82,8 @@
       </span>
     </template>
   </el-dialog>
+  <AIModeParamSettingDialog ref="AIModeParamSettingDialogRef" @refresh="refreshForm" />
+
 </template>
 <script setup lang="ts">
 import { reactive, ref, watch, computed } from 'vue'
@@ -78,6 +94,7 @@ import { MsgSuccess } from '@/utils/message'
 import { t } from '@/locales'
 import type { FormInstance } from 'element-plus'
 import { loadSharedApi } from '@/utils/dynamics-api/shared-api'
+import AIModeParamSettingDialog from "@/views/application/component/AIModeParamSettingDialog.vue";
 
 const props = defineProps<{
   apiType: 'systemShare' | 'workspace' | 'systemManage' | 'workspaceShare'
@@ -130,6 +147,21 @@ watch(dialogVisible, (bool) => {
     FormRef.value?.clearValidate()
   }
 })
+const AIModeParamSettingDialogRef = ref()
+const openAIParamSettingDialog = () => {
+  if (form.value.model_id) {
+    AIModeParamSettingDialogRef.value?.open(
+      form.value.model_id,
+      id,
+      form.value.model_params_setting,
+    )
+  }
+}
+
+function refreshForm(data: any) {
+  form.value.model_params_setting = data
+}
+
 
 const open = (ids: string[], type: string, _knowledge?: any) => {
   currentKnowledge.value = _knowledge

ui/src/stores/modules/prompt.ts

Lines changed: 2 additions & 1 deletion
@@ -2,7 +2,7 @@ import { defineStore } from 'pinia'
 import { t } from '@/locales'
 export interface promptTypes {
   user: string
-  formValue: { model_id: string; prompt: string }
+  formValue: { model_id: string; prompt: string; model_params_setting: any }
 }
 
 const usePromptStore = defineStore('prompt', {
@@ -25,6 +25,7 @@ const usePromptStore = defineStore('prompt', {
     }
     return {
       model_id: '',
+      model_params_setting: {},
       prompt:
         t('views.document.generateQuestion.prompt1', { data: '{data}' }) +
         '<question></question>' +

0 commit comments