diff --git a/domain/domain.go b/domain/domain.go index 53a725f..3e0838c 100644 --- a/domain/domain.go +++ b/domain/domain.go @@ -24,6 +24,7 @@ type Response struct { type ModelListResp struct { Models []ModelListItem `json:"models"` + Error string `json:"error"` } type ModelListItem struct { @@ -76,22 +77,22 @@ type ModelResponseParser interface { } type GithubModel struct { - ID string `json:"id"` - Name string `json:"name"` - Registry string `json:"registry"` - Publisher string `json:"publisher"` - Summary string `json:"summary"` - RateLimitTier string `json:"rate_limit_tier"` - HTMLURL string `json:"html_url"` - Version string `json:"version"` - Capabilities []string `json:"capabilities"` - Limits struct { + ID string `json:"id"` + Name string `json:"name"` + Registry string `json:"registry"` + Publisher string `json:"publisher"` + Summary string `json:"summary"` + RateLimitTier string `json:"rate_limit_tier"` + HTMLURL string `json:"html_url"` + Version string `json:"version"` + Capabilities []string `json:"capabilities"` + Limits struct { MaxInputTokens int `json:"max_input_tokens"` MaxOutputTokens int `json:"max_output_tokens"` } `json:"limits"` - Tags []string `json:"tags"` - SupportedInputModalities []string `json:"supported_input_modalities"` - SupportedOutputModalities []string `json:"supported_output_modalities"` + Tags []string `json:"tags"` + SupportedInputModalities []string `json:"supported_input_modalities"` + SupportedOutputModalities []string `json:"supported_output_modalities"` } type GithubResp []GithubModel diff --git a/test/main.go b/test/main.go index be81ef7..53e6182 100644 --- a/test/main.go +++ b/test/main.go @@ -32,9 +32,10 @@ func NewModelKit( func (p *ModelKit) GetModelList(c echo.Context) error { var req domain.ModelListReq if err := c.Bind(&req); err != nil { - return c.JSON(http.StatusBadRequest, domain.Response{ + return c.JSON(http.StatusOK, domain.Response{ Success: false, Message: "参数绑定失败: " + err.Error(), + Data: nil, }) } fmt.Println("list model req:", req) @@ -42,9 +43,10 @@ func (p *ModelKit) GetModelList(c echo.Context) error { resp, err := usecase.ModelList(c.Request().Context(), &req) if err != nil { fmt.Println("err:", err) - return c.JSON(http.StatusInternalServerError, domain.Response{ - Success: false, + return c.JSON(http.StatusOK, domain.Response{ + Success: true, Message: err.Error(), + Data: resp, }) } @@ -65,13 +67,21 @@ func (p *ModelKit) CheckModel(c echo.Context) error { } fmt.Println("check model req:", req) - resp, _ := usecase.CheckModel(c.Request().Context(), &req) + resp, err := usecase.CheckModel(c.Request().Context(), &req) + if err != nil { + fmt.Println("err:", err) + return c.JSON(http.StatusOK, domain.Response{ + Success: true, + Message: err.Error(), + Data: resp, + }) + } // 如果检查过程中有错误,返回错误响应 if resp.Error != "" { fmt.Println("resp.Error:", resp.Error) return c.JSON(http.StatusOK, domain.Response{ - Success: false, + Success: true, Message: resp.Error, Data: resp, }) diff --git a/test/ui_example/src/localService.ts b/test/ui_example/src/localService.ts index d410a75..7cf0239 100644 --- a/test/ui_example/src/localService.ts +++ b/test/ui_example/src/localService.ts @@ -22,6 +22,7 @@ interface ApiResponse { // 模型列表响应 interface ModelListResponse { models: ModelListItem[]; + error?: string; } // 模型检查响应 @@ -75,8 +76,7 @@ export class LocalModelService implements ModelService { return result; } - async listModel(data: ListModelReq): Promise<{ models: ModelListItem[] }> { - try { + async listModel(data: ListModelReq): Promise<{ 
models: ModelListItem[]; error?: string }> { const queryParams = new URLSearchParams(); if (data.provider) queryParams.append('provider', data.provider); if (data.model_type) queryParams.append('model_type', data.model_type); @@ -88,17 +88,12 @@ export class LocalModelService implements ModelService { const response = await this.request(url, { method: 'GET', }); - return { models: response.models }; - } catch (error) { - console.error('Failed to list models:', error); - throw error; - } + console.log('listModel response:', response); + return { models: response.models, error: response.error }; } async checkModel(data: CheckModelReq): Promise<{ model: Model; error?: string }> { - try { const queryParams = new URLSearchParams(); - console.log('checkModel data:', data); if (data.provider) queryParams.append('provider', data.provider); if (data.model_name) queryParams.append('model_name', data.model_name); if (data.base_url) queryParams.append('base_url', data.base_url); @@ -106,19 +101,12 @@ export class LocalModelService implements ModelService { if (data.api_header) queryParams.append('api_header', data.api_header); if (data.model_type) queryParams.append('model_type', data.model_type); const url = `/checkmodel${queryParams.toString() ? '?' + queryParams.toString() : ''}`; + console.log('checkModel url:', url); const response = await this.request(url, { method: 'GET', }); - - if (response.error) { - throw new Error(response.error); - } - - return { model: response.model }; - } catch (error) { - console.error('Failed to list models:', error); - throw error; - } + console.log('checkModel response:', response); + return { model: response.model, error: response.error }; } async updateModel(data: UpdateModelReq): Promise<{ model: Model }> { diff --git a/ui/ModelModal/package.json b/ui/ModelModal/package.json index 0e5af27..0832469 100644 --- a/ui/ModelModal/package.json +++ b/ui/ModelModal/package.json @@ -1,6 +1,6 @@ { "name": "@yokowu/modelkit-ui", - "version": "0.5.1", + "version": "0.6.0", "description": "A reusable AI model configuration modal component for React applications", "private": false, "type": "module", diff --git a/ui/ModelModal/src/ModelModal.tsx b/ui/ModelModal/src/ModelModal.tsx index f0d3c15..d588d39 100644 --- a/ui/ModelModal/src/ModelModal.tsx +++ b/ui/ModelModal/src/ModelModal.tsx @@ -26,7 +26,7 @@ import { getLocaleMessage } from './constants/locale'; import './assets/fonts/iconfont'; import { lightTheme } from './theme'; -const titleMap: Record = { +const titleMap: Record = { ["llm"]: '对话模型', ["chat"]: '对话模型', ["coder"]: '代码补全模型', @@ -131,34 +131,39 @@ export const ModelModal: React.FC = ({ api_key: value.api_key, base_url: value.base_url, provider: value.provider as Exclude, - api_header: value.api_header || header, + api_header: value.api_header || header, }) .then((res) => { - setModelUserList( - (res.models || []) - .filter((item): item is { model: string } => !!item.model) - .sort((a, b) => a.model!.localeCompare(b.model!)) - ); - if ( - data && - (res.models || []).find((it) => it.model === data.model_name) - ) { - setValue('model_name', data.model_name!); + if (res.error) { + messageHandler.error("获取模型失败 " + res.error); + setModelLoading(false); } else { - setValue('model_name', res.models?.[0]?.model || ''); + setModelUserList( + (res.models || []) + .filter((item): item is { model: string } => !!item.model) + .sort((a, b) => a.model!.localeCompare(b.model!)) + ); + if ( + data && + (res.models || []).find((it) => it.model === data.model_name) + ) { + 
setValue('model_name', data.model_name!); + } else { + setValue('model_name', res.models?.[0]?.model || ''); + } + setSuccess(true); } - setSuccess(true); }) .finally(() => { setModelLoading(false); }). catch((res) => { - messageHandler.error("获取模型失败"); + messageHandler.error("获取模型失败 " + res.error); setModelLoading(false); }); - }; + }; - const onSubmit = (value: AddModelForm) => { + const onSubmit = (value: AddModelForm) => { let header = ''; if (value.api_header_key && value.api_header_value) { header = value.api_header_key + '=' + value.api_header_value; @@ -166,14 +171,14 @@ export const ModelModal: React.FC = ({ setError(''); setLoading(true); modelService.checkModel({ - model_type, - model_name: value.model_name, - api_key: value.api_key, - base_url: value.base_url, - api_version: value.api_version, - provider: value.provider, - api_header: value.api_header || header, - } + model_type, + model_name: value.model_name, + api_key: value.api_key, + base_url: value.base_url, + api_version: value.api_version, + provider: value.provider, + api_header: value.api_header || header, + } ) .then((res) => { if (res.error) { @@ -200,15 +205,20 @@ export const ModelModal: React.FC = ({ support_prompt_cache: value.support_prompt_caching, }, }) - .then(() => { - messageHandler.success('修改成功'); - handleReset(); + .then((res) => { + if (res.error) { + messageHandler.error("修改模型失败 " + res.error); + setLoading(false); + } else { + messageHandler.success('修改成功'); + handleReset(); + } }) .finally(() => { setLoading(false); }) .catch((res) => { - messageHandler.error("修改模型失败"); + messageHandler.error("修改模型失败 " + res.error); setLoading(false); }); } else { @@ -230,15 +240,20 @@ export const ModelModal: React.FC = ({ support_prompt_cache: value.support_prompt_caching, }, }) - .then(() => { - messageHandler.success('添加成功'); - handleReset(); + .then((res) => { + if (res.error) { + messageHandler.error("添加模型失败 " + res.error); + setLoading(false); + } else { + messageHandler.success('添加成功'); + handleReset(); + } }) .finally(() => { setLoading(false); }) .catch((res) => { - messageHandler.error("添加模型失败"); + messageHandler.error("添加模型失败 " + res.error); setLoading(false); }); } @@ -272,7 +287,7 @@ export const ModelModal: React.FC = ({ }); } reset({ - model_type, + model_type, provider: value.provider || 'Other', model_name: value.model_name || '', base_url: value.base_url || '', @@ -322,579 +337,579 @@ export const ModelModal: React.FC = ({ return ( - - - - - 模型供应商 - - + + - - {Object.values(providers) - .filter((it) => { - // 根据model_type和provider配置决定是否显示 - switch (model_type) { - case 'chat': - case 'llm': - return it.chat; - case 'code': - case 'coder': - return it.code; - case 'embedding': - return it.embedding; - case 'rerank': - case 'reranker': - return it.rerank; - default: - return it.label === 'BaiZhiCloud' || it.label === 'Other'; - } - }) - .map((it) => ( - { - if (data && data.provider === it.label) { - resetCurData(data); - } else { - setModelUserList([]); - setError(''); - setModelLoading(false); - setSuccess(false); - reset({ - provider: it.label as keyof typeof DEFAULT_MODEL_PROVIDERS, - base_url: - it.label === 'AzureOpenAI' ? 
'' : it.defaultBaseUrl, - model_name: '', - api_version: '', - api_key: '', - api_header_key: '', - api_header_value: '', - show_name: '', - // 重置高级设置 - context_window_size: 64000, - max_output_tokens: 8192, - enable_r1_params: false, - support_image: false, - support_compute: false, - support_prompt_caching: false, - }); - } - }} - > - - {it.cn || it.label || '其他'} - - ))} - - - - - - API 地址{' '} - - * + + 模型供应商 - - ( - { - field.onChange(e.target.value); - setModelUserList([]); - setValue('model_name', ''); - setSuccess(false); - }} - /> - )} - /> - - - API Secret - {providers[providerBrand].secretRequired && ( - - {' '} - * - - )} + + + {Object.values(providers) + .filter((it) => { + // 根据model_type和provider配置决定是否显示 + switch (model_type) { + case 'chat': + case 'llm': + return it.chat; + case 'code': + case 'coder': + return it.code; + case 'embedding': + return it.embedding; + case 'rerank': + case 'reranker': + return it.rerank; + default: + return it.label === 'BaiZhiCloud' || it.label === 'Other'; + } + }) + .map((it) => ( + { + if (data && data.provider === it.label) { + resetCurData(data); + } else { + setModelUserList([]); + setError(''); + setModelLoading(false); + setSuccess(false); + reset({ + provider: it.label as keyof typeof DEFAULT_MODEL_PROVIDERS, + base_url: + it.label === 'AzureOpenAI' ? '' : it.defaultBaseUrl, + model_name: '', + api_version: '', + api_key: '', + api_header_key: '', + api_header_value: '', + show_name: '', + // 重置高级设置 + context_window_size: 64000, + max_output_tokens: 8192, + enable_r1_params: false, + support_image: false, + support_compute: false, + support_prompt_caching: false, + }); + } + }} + > + + {it.cn || it.label || '其他'} + + ))} + - {providers[providerBrand].modelDocumentUrl && ( - - window.open( - providers[providerBrand].modelDocumentUrl, - '_blank' - ) - } - > - 查看文档 - - )} - ( - { - field.onChange(e.target.value); - setModelUserList([]); - setValue('model_name', ''); - setSuccess(false); - }} - /> - )} - /> - {(modelUserList.length !== 0 || providerBrand === 'Other') && ( - <> - - 模型备注 - - * - + + + API 地址{' '} + + * - ( - + + ( + { + field.onChange(e.target.value); + setModelUserList([]); + setValue('model_name', ''); + setSuccess(false); + }} + /> + )} + /> + + + API Secret + {providers[providerBrand].secretRequired && ( + + {' '} + * + )} - /> - - )} - {providerBrand === 'AzureOpenAI' && ( - <> - - API Version - ( - { - field.onChange(e.target.value); - setModelUserList([]); - setValue('model_name', ''); - setSuccess(false); - }} - /> - )} - /> - - )} - {providerBrand === 'Other' ? ( - <> - - 模型名称{' '} - - * + {providers[providerBrand].modelDocumentUrl && ( + + window.open( + providers[providerBrand].modelDocumentUrl, + '_blank' + ) + } + > + 查看文档 - - ( - - )} - /> - - 需要与模型供应商提供的名称完全一致,不要随便填写 - - - ) : modelUserList.length === 0 ? 
( - - ) : ( - <> - - 模型名称{' '} - - * - - - ( - - {modelUserList.map((it) => ( - - {it.model} - - ))} - - )} - /> - {providers[providerBrand].customHeader && ( - <> - - Header + render={({ field }) => ( + { + field.onChange(e.target.value); + setModelUserList([]); + setValue('model_name', ''); + setSuccess(false); + }} + /> + )} + /> + {(modelUserList.length !== 0 || providerBrand === 'Other') && ( + <> + + 模型备注 + + * - - ( - - )} + + ( + - = - ( - - )} + )} + /> + + )} + {providerBrand === 'AzureOpenAI' && ( + <> + + API Version + + ( + { + field.onChange(e.target.value); + setModelUserList([]); + setValue('model_name', ''); + setSuccess(false); + }} /> - - - )} - - - )} - {/* 高级设置部分 - 在选择了模型或者是其它供应商时显示 */} - {(modelUserList.length !== 0 || providerBrand === 'Other') && ( - - + + )} + {providerBrand === 'Other' ? ( + <> + + 模型名称{' '} + + * + + + ( + + )} + /> + + 需要与模型供应商提供的名称完全一致,不要随便填写 + + + ) : modelUserList.length === 0 ? ( + + ) : ( + <> + + 模型名称{' '} + + * + + + ( + + {modelUserList.map((it) => ( + + {it.model} + + ))} + + )} + /> + {providers[providerBrand].customHeader && ( + <> + + Header + + ( - <> - field.onChange(Number(e.target.value))} - /> - - {[ - { label: '128k', value: 128000 }, - { label: '256k', value: 256000 }, - { label: '512k', value: 512000 }, - { label: '1m', value: 1_000_000 } - ].map((option) => ( - field.onChange(option.value)} - > - {option.label} - - ))} - - + )} /> - - - - - 最大输出 Token - + = ( field.onChange(Number(e.target.value))} + placeholder='value' + error={!!errors.api_header_value} + helperText={errors.api_header_value?.message} /> )} /> - - - {/* 复选框组 - 使用更紧凑的布局 */} - - ( - field.onChange(e.target.checked)} + + + )} + + + )} + {/* 高级设置部分 - 在选择了模型或者是其它供应商时显示 */} + {(modelUserList.length !== 0 || providerBrand === 'Other') && ( + + setExpandAdvanced(!expandAdvanced)} + > + + 高级设置 + + + + + + 上下文窗口大小 + + ( + <> + field.onChange(Number(e.target.value))} /> - } - label={ - - 启用 R1 模型参数 - - (使用 QWQ 等 R1 系列模型时必须启用,避免出现 400 错误) - + + {[ + { label: '128k', value: 128000 }, + { label: '256k', value: 256000 }, + { label: '512k', value: 512000 }, + { label: '1m', value: 1_000_000 } + ].map((option) => ( + field.onChange(option.value)} + > + {option.label} + + ))} - } - sx={{ margin: 0 }} - /> - )} - /> + + )} + /> + + + + + 最大输出 Token + + ( + field.onChange(Number(e.target.value))} + /> + )} + /> + + + {/* 复选框组 - 使用更紧凑的布局 */} + + ( + field.onChange(e.target.checked)} + size='small' + /> + } + label={ + + 启用 R1 模型参数 + + (使用 QWQ 等 R1 系列模型时必须启用,避免出现 400 错误) + + + } + sx={{ margin: 0 }} + /> + )} + /> + - - - - - )} - {error && ( - - {error} - - )} - - - + + + + )} + {error && ( + + {error} + + )} + + + ); }; diff --git a/ui/ModelModal/src/types/types.ts b/ui/ModelModal/src/types/types.ts index 3a7a890..80076e4 100644 --- a/ui/ModelModal/src/types/types.ts +++ b/ui/ModelModal/src/types/types.ts @@ -131,10 +131,10 @@ export interface UpdateModelReq { // 模型服务接口 export interface ModelService { - createModel: (data: CreateModelReq) => Promise<{ model: Model }>; - listModel: (data: ListModelReq) => Promise<{ models: ModelListItem[] }>; + createModel: (data: CreateModelReq) => Promise<{ model: Model; error?: string }>; + listModel: (data: ListModelReq) => Promise<{ models: ModelListItem[]; error?: string }>; checkModel: (data: CheckModelReq) => Promise<{ model: Model; error?: string }>; - updateModel: (data: UpdateModelReq) => Promise<{ model: Model }>; + updateModel: (data: UpdateModelReq) => Promise<{ model: Model; error?: string }>; } export interface ModelListItem { 
diff --git a/usecase/modelkit.go b/usecase/modelkit.go index 59d004e..2898ef7 100644 --- a/usecase/modelkit.go +++ b/usecase/modelkit.go @@ -36,14 +36,14 @@ import ( func reqModelListApi[T domain.ModelResponseParser](req *domain.ModelListReq, httpClient *http.Client, responseType T) ([]domain.ModelListItem, error) { u, err := url.Parse(req.BaseURL) if err != nil { - return nil, err + return nil, fmt.Errorf("解析BaseURL失败: %w", err) } u.Path = path.Join(u.Path, "/models") client := request.NewClient(u.Scheme, u.Host, httpClient.Timeout, request.WithClient(httpClient)) query, err := utils.GetQuery(req) if err != nil { - return nil, err + return nil, fmt.Errorf("获取查询参数失败: %w", err) } resp, err := request.Get[T]( client, u.Path, @@ -55,7 +55,7 @@ func reqModelListApi[T domain.ModelResponseParser](req *domain.ModelListReq, htt request.WithQuery(query), ) if err != nil { - return nil, err + return nil, fmt.Errorf("请求模型列表API失败: %w", err) } return (*resp).ParseModels(), nil @@ -86,7 +86,9 @@ func ModelList(ctx context.Context, req *domain.ModelListReq) (*domain.ModelList // get from ollama http://10.10.16.24:11434/api/tags u, err := url.Parse(req.BaseURL) if err != nil { - return nil, err + return &domain.ModelListResp{ + Error: fmt.Errorf("Ollama解析BaseURL失败: %w", err).Error(), + }, nil } u.Path = "/api/tags" client := request.NewClient(u.Scheme, u.Host, httpClient.Timeout, request.WithClient(httpClient)) @@ -101,7 +103,9 @@ func ModelList(ctx context.Context, req *domain.ModelListReq) (*domain.ModelList case consts.ModelProviderGemini: client, err := generativeGenai.NewClient(ctx, option.WithAPIKey(req.APIKey)) if err != nil { - return nil, err + return &domain.ModelListResp{ + Error: fmt.Errorf("创建Gemini客户端失败: %w", err).Error(), + }, nil } defer func() { if closeErr := client.Close(); closeErr != nil { @@ -132,7 +136,9 @@ func ModelList(ctx context.Context, req *domain.ModelListReq) (*domain.ModelList } if len(modelsList) == 0 { - return nil, fmt.Errorf("failed to get gemini models") + return &domain.ModelListResp{ + Error: fmt.Errorf("获取Gemini模型列表失败: 未找到可用模型").Error(), + }, nil } return &domain.ModelListResp{ @@ -141,7 +147,9 @@ func ModelList(ctx context.Context, req *domain.ModelListReq) (*domain.ModelList case consts.ModelProviderGithub: models, err := reqModelListApi(req, httpClient, &domain.GithubResp{}) if err != nil { - return nil, err + return &domain.ModelListResp{ + Error: fmt.Errorf("获取Github模型列表失败: %w", err).Error(), + }, nil } return &domain.ModelListResp{ Models: models, @@ -151,7 +159,9 @@ func ModelList(ctx context.Context, req *domain.ModelListReq) (*domain.ModelList models, err := reqModelListApi(req, httpClient, &domain.OpenAIResp{}) if err != nil { - return nil, err + return &domain.ModelListResp{ + Error: fmt.Errorf("获取OpenAI兼容模型列表失败: %w", err).Error(), + }, nil } return &domain.ModelListResp{ Models: models, @@ -188,19 +198,19 @@ func CheckModel(ctx context.Context, req *domain.CheckModelReq) (*domain.CheckMo } body, err := json.Marshal(reqBody) if err != nil { - checkResp.Error = fmt.Sprintf("marshal request body failed: %s", err.Error()) + checkResp.Error = fmt.Sprintf("序列化请求体失败: %s", err.Error()) return checkResp, nil } request, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(body)) if err != nil { - checkResp.Error = fmt.Sprintf("new request failed: %s", err.Error()) + checkResp.Error = fmt.Sprintf("创建HTTP请求失败: %s", err.Error()) return checkResp, nil } request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", req.APIKey)) 
request.Header.Set("Content-Type", "application/json") resp, err := http.DefaultClient.Do(request) if err != nil { - checkResp.Error = fmt.Sprintf("send request failed: %s", err.Error()) + checkResp.Error = fmt.Sprintf("发送HTTP请求失败: %s", err.Error()) return checkResp, nil } defer func() { @@ -209,7 +219,7 @@ func CheckModel(ctx context.Context, req *domain.CheckModelReq) (*domain.CheckMo } }() if resp.StatusCode != http.StatusOK { - checkResp.Error = fmt.Sprintf("request failed: %s", resp.Status) + checkResp.Error = fmt.Sprintf("HTTP请求失败: %s", resp.Status) return checkResp, nil } return checkResp, nil @@ -225,7 +235,7 @@ func CheckModel(ctx context.Context, req *domain.CheckModelReq) (*domain.CheckMo ModelType: modelType, }) if err != nil { - checkResp.Error = err.Error() + checkResp.Error = fmt.Sprintf("获取聊天模型失败: %s", err.Error()) return checkResp, nil } resp, err := chatModel.Generate(ctx, []*schema.Message{ @@ -233,12 +243,12 @@ func CheckModel(ctx context.Context, req *domain.CheckModelReq) (*domain.CheckMo schema.UserMessage("hi"), }) if err != nil { - checkResp.Error = err.Error() + checkResp.Error = fmt.Sprintf("生成聊天内容失败: %s", err.Error()) return checkResp, nil } content := resp.Content if content == "" { - checkResp.Error = "generate failed" + checkResp.Error = "生成内容失败" return checkResp, nil } checkResp.Content = content @@ -278,7 +288,7 @@ func GetChatModel(ctx context.Context, model *domain.ModelMetadata) (model.BaseC Temperature: temperature, }) if err != nil { - return nil, fmt.Errorf("create chat model failed: %w", err) + return nil, fmt.Errorf("创建DeepSeek聊天模型失败: %w", err) } return chatModel, nil case consts.ModelProviderGemini: @@ -286,7 +296,7 @@ func GetChatModel(ctx context.Context, model *domain.ModelMetadata) (model.BaseC APIKey: model.APIKey, }) if err != nil { - return nil, fmt.Errorf("create genai client failed: %w", err) + return nil, fmt.Errorf("创建Genai客户端失败: %w", err) } chatModel, err := gemini.NewChatModel(ctx, &gemini.Config{ @@ -298,13 +308,13 @@ func GetChatModel(ctx context.Context, model *domain.ModelMetadata) (model.BaseC }, }) if err != nil { - return nil, fmt.Errorf("create chat model failed: %w", err) + return nil, fmt.Errorf("创建Gemini聊天模型失败: %w", err) } return chatModel, nil case consts.ModelProviderOllama: baseUrl, err := utils.URLRemovePath(config.BaseURL) if err != nil { - return nil, fmt.Errorf("ollama url parse failed: %w", err) + return nil, fmt.Errorf("解析Ollama URL失败: %w", err) } chatModel, err := ollama.NewChatModel(ctx, &ollama.ChatModelConfig{ @@ -316,14 +326,14 @@ func GetChatModel(ctx context.Context, model *domain.ModelMetadata) (model.BaseC }, }) if err != nil { - return nil, fmt.Errorf("create chat model failed: %w", err) + return nil, fmt.Errorf("创建Ollama聊天模型失败: %w", err) } return chatModel, nil // 兼容 openai api default: chatModel, err := openai.NewChatModel(ctx, config) if err != nil { - return nil, fmt.Errorf("create chat model failed: %w", err) + return nil, fmt.Errorf("创建OpenAI兼容聊天模型失败: %w", err) } return chatModel, nil }