diff --git a/ui/ModelModal/src/ModelModal.tsx b/ui/ModelModal/src/ModelModal.tsx index 65d8296..0d5d1c7 100644 --- a/ui/ModelModal/src/ModelModal.tsx +++ b/ui/ModelModal/src/ModelModal.tsx @@ -31,7 +31,7 @@ import { getLocaleMessage } from './constants/locale'; import './assets/fonts/iconfont'; import { lightTheme } from './theme'; import { isValidURL } from './utils'; -import { getModelGroup, getModelLogo } from './utils/model'; +import { getModelGroup, getModelLogo, shouldDisableTemperatureForModel } from './utils/model'; import Fuse from 'fuse.js'; const titleMap: Record = { @@ -108,6 +108,9 @@ export const ModelModal: React.FC = ({ const providerBrand = watch('provider'); const baseUrl = watch('base_url'); + const modelName = watch('model_name'); + const temperature = watch('temperature'); + const isTemperatureDisabled = shouldDisableTemperatureForModel(modelName || ''); // 判断是否需要显示手动输入模型名称(无法自动获取模型列表的情况) const shouldShowManualModelInput = () => { @@ -521,6 +524,12 @@ export const ModelModal: React.FC = ({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [data, open]); + useEffect(() => { + if (isTemperatureDisabled && temperature !== null) { + setValue('temperature', null); + } + }, [isTemperatureDisabled, setValue, temperature]); + return ( = ({ render={({ field }) => ( = ({ render={({ field }) => ( field.onChange(val as number) } @@ -1558,6 +1569,19 @@ export const ModelModal: React.FC = ({ /> )} /> + {isTemperatureDisabled && ( + + 当前模型不支持自定义 temperature,将忽略该参数。 + + )} diff --git a/ui/ModelModal/src/utils/model.ts b/ui/ModelModal/src/utils/model.ts index e47c94b..c6b99b9 100644 --- a/ui/ModelModal/src/utils/model.ts +++ b/ui/ModelModal/src/utils/model.ts @@ -99,6 +99,20 @@ export function isCodeModel(model_id: string, provider: string): boolean { return /(?:^o3$|.*(code|claude\s+sonnet|claude\s+opus|gpt-4\.1|gpt-4o|gpt-5|gemini[\s-]+2\.5|o4-mini|kimi-k2).*)/i.test(modelId) || false } +export function shouldDisableTemperatureForModel(model_id: 
string): boolean { + if (!model_id) { + return false + } + + const modelId = getLowerBaseModelName(model_id, '/') + return ( + modelId.startsWith('o1') || + modelId.startsWith('o3') || + modelId.startsWith('o4') || + modelId.startsWith('gpt-5') + ) +} + export function isReasoningModel(model_id: string, provider: string): boolean { if (!model_id || isEmbeddingModel(model_id, provider) || isRerankModel(model_id)) { return false diff --git a/usecase/helper.go b/usecase/helper.go index 121c480..e7e2edc 100644 --- a/usecase/helper.go +++ b/usecase/helper.go @@ -730,10 +730,12 @@ func buildOpenAIChatConfig(md *domain.ModelMetadata) *openai.ChatModelConfig { t = *md.Temperature } cfg := &openai.ChatModelConfig{ - APIKey: md.APIKey, - BaseURL: md.BaseURL, - Model: string(md.ModelName), - Temperature: &t, + APIKey: md.APIKey, + BaseURL: md.BaseURL, + Model: string(md.ModelName), + } + if !shouldIgnoreOpenAITemperature(string(md.ModelName)) { + cfg.Temperature = &t } if md.MaxTokens != nil { cfg.MaxTokens = md.MaxTokens @@ -778,6 +780,13 @@ func buildOpenAIChatConfig(md *domain.ModelMetadata) *openai.ChatModelConfig { return cfg } +func shouldIgnoreOpenAITemperature(model string) bool { + model = strings.ToLower(model) + return strings.HasPrefix(model, "o1") || + strings.HasPrefix(model, "o3") || + strings.HasPrefix(model, "o4") || strings.HasPrefix(model, "gpt-5") +} + func newDeepseekChatModel(ctx context.Context, md *domain.ModelMetadata) (model.BaseChatModel, error) { t := float32(0.0) if md.Temperature != nil { diff --git a/usecase/helper_test.go b/usecase/helper_test.go index ac7709e..9650e62 100644 --- a/usecase/helper_test.go +++ b/usecase/helper_test.go @@ -37,6 +37,46 @@ func TestBuildOpenAIChatConfig_Azure(t *testing.T) { } } +func TestBuildOpenAIChatConfig_IgnoreTemperatureForReasoningModels(t *testing.T) { + temp := float32(1.0) + testCases := []struct { + name string + modelName string + ignored bool + }{ + {name: "o1", modelName: "o1-preview", ignored: true}, + {name: "o3", modelName: "o3-mini",
ignored: true}, + {name: "o4", modelName: "o4-mini", ignored: true}, + {name: "gpt-5", modelName: "gpt-5-mini", ignored: true}, + {name: "regular model", modelName: "gpt-4.1-mini", ignored: false}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + md := &domain.ModelMetadata{ + ModelName: tc.modelName, + Temperature: &temp, + } + + cfg := buildOpenAIChatConfig(md) + + if tc.ignored { + if cfg.Temperature != nil { + t.Fatalf("expected temperature to be ignored for model %q, got %v", tc.modelName, *cfg.Temperature) + } + return + } + + if cfg.Temperature == nil { + t.Fatalf("expected temperature to be preserved for model %q", tc.modelName) + } + if *cfg.Temperature != temp { + t.Fatalf("expected temperature %v for model %q, got %v", temp, tc.modelName, *cfg.Temperature) + } + }) + } +} + func TestCheckModel_TemperaturePassed(t *testing.T) { testName := "TestCheckModel_TemperaturePassed_Provider=Moonshot_Model=kimi-k2.5_Temp=1" // 1. Setup a test server to intercept the request