26 changes: 25 additions & 1 deletion ui/ModelModal/src/ModelModal.tsx
@@ -31,7 +31,7 @@ import { getLocaleMessage } from './constants/locale';
import './assets/fonts/iconfont';
import { lightTheme } from './theme';
import { isValidURL } from './utils';
import { getModelGroup, getModelLogo } from './utils/model';
import { getModelGroup, getModelLogo, shouldDisableTemperatureForModel } from './utils/model';
import Fuse from 'fuse.js';

const titleMap: Record<string, string> = {
@@ -108,6 +108,9 @@ export const ModelModal: React.FC<ModelModalProps> = ({

const providerBrand = watch('provider');
const baseUrl = watch('base_url');
const modelName = watch('model_name');
const temperature = watch('temperature');
const isTemperatureDisabled = shouldDisableTemperatureForModel(modelName || '');

  // Decide whether to show manual model-name input (for cases where the model list cannot be fetched automatically)
const shouldShowManualModelInput = () => {
@@ -521,6 +524,12 @@ export const ModelModal: React.FC<ModelModalProps> = ({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data, open]);

useEffect(() => {
if (isTemperatureDisabled && temperature !== null) {
setValue('temperature', null);
}
}, [isTemperatureDisabled, setValue, temperature]);

return (
<ThemeProvider theme={lightTheme}>
<Modal
@@ -1510,6 +1519,7 @@ export const ModelModal: React.FC<ModelModalProps> = ({
render={({ field }) => (
<TextField
value={field.value ?? ''}
disabled={isTemperatureDisabled}
size='small'
type='number'
inputProps={{
@@ -1543,6 +1553,7 @@ export const ModelModal: React.FC<ModelModalProps> = ({
render={({ field }) => (
<Slider
value={field.value ?? 0}
disabled={isTemperatureDisabled}
onChange={(_, val) =>
field.onChange(val as number)
}
@@ -1558,6 +1569,19 @@
/>
)}
/>
{isTemperatureDisabled && (
<Box
sx={{
mt: 1,
ml: 2,
fontSize: 12,
lineHeight: '18px',
color: 'text.secondary',
}}
>
当前模型不支持自定义 temperature,将忽略该参数。
</Box>
)}
</Box>
</Stack>
</AccordionDetails>
14 changes: 14 additions & 0 deletions ui/ModelModal/src/utils/model.ts
@@ -99,6 +99,20 @@ export function isCodeModel(model_id: string, provider: string): boolean {
return /(?:^o3$|.*(code|claude\s+sonnet|claude\s+opus|gpt-4\.1|gpt-4o|gpt-5|gemini[\s-]+2\.5|o4-mini|kimi-k2).*)/i.test(modelId) || false
}

export function shouldDisableTemperatureForModel(model_id: string): boolean {
if (!model_id) {
return false
}

const modelId = getLowerBaseModelName(model_id, '/')
return (
modelId.startsWith('o1') ||
modelId.startsWith('o3') ||
modelId.startsWith('o4') ||
modelId.startsWith('gpt-5')
)
}

export function isReasoningModel(model_id: string, provider: string): boolean {
if (!model_id || isEmbeddingModel(model_id, provider) || isRerankModel(model_id)) {
return false
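For reviewers, a quick behavioral sketch of the new helper. These calls are hypothetical and not part of the diff; they assume getLowerBaseModelName lower-cases the id and strips any provider prefix before the '/':

shouldDisableTemperatureForModel('o3-mini')            // true  -> temperature controls get disabled
shouldDisableTemperatureForModel('openai/GPT-5-mini')  // true  -> prefix stripped and lower-cased, matches gpt-5
shouldDisableTemperatureForModel('gpt-4o')             // false -> temperature stays editable
shouldDisableTemperatureForModel('')                   // false -> an empty name never disables the field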
17 changes: 13 additions & 4 deletions usecase/helper.go
@@ -730,10 +730,12 @@ func buildOpenAIChatConfig(md *domain.ModelMetadata) *openai.ChatModelConfig {
t = *md.Temperature
}
cfg := &openai.ChatModelConfig{
APIKey: md.APIKey,
BaseURL: md.BaseURL,
Model: string(md.ModelName),
Temperature: &t,
APIKey: md.APIKey,
BaseURL: md.BaseURL,
Model: string(md.ModelName),
}
if !shouldIgnoreOpenAITemperature(md.ModelName) {
cfg.Temperature = &t
}
if md.MaxTokens != nil {
cfg.MaxTokens = md.MaxTokens
@@ -778,6 +780,13 @@ func buildOpenAIChatConfig(md *domain.ModelMetadata) *openai.ChatModelConfig {
return cfg
}

func shouldIgnoreOpenAITemperature(model string) bool {
return strings.HasPrefix(model, "o1") ||
strings.HasPrefix(model, "o3") ||
strings.HasPrefix(model, "o4") ||
strings.HasPrefix(model, "gpt-5")
}

func newDeepseekChatModel(ctx context.Context, md *domain.ModelMetadata) (model.BaseChatModel, error) {
t := float32(0.0)
if md.Temperature != nil {
40 changes: 40 additions & 0 deletions usecase/helper_test.go
@@ -37,6 +37,46 @@ func TestBuildOpenAIChatConfig_Azure(t *testing.T) {
}
}

func TestBuildOpenAIChatConfig_IgnoreTemperatureForReasoningModels(t *testing.T) {
temp := float32(1.0)
testCases := []struct {
name string
modelName string
ignored bool
}{
{name: "o1", modelName: "o1-preview", ignored: true},
{name: "o3", modelName: "o3-mini", ignored: true},
{name: "o4", modelName: "o4-mini", ignored: true},
{name: "gpt-5", modelName: "gpt-5-mini", ignored: true},
{name: "regular model", modelName: "gpt-4.1-mini", ignored: false},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
md := &domain.ModelMetadata{
ModelName: tc.modelName,
Temperature: &temp,
}

cfg := buildOpenAIChatConfig(md)

if tc.ignored {
if cfg.Temperature != nil {
t.Fatalf("expected temperature to be ignored for model %q, got %v", tc.modelName, *cfg.Temperature)
}
return
}

if cfg.Temperature == nil {
t.Fatalf("expected temperature to be preserved for model %q", tc.modelName)
}
if *cfg.Temperature != temp {
t.Fatalf("expected temperature %v for model %q, got %v", temp, tc.modelName, *cfg.Temperature)
}
})
}
}

func TestCheckModel_TemperaturePassed(t *testing.T) {
testName := "TestCheckModel_TemperaturePassed_Provider=Moonshot_Model=kimi-k2.5_Temp=1"
// 1. Setup a test server to intercept the request