Skip to content

Commit f7c0a93

Browse files
committed
fix: Exclude reasoning parameter for WebLLM model compatibility
1 parent 9b397f9 commit f7c0a93

File tree

2 files changed

+3
-1
lines changed

2 files changed

+3
-1
lines changed

utils/llm/models.ts

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -112,6 +112,7 @@ export async function getModel(options: {
112112
},
113113
})
114114
options.onLoadingModel?.({ type: 'finished' })
115+
// WebLLM does not support reasoning parameter, so we do not pass it
115116
model = new WebLLMChatLanguageModel(
116117
options.model,
117118
engine,

utils/llm/providers/web-llm/openai-compatible-chat-language-model.ts

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -144,7 +144,8 @@ export class WebLLMChatLanguageModel implements LanguageModelV1 {
144144
seed,
145145
...providerMetadata?.[this.providerOptionsName],
146146

147-
reasoning_effort: providerMetadata?.[this.providerOptionsName]?.reasoningEffort ?? providerMetadata?.['openai-compatible']?.reasoningEffort,
147+
// Note: WebLLM does not support reasoning_effort parameter
148+
// This parameter is ignored for WebLLM models
148149

149150
// messages:
150151
messages: convertToOpenAICompatibleChatMessages(prompt),

0 commit comments

Comments (0)