diff --git a/Directory.Packages.props b/Directory.Packages.props
index d5721a141..4b4500203 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -46,7 +46,7 @@
-
+
diff --git a/src/Infrastructure/BotSharp.Abstraction/MLTasks/Settings/LlmModelSetting.cs b/src/Infrastructure/BotSharp.Abstraction/MLTasks/Settings/LlmModelSetting.cs
index e74a7c370..23e055d29 100644
--- a/src/Infrastructure/BotSharp.Abstraction/MLTasks/Settings/LlmModelSetting.cs
+++ b/src/Infrastructure/BotSharp.Abstraction/MLTasks/Settings/LlmModelSetting.cs
@@ -47,7 +47,15 @@ public class LlmModelSetting
/// </summary>
public int Dimension { get; set; }
- public LlmCost Cost { get; set; } = new();
+ /// <summary>
+ /// Settings for reasoning model
+ /// </summary>
+ public ReasoningSetting? Reasoning { get; set; }
+
+ /// <summary>
+ /// Settings for llm cost
+ /// </summary>
+ public LlmCostSetting Cost { get; set; } = new();
public override string ToString()
{
@@ -55,10 +63,16 @@ public override string ToString()
}
}
+public class ReasoningSetting
+{
+ public float Temperature { get; set; } = 1.0f;
+ public string? EffortLevel { get; set; }
+}
+
/// <summary>
/// Cost per 1K tokens
/// </summary>
-public class LlmCost
+public class LlmCostSetting
{
// Input
public float TextInputCost { get; set; } = 0f;
diff --git a/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs
index c9318d79c..99de556f7 100644
--- a/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs
+++ b/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs
@@ -1,3 +1,4 @@
+#pragma warning disable OPENAI001
using BotSharp.Abstraction.Hooks;
using BotSharp.Abstraction.MessageHub.Models;
using BotSharp.Core.Infrastructures.Streams;
@@ -15,16 +16,6 @@ public class ChatCompletionProvider : IChatCompletion
protected string _model;
private List<string> renderedInstructions = [];
- private readonly Dictionary<string, float> _defaultTemperature = new()
- {
- { "o3", 1.0f },
- { "o3-mini", 1.0f },
- { "o4-mini", 1.0f },
- { "gpt-5", 1.0f },
- { "gpt-5-mini", 1.0f },
- { "gpt-5-nano", 1.0f }
- };
-
public virtual string Provider => "openai";
public string Model => _model;
@@ -493,22 +484,24 @@ private string GetPrompt(IEnumerable<ChatMessage> messages, ChatCompletionOption
private ChatCompletionOptions InitChatCompletionOption(Agent agent)
{
var state = _services.GetRequiredService<IConversationStateService>();
+ var settingsService = _services.GetRequiredService<ILlmProviderService>();
+ var settings = settingsService.GetSetting(Provider, _model);
+ ChatReasoningEffortLevel? reasoningEffortLevel = null;
var temperature = float.Parse(state.GetState("temperature", "0.0"));
- if (_defaultTemperature.ContainsKey(_model))
+ if (settings?.Reasoning != null)
{
- temperature = _defaultTemperature[_model];
+ temperature = settings.Reasoning.Temperature;
+ var level = state.GetState("reasoning_effort_level")
+ .IfNullOrEmptyAs(agent?.LlmConfig?.ReasoningEffortLevel ?? string.Empty)
+ .IfNullOrEmptyAs(settings?.Reasoning?.EffortLevel ?? string.Empty);
+ reasoningEffortLevel = ParseReasoningEffortLevel(level);
}
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;
- var level = state.GetState("reasoning_effort_level")
- .IfNullOrEmptyAs(agent?.LlmConfig?.ReasoningEffortLevel ?? string.Empty)
- .IfNullOrEmptyAs(LlmConstant.DEFAULT_REASONING_EFFORT_LEVEL);
- var reasoningEffortLevel = ParseReasoningEffortLevel(level);
-
return new ChatCompletionOptions()
{
Temperature = temperature,
@@ -519,14 +512,17 @@ private ChatCompletionOptions InitChatCompletionOption(Agent agent)
private ChatReasoningEffortLevel? ParseReasoningEffortLevel(string? level)
{
- if (string.IsNullOrWhiteSpace(level) || !_defaultTemperature.ContainsKey(_model))
+ if (string.IsNullOrWhiteSpace(level))
{
return null;
}
- var effortLevel = ChatReasoningEffortLevel.Low;
+ var effortLevel = new ChatReasoningEffortLevel("minimal");
switch (level.ToLower())
{
+ case "low":
+ effortLevel = ChatReasoningEffortLevel.Low;
+ break;
case "medium":
effortLevel = ChatReasoningEffortLevel.Medium;
break;