Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions pkg/model/provider/anthropic/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@ import (
// Client represents an Anthropic client wrapper implementing provider.Provider
// It holds the anthropic client and model config
type Client struct {
client anthropic.Client
config *latest.ModelConfig
client anthropic.Client
config *latest.ModelConfig
modelOptions options.ModelOptions
// When using the Docker AI Gateway, tokens are short-lived. We rebuild
// the client per request when in gateway mode.
useGateway bool
Expand Down Expand Up @@ -114,6 +115,7 @@ func NewClient(ctx context.Context, cfg *latest.ModelConfig, env environment.Pro
return &Client{
client: client,
config: cfg,
modelOptions: globalOptions,
useGateway: useGateway,
gatewayBaseURL: gatewayBaseURL,
}, nil
Expand Down Expand Up @@ -402,3 +404,8 @@ func ConvertParametersToSchema(params tools.FunctionParameters) anthropic.ToolIn
// ID returns this client's unique identifier in "provider/model" form.
func (c *Client) ID() string {
	providerName := c.config.Provider
	modelName := c.config.Model
	return providerName + "/" + modelName
}

// Options reports the model options this client was configured with.
func (c *Client) Options() options.ModelOptions {
	opts := c.modelOptions
	return opts
}
42 changes: 42 additions & 0 deletions pkg/model/provider/clone.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
package provider

import (
"context"
"log/slog"
"strings"

latest "github.com/docker/cagent/pkg/config/v2"
"github.com/docker/cagent/pkg/environment"
"github.com/docker/cagent/pkg/model/provider/options"
)

// CloneWithOptions returns a new Provider instance using the same
// provider/model as the base provider, applying the provided options on top
// of the base provider's existing options (later opts take precedence).
//
// The clone is best-effort: if the base ID is malformed or constructing the
// new provider fails, the original base provider is returned unchanged so
// callers always receive a usable Provider. A nil base yields nil.
func CloneWithOptions(ctx context.Context, base Provider, env environment.Provider, opts ...options.Opt) Provider {
	if base == nil {
		return nil
	}

	// Provider IDs have the form "<provider>/<model>". Reject IDs without a
	// separator or with an empty provider or model part, rather than building
	// a half-empty ModelConfig that would fail later in New.
	id := strings.TrimSpace(base.ID())
	providerName, modelName, ok := strings.Cut(id, "/")
	if !ok || providerName == "" || modelName == "" {
		return base
	}

	cfg := &latest.ModelConfig{Provider: providerName, Model: modelName}
	if env == nil {
		env = environment.NewDefaultProvider(ctx)
	}

	// Preserve the base provider's existing options, then apply overrides.
	// Later opts take precedence when applied.
	baseOpts := options.FromModelOptions(base.Options())
	mergedOpts := append(baseOpts, opts...)

	cloned, err := New(ctx, cfg, env, mergedOpts...)
	if err != nil {
		// Fall back to the base provider rather than surfacing the error;
		// cloning is an optimization, not a requirement.
		slog.Debug("Failed to clone provider; using base provider", "error", err, "id", id)
		return base
	}
	return cloned
}
5 changes: 5 additions & 0 deletions pkg/model/provider/dmr/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -442,6 +442,11 @@ func (c *Client) ID() string {
return c.config.Provider + "/" + c.config.Model
}

// Options reports the model options this client was configured with.
func (c *Client) Options() options.ModelOptions {
	opts := c.modelOptions
	return opts
}

func parseDMRProviderOpts(cfg *latest.ModelConfig) (contextSize int, runtimeFlags []string) {
if cfg == nil {
return 0, nil
Expand Down
5 changes: 5 additions & 0 deletions pkg/model/provider/gemini/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -414,3 +414,8 @@ func (c *Client) CreateChatCompletionStream(
// ID returns this client's unique identifier in "provider/model" form.
func (c *Client) ID() string {
	cfg := c.config
	return cfg.Provider + "/" + cfg.Model
}

// Options reports the model options this client was configured with.
func (c *Client) Options() options.ModelOptions {
	opts := c.modelOptions
	return opts
}
5 changes: 5 additions & 0 deletions pkg/model/provider/openai/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -367,6 +367,11 @@ func (c *Client) ID() string {
return c.config.Provider + "/" + c.config.Model
}

// Options reports the model options this client was configured with.
func (c *Client) Options() options.ModelOptions {
	opts := c.modelOptions
	return opts
}

// getOpenAIReasoningEffort resolves the reasoning effort value from the
// model configuration's ThinkingBudget. Returns the effort (minimal|low|medium|high) or an error
func getOpenAIReasoningEffort(cfg *latest.ModelConfig) (effort string, err error) {
Expand Down
13 changes: 13 additions & 0 deletions pkg/model/provider/options/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,16 @@ func WithStructuredOutput(output *latest.StructuredOutput) Opt {
cfg.StructuredOutput = output
}
}

// FromModelOptions converts a concrete ModelOptions value into the equivalent
// slice of Opt configuration functions. Applying the returned Opts in order
// reproduces the settings carried by m; later Opts override earlier ones when
// applied.
func FromModelOptions(m ModelOptions) []Opt {
	var converted []Opt
	if gateway := m.Gateway(); gateway != "" {
		converted = append(converted, WithGateway(gateway))
	}
	if structured := m.StructuredOutput; structured != nil {
		converted = append(converted, WithStructuredOutput(structured))
	}
	return converted
}
2 changes: 2 additions & 0 deletions pkg/model/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@ type Provider interface {
messages []chat.Message,
tools []tools.Tool,
) (chat.MessageStream, error)
// Options returns the effective model options used by this provider
Options() options.ModelOptions
}

func New(ctx context.Context, cfg *latest.ModelConfig, env environment.Provider, opts ...options.Opt) (Provider, error) {
Expand Down
10 changes: 7 additions & 3 deletions pkg/runtime/runtime.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ import (

"github.com/docker/cagent/pkg/agent"
"github.com/docker/cagent/pkg/chat"
"github.com/docker/cagent/pkg/model/provider"
"github.com/docker/cagent/pkg/model/provider/options"
"github.com/docker/cagent/pkg/modelsdev"
"github.com/docker/cagent/pkg/session"
"github.com/docker/cagent/pkg/team"
Expand Down Expand Up @@ -959,9 +961,11 @@ func (r *runtime) generateSessionTitle(ctx context.Context, sess *session.Sessio
systemPrompt := "You are a helpful AI assistant that generates concise, descriptive titles for conversations. You will be given a conversation history and asked to create a title that captures the main topic."
userPrompt := fmt.Sprintf("Based on the following conversation between a user and an AI assistant, generate a short, descriptive title (maximum 50 characters) that captures the main topic or purpose of the conversation. Return ONLY the title text, nothing else.\n\nConversation history:%s\n\nGenerate a title for this conversation:", conversationHistory.String())

titleModel := provider.CloneWithOptions(ctx, r.CurrentAgent().Model(), nil, options.WithStructuredOutput(nil))

newTeam := team.New(
team.WithID("title-generator"),
team.WithAgents(agent.New("root", systemPrompt, agent.WithModel(r.CurrentAgent().Model()))),
team.WithAgents(agent.New("root", systemPrompt, agent.WithModel(titleModel))),
)

titleSession := session.New(session.WithSystemMessage(systemPrompt))
Expand Down Expand Up @@ -1019,10 +1023,10 @@ func (r *runtime) Summarize(ctx context.Context, sess *session.Session, events c
// Create a new session for summary generation
systemPrompt := "You are a helpful AI assistant that creates comprehensive summaries of conversations. You will be given a conversation history and asked to create a concise yet thorough summary that captures the key points, decisions made, and outcomes."
userPrompt := fmt.Sprintf("Based on the following conversation between a user and an AI assistant, create a comprehensive summary that captures:\n- The main topics discussed\n- Key information exchanged\n- Decisions made or conclusions reached\n- Important outcomes or results\n\nProvide a well-structured summary (2-4 paragraphs) that someone could read to understand what happened in this conversation. Return ONLY the summary text, nothing else.\n\nConversation history:%s\n\nGenerate a summary for this conversation:", conversationHistory.String())

newModel := provider.CloneWithOptions(ctx, r.CurrentAgent().Model(), nil, options.WithStructuredOutput(nil))
newTeam := team.New(
team.WithID("summary-generator"),
team.WithAgents(agent.New("root", systemPrompt, agent.WithModel(r.CurrentAgent().Model()))),
team.WithAgents(agent.New("root", systemPrompt, agent.WithModel(newModel))),
)

summarySession := session.New(session.WithSystemMessage(systemPrompt))
Expand Down
5 changes: 5 additions & 0 deletions pkg/runtime/runtime_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (

"github.com/docker/cagent/pkg/agent"
"github.com/docker/cagent/pkg/chat"
"github.com/docker/cagent/pkg/model/provider/options"
"github.com/docker/cagent/pkg/modelsdev"
"github.com/docker/cagent/pkg/session"
"github.com/docker/cagent/pkg/team"
Expand Down Expand Up @@ -112,6 +113,8 @@ func (m *mockProvider) CreateChatCompletionStream(ctx context.Context, messages
return m.stream, nil
}

// Options returns zero-valued model options; the mock carries no configuration.
func (m *mockProvider) Options() options.ModelOptions {
	var empty options.ModelOptions
	return empty
}

type mockProviderWithError struct {
id string
}
Expand All @@ -122,6 +125,8 @@ func (m *mockProviderWithError) CreateChatCompletionStream(ctx context.Context,
return nil, fmt.Errorf("simulated error creating chat completion stream")
}

// Options returns zero-valued model options; the mock carries no configuration.
func (m *mockProviderWithError) Options() options.ModelOptions {
	var empty options.ModelOptions
	return empty
}

type mockModelStore struct{}

func (m mockModelStore) GetModel(ctx context.Context, id string) (*modelsdev.Model, error) {
Expand Down
Loading