Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions pkg/agentdrain/coordinator.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ func NewCoordinator(cfg Config, stages []string) (*Coordinator, error) {
// TrainEvent routes the event to the miner responsible for evt.Stage.
// Returns an error when the stage has no associated miner.
func (c *Coordinator) TrainEvent(evt AgentEvent) (*MatchResult, error) {
coordinatorLog.Printf("TrainEvent: routing to stage=%s", evt.Stage)
m, err := c.minerFor(evt.Stage)
if err != nil {
return nil, err
Expand All @@ -46,6 +47,7 @@ func (c *Coordinator) TrainEvent(evt AgentEvent) (*MatchResult, error) {
// AnalyzeEvent routes the event to the correct stage miner and returns both
// the match result and an anomaly report.
func (c *Coordinator) AnalyzeEvent(evt AgentEvent) (*MatchResult, *AnomalyReport, error) {
coordinatorLog.Printf("AnalyzeEvent: routing to stage=%s", evt.Stage)
m, err := c.minerFor(evt.Stage)
if err != nil {
return nil, nil, err
Expand All @@ -67,6 +69,7 @@ func (c *Coordinator) AllClusters() map[string][]Cluster {
// SaveSnapshots serializes each stage miner's state and returns a map from
// stage name to JSON bytes.
func (c *Coordinator) SaveSnapshots() (map[string][]byte, error) {
coordinatorLog.Printf("SaveSnapshots: serializing %d stage miners", len(c.miners))
c.mu.RLock()
defer c.mu.RUnlock()
out := make(map[string][]byte, len(c.miners))
Expand All @@ -77,6 +80,7 @@ func (c *Coordinator) SaveSnapshots() (map[string][]byte, error) {
}
out[stage] = data
}
coordinatorLog.Printf("SaveSnapshots: completed, saved %d stages", len(out))
return out, nil
}

Expand Down Expand Up @@ -135,10 +139,12 @@ func (c *Coordinator) SaveWeightsJSON() ([]byte, error) {
// LoadWeightsJSON restores all stage miners from a combined JSON blob produced
// by SaveWeightsJSON.
func (c *Coordinator) LoadWeightsJSON(data []byte) error {
coordinatorLog.Printf("LoadWeightsJSON: loading from %d bytes", len(data))
var combined map[string]json.RawMessage
if err := json.Unmarshal(data, &combined); err != nil {
return fmt.Errorf("agentdrain: LoadWeightsJSON: %w", err)
}
coordinatorLog.Printf("LoadWeightsJSON: restoring %d stages", len(combined))
snapshots := make(map[string][]byte, len(combined))
for stage, raw := range combined {
snapshots[stage] = []byte(raw)
Expand Down
9 changes: 7 additions & 2 deletions pkg/parser/import_error.go
Original file line number Diff line number Diff line change
Expand Up @@ -151,15 +151,18 @@ func findImportsFieldLocation(yamlContent string) (line int, column int) {
if strings.HasPrefix(trimmed, "imports:") {
// Find the column where "imports:" starts
col := strings.Index(line, "imports:") + 1 // +1 for 1-based indexing
return i + 1, col // +1 for 1-based line indexing
importErrorLog.Printf("Found imports field at line=%d, col=%d", i+1, col)
return i + 1, col // +1 for 1-based line indexing
}
}
// Default to line 1, column 1 if not found
importErrorLog.Print("imports field not found in YAML content, defaulting to line=1, col=1")
return 1, 1
}

// findImportItemLocation finds the line and column number of a specific import item in YAML content
func findImportItemLocation(yamlContent string, importPath string) (line int, column int) {
importErrorLog.Printf("Locating import item in YAML: path=%s", importPath)
lines := strings.Split(yamlContent, "\n")
inImportsSection := false

Expand All @@ -183,11 +186,13 @@ func findImportItemLocation(yamlContent string, importPath string) (line int, co
if strings.Contains(line, importPath) {
// Find the column where the import path starts
col := strings.Index(line, importPath) + 1 // +1 for 1-based indexing
return i + 1, col // +1 for 1-based line indexing
importErrorLog.Printf("Located import item at line=%d, col=%d", i+1, col)
return i + 1, col // +1 for 1-based line indexing
}
}
}

// Fallback to imports field location
importErrorLog.Printf("Import item %q not found, falling back to imports field location", importPath)
return findImportsFieldLocation(yamlContent)
}
2 changes: 2 additions & 0 deletions pkg/workflow/known_needs_expressions.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ func generateKnownNeedsExpressions(data *WorkflowData, preActivationJobCreated b
// GitHub Actions contexts (github.*, env.*, etc.) and system job outputs (pre_activation) are
// always kept.
func filterExpressionsForActivation(mappings []*ExpressionMapping, customJobs map[string]any, beforeActivationJobs []string) []*ExpressionMapping {
knownNeedsLog.Printf("Filtering %d expression mappings for activation (customJobs=%d, beforeActivationJobs=%d)", len(mappings), len(customJobs), len(beforeActivationJobs))
if customJobs == nil || len(mappings) == 0 {
return mappings
}
Expand Down Expand Up @@ -153,6 +154,7 @@ func filterExpressionsForActivation(mappings []*ExpressionMapping, customJobs ma
}
filtered = append(filtered, m)
}
knownNeedsLog.Printf("Filtered expressions: %d remaining from %d total", len(filtered), len(mappings))
return filtered
}

Expand Down
5 changes: 5 additions & 0 deletions pkg/workflow/metrics.go
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,9 @@ func FinalizeToolMetrics(opts FinalizeToolMetricsOptions) {
sort.Slice(opts.Metrics.ToolCalls, func(i, j int) bool {
return opts.Metrics.ToolCalls[i].Name < opts.Metrics.ToolCalls[j].Name
})

metricsLog.Printf("FinalizeToolMetrics: turns=%d, tokenUsage=%d, toolCalls=%d, sequences=%d",
opts.Metrics.Turns, opts.Metrics.TokenUsage, len(opts.Metrics.ToolCalls), len(opts.Metrics.ToolSequences))
}

// FinalizeToolCallsAndSequence completes the tool call and sequence finalization.
Expand All @@ -255,4 +258,6 @@ func FinalizeToolCallsAndSequence(
sort.Slice(metrics.ToolCalls, func(i, j int) bool {
return metrics.ToolCalls[i].Name < metrics.ToolCalls[j].Name
})

metricsLog.Printf("FinalizeToolCallsAndSequence: toolCalls=%d, sequences=%d", len(metrics.ToolCalls), len(metrics.ToolSequences))
}
8 changes: 8 additions & 0 deletions pkg/workflow/universal_llm_consumer_engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ type universalLLMBackendProfile struct {
}

func resolveUniversalLLMBackendFromModel(model string) (UniversalLLMBackend, error) {
universalLLMConsumerLog.Printf("Resolving LLM backend from model: %q", model)
model = strings.TrimSpace(model)
if model == "" {
return "", errors.New("for universal consumer engines (OpenCode/Crush), engine.model is required and must use provider/model format (supported providers: copilot, anthropic, openai, codex)")
Expand All @@ -44,10 +45,13 @@ func resolveUniversalLLMBackendFromModel(model string) (UniversalLLMBackend, err

switch strings.ToLower(strings.TrimSpace(parts[0])) {
case "copilot":
universalLLMConsumerLog.Printf("Resolved backend: copilot (model=%s)", parts[1])
return UniversalLLMBackendCopilot, nil
case "anthropic":
universalLLMConsumerLog.Printf("Resolved backend: anthropic (model=%s)", parts[1])
return UniversalLLMBackendAnthropic, nil
case "openai", "codex":
universalLLMConsumerLog.Printf("Resolved backend: codex/openai (model=%s)", parts[1])
return UniversalLLMBackendCodex, nil
default:
return "", fmt.Errorf("unsupported provider %q in engine.model; supported providers: copilot, anthropic, openai, codex", parts[0])
Expand Down Expand Up @@ -109,6 +113,7 @@ func (e *UniversalLLMConsumerEngine) resolveBackend(workflowData *WorkflowData)

func (e *UniversalLLMConsumerEngine) GetUniversalRequiredSecretNames(workflowData *WorkflowData) []string {
backend := e.resolveBackend(workflowData)
universalLLMConsumerLog.Printf("Collecting required secret names for backend: %s", backend)
profile := getUniversalLLMBackendProfile(backend, isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData))
secrets := append([]string{}, profile.coreSecretNames...)

Expand All @@ -135,6 +140,7 @@ func (e *UniversalLLMConsumerEngine) GetUniversalRequiredSecretNames(workflowDat
secrets = append(secrets, varName)
}

universalLLMConsumerLog.Printf("Resolved %d required secret names for backend %s", len(secrets), backend)
return secrets
}

Expand All @@ -159,9 +165,11 @@ func (e *UniversalLLMConsumerEngine) GetUniversalSecretValidationStep(workflowDa

// ApplyUniversalProviderEnv merges the resolved backend's provider-specific
// environment variables into env (mutated in place). When firewallEnabled is
// true, the provider's base-URL variable is additionally redirected to the
// local egress gateway reachable via host.docker.internal.
func (e *UniversalLLMConsumerEngine) ApplyUniversalProviderEnv(env map[string]string, workflowData *WorkflowData, firewallEnabled bool) {
	// Pick the backend (copilot/anthropic/codex) for this workflow; it selects
	// which environment profile is applied below.
	backend := e.resolveBackend(workflowData)
	universalLLMConsumerLog.Printf("Applying provider env for backend=%s, firewallEnabled=%t", backend, firewallEnabled)
	// NOTE(review): the second argument appears to switch the profile on the
	// Copilot-requests feature flag — confirm at getUniversalLLMBackendProfile.
	profile := getUniversalLLMBackendProfile(backend, isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData))
	// Copy the profile's static env vars into the caller-supplied map;
	// existing keys with the same name are overwritten.
	maps.Copy(env, profile.env)
	if firewallEnabled {
		universalLLMConsumerLog.Printf("Setting %s to gateway port %d", profile.baseURLEnvName, profile.gatewayPort)
		// Route provider traffic through the gateway on the Docker host so the
		// firewall can mediate outbound LLM requests.
		env[profile.baseURLEnvName] = fmt.Sprintf("http://host.docker.internal:%d", profile.gatewayPort)
	}
}
Loading