Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pkg/agentdrain/anomaly.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ type AnomalyDetector struct {

// NewAnomalyDetector creates an AnomalyDetector with the given thresholds.
func NewAnomalyDetector(simThreshold float64, rareClusterThreshold int) *AnomalyDetector {
anomalyLog.Printf("Creating AnomalyDetector: simThreshold=%.2f, rareClusterThreshold=%d", simThreshold, rareClusterThreshold)
return &AnomalyDetector{
threshold: simThreshold,
rareThreshold: rareClusterThreshold,
Expand Down
4 changes: 4 additions & 0 deletions pkg/agentdrain/miner.go
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ func (m *Miner) Train(line string) (*MatchResult, error) {
// match is the internal (non-locking) lookup. Must be called with mu held.
func (m *Miner) match(tokens []string) (*MatchResult, bool) {
candidates := m.tree.search(tokens, m.cfg.Depth, m.cfg.ParamToken)
minerLog.Printf("match: searching %d candidate cluster(s) for %d token(s)", len(candidates), len(tokens))
bestSim := -1.0
var best *Cluster
for _, id := range candidates {
Expand All @@ -90,9 +91,11 @@ func (m *Miner) match(tokens []string) (*MatchResult, bool) {
}
}
if best == nil || bestSim < m.cfg.SimThreshold {
minerLog.Printf("match: no cluster matched (best_sim=%.2f, threshold=%.2f)", bestSim, m.cfg.SimThreshold)
return nil, false
}
params := extractParams(tokens, best.Template, m.cfg.ParamToken)
minerLog.Printf("match: matched cluster id=%d, similarity=%.2f, params=%d", best.ID, bestSim, len(params))
return &MatchResult{
ClusterID: best.ID,
Template: strings.Join(best.Template, " "),
Expand All @@ -104,6 +107,7 @@ func (m *Miner) match(tokens []string) (*MatchResult, bool) {

// TrainEvent flattens the AgentEvent and calls Train.
func (m *Miner) TrainEvent(evt AgentEvent) (*MatchResult, error) {
minerLog.Printf("TrainEvent: stage=%s", evt.Stage)
line := FlattenEvent(evt, m.cfg.ExcludeFields)
result, err := m.Train(line)
if err != nil {
Expand Down
9 changes: 9 additions & 0 deletions pkg/parser/frontmatter_hash.go
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,8 @@ func ComputeFrontmatterHashFromFileWithReader(filePath string, cache *ImportCach
// computeFrontmatterHashFromContent is the shared core that computes the hash given the
// already-read file content and pre-parsed frontmatter map (may be nil).
func computeFrontmatterHashFromContent(content string, parsedFrontmatter map[string]any, filePath string, cache *ImportCache, fileReader FileReader) (string, error) {
frontmatterHashLog.Printf("Computing hash from content: filePath=%s, content_size=%d bytes", filePath, len(content))

// Extract frontmatter and markdown as text (no YAML parsing)
frontmatterText, markdown, err := extractFrontmatterAndBodyText(content)
if err != nil {
Expand All @@ -201,6 +203,7 @@ func computeFrontmatterHashFromContent(content string, parsedFrontmatter map[str
// Detect inlined-imports from the pre-parsed frontmatter map.
// If nil (parsing failed or not provided), inlined-imports is treated as false.
inlinedImports := parseBoolFromFrontmatter(parsedFrontmatter, "inlined-imports")
frontmatterHashLog.Printf("Hash strategy: inlined_imports=%v, markdown_size=%d bytes", inlinedImports, len(markdown))

// When inlined-imports is enabled, the entire markdown body is compiled into the lock
// file, so any change to the body must invalidate the hash. Include the full body text.
Expand All @@ -220,6 +223,7 @@ func computeFrontmatterHashFromContent(content string, parsedFrontmatter map[str
// extractRelevantTemplateExpressions extracts template expressions from markdown
// that reference env. or vars. contexts
func extractRelevantTemplateExpressions(markdown string) []string {
frontmatterHashLog.Printf("Extracting relevant template expressions from markdown: size=%d bytes", len(markdown))
var expressions []string
seen := make(map[string]bool)

Expand Down Expand Up @@ -247,6 +251,7 @@ func extractRelevantTemplateExpressions(markdown string) []string {

// Sort for deterministic output
sort.Strings(expressions)
frontmatterHashLog.Printf("Found %d relevant template expression(s) referencing env./vars.", len(expressions))
return expressions
}

Expand Down Expand Up @@ -393,6 +398,8 @@ func processImportsTextBased(frontmatterText, baseDir string, visited map[string
return importedFiles, importedFrontmatterTexts, nil
}

frontmatterHashLog.Printf("Processing %d import(s) text-based from baseDir=%s", len(imports), baseDir)

// Sort imports for deterministic processing
sort.Strings(imports)

Expand All @@ -402,6 +409,7 @@ func processImportsTextBased(frontmatterText, baseDir string, visited map[string

// Skip if already visited (cycle detection)
if visited[fullPath] {
frontmatterHashLog.Printf("Skipping already-visited import (cycle detection): %s", fullPath)
continue
}
visited[fullPath] = true
Expand Down Expand Up @@ -437,6 +445,7 @@ func processImportsTextBased(frontmatterText, baseDir string, visited map[string
importedFrontmatterTexts = append(importedFrontmatterTexts, nestedTexts...)
}

frontmatterHashLog.Printf("Processed imports: found %d imported file(s) from baseDir=%s", len(importedFiles), baseDir)
return importedFiles, importedFrontmatterTexts, nil
}

Expand Down
3 changes: 3 additions & 0 deletions pkg/parser/schedule_cron_detection.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ func IsWeeklyCron(cron string) bool {
}
}

log.Printf("Cron expression classified as weekly: %q (minute=%s, hour=%s, dow=%s)", cron, minute, hour, dow)
return true
}

Expand All @@ -143,9 +144,11 @@ func IsCronExpression(input string) bool {
// Each field should match cron syntax (numbers, *, /, -, ,)
for _, field := range fields {
if !cronFieldPattern.MatchString(field) {
log.Printf("Cron field %q contains invalid characters in expression: %q", field, input)
return false
}
}

log.Printf("Input recognized as valid cron expression: %q", input)
return true
}
9 changes: 9 additions & 0 deletions pkg/workflow/template_injection_utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ func extractRunBlocks(data any) []string {
}
}

if len(runBlocks) > 0 {
templateInjectionValidationLog.Printf("Extracted %d run block(s) from YAML tree", len(runBlocks))
}
return runBlocks
}

Expand Down Expand Up @@ -63,11 +66,15 @@ var heredocPatterns = func() []heredocPattern {
// Heredocs (e.g., cat > file << 'EOF' ... EOF) are safe for template expressions
// because the content is written to files, not executed in the shell.
func removeHeredocContent(content string) string {
	templateInjectionValidationLog.Printf("Removing heredoc content from shell command: input_size=%d bytes", len(content))
	// Apply every known heredoc pattern, stripping both the quoted
	// (<< 'EOF') and unquoted (<< EOF) delimiter forms and leaving a
	// placeholder comment in place of each removed body.
	sanitized := content
	for _, pattern := range heredocPatterns {
		sanitized = pattern.quoted.ReplaceAllString(sanitized, "# heredoc removed")
		sanitized = pattern.unquoted.ReplaceAllString(sanitized, "# heredoc removed")
	}
	// Only log when the substitutions actually changed the overall size.
	if len(sanitized) != len(content) {
		templateInjectionValidationLog.Printf("Heredoc content removed: output_size=%d bytes (reduced by %d bytes)", len(sanitized), len(content)-len(sanitized))
	}
	return sanitized
}

Expand Down Expand Up @@ -102,6 +109,8 @@ func replaceOutsideQuotedHeredocs(s, old, new string) string {
return strings.ReplaceAll(s, old, new)
}

templateInjectionValidationLog.Printf("Replacing outside %d quoted heredoc region(s): replacing %q with %q", len(quotedRegions), old, new)

// Sort regions by start position so we can walk left-to-right.
sort.Slice(quotedRegions, func(i, j int) bool {
return quotedRegions[i].start < quotedRegions[j].start
Expand Down