Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 95 additions & 0 deletions debug_json.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
package main

import (
"encoding/json"
"fmt"
"os"
)

// main is a debugging utility: given a file path as the first CLI
// argument, it reads the file, reports whether it parses as a JSON
// array of log-entry objects, and summarizes the entry types and
// tool calls found inside. On parse failure it dumps the first and
// last 200 bytes to help diagnose the malformed input.
func main() {
	if len(os.Args) < 2 {
		fmt.Println("Usage: go run debug_json.go <file>")
		return
	}

	filePath := os.Args[1]
	content, err := os.ReadFile(filePath)
	if err != nil {
		fmt.Printf("Error reading file: %v\n", err)
		return
	}

	fmt.Printf("File size: %d bytes\n", len(content))

	// Try to parse as a JSON array of log entries.
	var logEntries []map[string]any
	if err := json.Unmarshal(content, &logEntries); err != nil {
		fmt.Printf("Failed to parse as JSON array: %v\n", err)
		printSnippets(content)
		return
	}

	fmt.Printf("Successfully parsed %d log entries\n", len(logEntries))

	assistantCount, resultCount, toolCallCount := analyzeEntries(logEntries)
	fmt.Printf("\nSummary: %d assistant entries, %d result entries, %d tool calls\n",
		assistantCount, resultCount, toolCallCount)
}

// printSnippets shows the first and last 200 bytes of content so a
// human can spot truncation or framing problems in the raw file.
func printSnippets(content []byte) {
	first := content
	if len(first) > 200 {
		first = first[:200]
	}
	fmt.Printf("First 200 chars: %q\n", string(first))

	last := content
	if len(last) > 200 {
		last = last[len(last)-200:]
	}
	fmt.Printf("Last 200 chars: %q\n", string(last))
}

// analyzeEntries walks the parsed log entries, printing one line per
// entry that carries a string "type" field, and returns the number of
// assistant entries, result entries, and tool_use content items seen.
// Entries without a string "type" are silently skipped.
func analyzeEntries(entries []map[string]any) (assistantCount, resultCount, toolCallCount int) {
	for i, entry := range entries {
		typeStr, ok := entry["type"].(string)
		if !ok {
			continue
		}
		switch typeStr {
		case "result":
			resultCount++
			fmt.Printf("Entry %d: type=result\n", i)
		case "assistant":
			assistantCount++
			fmt.Printf("Entry %d: type=assistant", i)
			toolCallCount += describeAssistantContent(entry)
			fmt.Printf("\n")
		default:
			fmt.Printf("Entry %d: type=%s\n", i, typeStr)
		}
	}
	return assistantCount, resultCount, toolCallCount
}

// describeAssistantContent prints a short inline description of an
// assistant entry's message content array and returns the number of
// tool_use items it contains. Missing or unexpectedly-shaped fields
// are tolerated and contribute nothing, matching the original's
// nested type-assertion behavior.
func describeAssistantContent(entry map[string]any) int {
	toolCalls := 0
	message, ok := entry["message"].(map[string]any)
	if !ok {
		return 0
	}
	contentArray, ok := message["content"].([]any)
	if !ok {
		return 0
	}
	fmt.Printf(" (content array length: %d)", len(contentArray))
	for _, item := range contentArray {
		contentMap, ok := item.(map[string]any)
		if !ok {
			continue
		}
		contentType, ok := contentMap["type"].(string)
		if !ok {
			continue
		}
		if contentType == "tool_use" {
			toolCalls++
			if name, exists := contentMap["name"]; exists {
				fmt.Printf(" [tool_use: %v]", name)
			}
		} else {
			fmt.Printf(" [%s]", contentType)
		}
	}
	return toolCalls
}
2 changes: 1 addition & 1 deletion pkg/cli/access_log.go
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ func displayAccessLogAnalysis(processedRuns []ProcessedRun, verbose bool) {
}

if len(analyses) == 0 {
fmt.Println(console.FormatInfoMessage("No access logs found in downloaded runs"))
fmt.Println(console.FormatInfoMessage("No access logs found"))
return
}

Expand Down
54 changes: 27 additions & 27 deletions pkg/cli/logs.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,8 @@ Examples:
` + constants.CLIExtensionPrefix + ` logs --start-date -1mo # Filter runs from last month
` + constants.CLIExtensionPrefix + ` logs --engine claude # Filter logs by claude engine
` + constants.CLIExtensionPrefix + ` logs --engine codex # Filter logs by codex engine
` + constants.CLIExtensionPrefix + ` logs -o ./my-logs # Custom output directory`,
` + constants.CLIExtensionPrefix + ` logs -o ./my-logs # Custom output directory
` + constants.CLIExtensionPrefix + ` logs --tool-graph # Generate Mermaid tool sequence graph`,
Run: func(cmd *cobra.Command, args []string) {
var workflowName string
if len(args) > 0 && args[0] != "" {
Expand Down Expand Up @@ -165,6 +166,7 @@ Examples:
outputDir, _ := cmd.Flags().GetString("output")
engine, _ := cmd.Flags().GetString("engine")
verbose, _ := cmd.Flags().GetBool("verbose")
toolGraph, _ := cmd.Flags().GetBool("tool-graph")

// Resolve relative dates to absolute dates for GitHub CLI
now := time.Now()
Expand Down Expand Up @@ -204,7 +206,7 @@ Examples:
}
}

if err := DownloadWorkflowLogs(workflowName, count, startDate, endDate, outputDir, engine, verbose); err != nil {
if err := DownloadWorkflowLogs(workflowName, count, startDate, endDate, outputDir, engine, verbose, toolGraph); err != nil {
fmt.Fprintln(os.Stderr, console.FormatError(console.CompilerError{
Type: "error",
Message: err.Error(),
Expand All @@ -221,12 +223,13 @@ Examples:
logsCmd.Flags().StringP("output", "o", "./logs", "Output directory for downloaded logs and artifacts")
logsCmd.Flags().String("engine", "", "Filter logs by agentic engine type (claude, codex)")
logsCmd.Flags().BoolP("verbose", "v", false, "Show individual tool names instead of grouping by MCP server")
logsCmd.Flags().Bool("tool-graph", false, "Generate Mermaid tool sequence graph from agent logs")

return logsCmd
}

// DownloadWorkflowLogs downloads and analyzes workflow logs with metrics
func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, outputDir, engine string, verbose bool) error {
func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, outputDir, engine string, verbose bool, toolGraph bool) error {
if verbose {
fmt.Println(console.FormatInfoMessage("Fetching workflow runs from GitHub Actions..."))
}
Expand Down Expand Up @@ -410,6 +413,11 @@ func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, ou
// Display missing tools analysis
displayMissingToolsAnalysis(processedRuns, verbose)

// Generate tool sequence graph if requested
if toolGraph {
generateToolGraph(processedRuns, verbose)
}

// Display logs location prominently
absOutputDir, _ := filepath.Abs(outputDir)
fmt.Println(console.FormatSuccessMessage(fmt.Sprintf("Downloaded %d logs to %s", len(processedRuns), absOutputDir)))
Expand Down Expand Up @@ -731,10 +739,12 @@ func extractLogMetrics(logDir string, verbose bool) (LogMetrics, error) {
return nil
}

// Process log files
if strings.HasSuffix(strings.ToLower(info.Name()), ".log") ||
strings.HasSuffix(strings.ToLower(info.Name()), ".txt") ||
strings.Contains(strings.ToLower(info.Name()), "log") {
// Process log files - exclude output artifacts like aw_output.txt
fileName := strings.ToLower(info.Name())
if (strings.HasSuffix(fileName, ".log") ||
(strings.HasSuffix(fileName, ".txt") && strings.Contains(fileName, "log"))) &&
!strings.Contains(fileName, "aw_output") &&
!strings.Contains(fileName, "agent_output") {

fileMetrics, err := parseLogFileWithEngine(path, detectedEngine, verbose)
if err != nil && verbose {
Expand All @@ -752,6 +762,10 @@ func extractLogMetrics(logDir string, verbose bool) (LogMetrics, error) {
// the total conversation turns for the entire workflow run
metrics.Turns = fileMetrics.Turns
}

// Aggregate tool sequences and tool calls
metrics.ToolSequences = append(metrics.ToolSequences, fileMetrics.ToolSequences...)
metrics.ToolCalls = append(metrics.ToolCalls, fileMetrics.ToolCalls...)
}

return nil
Expand Down Expand Up @@ -824,24 +838,10 @@ func extractEngineFromAwInfo(infoFilePath string, verbose bool) workflow.CodingA

// parseLogFileWithEngine parses a log file using a specific engine or falls back to auto-detection
func parseLogFileWithEngine(filePath string, detectedEngine workflow.CodingAgentEngine, verbose bool) (LogMetrics, error) {
// Read the log file content
file, err := os.Open(filePath)
// Read the entire log file at once to avoid JSON parsing issues from chunked reading
content, err := os.ReadFile(filePath)
if err != nil {
return LogMetrics{}, fmt.Errorf("error opening log file: %w", err)
}
defer file.Close()

var content []byte
buffer := make([]byte, 4096)
for {
n, err := file.Read(buffer)
if err != nil && err != io.EOF {
return LogMetrics{}, fmt.Errorf("error reading log file: %w", err)
}
if n == 0 {
break
}
content = append(content, buffer[:n]...)
return LogMetrics{}, fmt.Errorf("error reading log file: %w", err)
}

logContent := string(content)
Expand Down Expand Up @@ -970,7 +970,7 @@ func displayToolCallReport(processedRuns []ProcessedRun, verbose bool) {

// For now, let's extract metrics from the run if available
// We'll process log files to get tool call information
logMetrics := extractLogMetricsFromRun(processedRun)
logMetrics := ExtractLogMetricsFromRun(processedRun)

for _, toolCall := range logMetrics.ToolCalls {
var displayKey string
Expand Down Expand Up @@ -1070,8 +1070,8 @@ func displayToolCallReport(processedRuns []ProcessedRun, verbose bool) {
fmt.Print(console.RenderTable(tableConfig))
}

// extractLogMetricsFromRun extracts log metrics from a processed run's log directory
func extractLogMetricsFromRun(processedRun ProcessedRun) workflow.LogMetrics {
// ExtractLogMetricsFromRun extracts log metrics from a processed run's log directory
func ExtractLogMetricsFromRun(processedRun ProcessedRun) workflow.LogMetrics {
// Use the LogsPath from the WorkflowRun to get metrics
if processedRun.Run.LogsPath == "" {
return workflow.LogMetrics{}
Expand Down
4 changes: 2 additions & 2 deletions pkg/cli/logs_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ func TestDownloadWorkflowLogs(t *testing.T) {
// Test the DownloadWorkflowLogs function
// This should either fail with auth error (if not authenticated)
// or succeed with no results (if authenticated but no workflows match)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", "", false)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", "", false, false)

// If GitHub CLI is authenticated, the function may succeed but find no results
// If not authenticated, it should return an auth error
Expand Down Expand Up @@ -793,7 +793,7 @@ func TestDownloadWorkflowLogsWithEngineFilter(t *testing.T) {
if !tt.expectError {
// For valid engines, test that the function can be called without panic
// It may still fail with auth errors, which is expected
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", tt.engine, false)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", tt.engine, false, false)

// Clean up any created directories
os.RemoveAll("./test-logs")
Expand Down
Loading