Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 38 additions & 0 deletions .github/actions/setup-mux/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
name: "Setup Mux"
description: "Setup Bun and install dependencies with caching"
runs:
  using: "composite"
  steps:
    - name: Setup Bun
      uses: oven-sh/setup-bun@v2
      with:
        bun-version: latest

    # The Bun version is part of the cache key below so a toolchain upgrade
    # invalidates previously cached node_modules.
    - name: Get Bun version
      id: bun-version
      shell: bash
      # Quote the redirect target; $GITHUB_OUTPUT is a runner-provided path
      # and should never be subject to word-splitting.
      run: echo "version=$(bun --version)" >> "$GITHUB_OUTPUT"

    - name: Cache node_modules
      id: cache-node-modules
      uses: actions/cache@v4
      with:
        path: node_modules
        key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ steps.bun-version.outputs.version }}-node-modules-${{ hashFiles('**/bun.lock') }}
        restore-keys: |
          ${{ runner.os }}-${{ runner.arch }}-bun-${{ steps.bun-version.outputs.version }}-node-modules-

    # Only restore Bun's download cache when node_modules missed exactly;
    # an exact node_modules hit skips `bun install` entirely (see below).
    - name: Cache bun install cache
      if: steps.cache-node-modules.outputs.cache-hit != 'true'
      id: cache-bun-install
      uses: actions/cache@v4
      with:
        path: ~/.bun/install/cache
        key: ${{ runner.os }}-bun-cache-${{ hashFiles('**/bun.lock') }}
        restore-keys: |
          ${{ runner.os }}-bun-cache-

    - name: Install dependencies
      if: steps.cache-node-modules.outputs.cache-hit != 'true'
      shell: bash
      run: bun install --frozen-lockfile
17 changes: 17 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,20 @@ Always run `bun typecheck` and `bun fmt` after changes to ensure that files are
## Debugging

If the user provides a PR identifier, you should use the `gh` CLI to inspect the API so we can fix our implementation if it appears incorrect.

## PR + Release Workflow

- Reuse existing PRs; never close or recreate without instruction. Force-push updates.
- After every push run:

```bash
gh pr view <pr_number> --json mergeable,mergeStateStatus | jq '.'
./scripts/wait_pr_checks.sh <pr_number>
```

- Generally run `wait_pr_checks` after submitting a PR to ensure CI passes.
- Status decoding: `mergeable=MERGEABLE` clean; `CONFLICTING` needs resolution. `mergeStateStatus=CLEAN` ready, `BLOCKED` waiting for CI, `BEHIND` rebase, `DIRTY` conflicts.
- If behind: `git fetch origin && git rebase origin/main && git push --force-with-lease`.
- Never enable auto-merge or merge at all unless the user explicitly says "merge it".
- PR descriptions: include only information a busy reviewer cannot infer; focus on implementation nuances or validation steps.
- Title prefixes: `perf|refactor|fix|feat|ci|bench`, e.g., `🤖 fix: handle workspace rename edge cases`.
104 changes: 104 additions & 0 deletions scripts/check_codex_comments.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
#!/usr/bin/env bash
# Check a PR for unresolved feedback left by the Codex review bot.
# Usage: ./scripts/check_codex_comments.sh <pr_number>
# Exits 0 when no actionable bot comments remain, 1 otherwise.
set -euo pipefail

if [ $# -eq 0 ]; then
  echo "Usage: $0 <pr_number>"
  exit 1
fi

PR_NUMBER=$1
# GraphQL reports bot logins without the "[bot]" suffix that REST appends.
BOT_LOGIN_REST="chatgpt-codex-connector[bot]" # kept for REST-based tooling
BOT_LOGIN_GRAPHQL="chatgpt-codex-connector"

echo "Checking for unresolved Codex comments in PR #${PR_NUMBER}..."

# Use GraphQL to get all comments (including minimized status)
GRAPHQL_QUERY='query($owner: String!, $repo: String!, $pr: Int!) {
  repository(owner: $owner, name: $repo) {
    pullRequest(number: $pr) {
      comments(first: 100) {
        nodes {
          id
          author { login }
          body
          createdAt
          isMinimized
        }
      }
      reviewThreads(first: 100) {
        nodes {
          id
          isResolved
          comments(first: 1) {
            nodes {
              id
              author { login }
              body
              createdAt
              path
              line
            }
          }
        }
      }
    }
  }
}'

REPO_INFO=$(gh repo view --json owner,name --jq '{owner: .owner.login, name: .name}')
OWNER=$(echo "$REPO_INFO" | jq -r '.owner')
REPO=$(echo "$REPO_INFO" | jq -r '.name')

RESULT=$(gh api graphql \
  -f query="$GRAPHQL_QUERY" \
  -F owner="$OWNER" \
  -F repo="$REPO" \
  -F pr="$PR_NUMBER")

# Bot comments matching this pattern are not actionable:
# - "Didn't find any major issues" (no issues found)
# - "usage limits have been reached" (rate limit error, not a real review)
SKIP_PATTERN="Didn't find any major issues|usage limits have been reached"

# Regular (non-thread) bot comments that are neither minimized nor skippable.
# Shell values are passed via --arg so they are never spliced into jq code.
REGULAR_COMMENTS=$(echo "$RESULT" | jq --arg bot "$BOT_LOGIN_GRAPHQL" --arg skip "$SKIP_PATTERN" \
  '[.data.repository.pullRequest.comments.nodes[]
    | select(.author.login == $bot
             and .isMinimized == false
             and ((.body | test($skip)) | not))]')
REGULAR_COUNT=$(echo "$REGULAR_COMMENTS" | jq 'length')

# Review threads opened by the bot that nobody has resolved yet.
UNRESOLVED_THREADS=$(echo "$RESULT" | jq --arg bot "$BOT_LOGIN_GRAPHQL" \
  '[.data.repository.pullRequest.reviewThreads.nodes[]
    | select(.isResolved == false and .comments.nodes[0].author.login == $bot)]')
UNRESOLVED_COUNT=$(echo "$UNRESOLVED_THREADS" | jq 'length')

TOTAL_UNRESOLVED=$((REGULAR_COUNT + UNRESOLVED_COUNT))

echo "Found ${REGULAR_COUNT} unminimized regular comment(s) from bot"
echo "Found ${UNRESOLVED_COUNT} unresolved review thread(s) from bot"

if [ "$TOTAL_UNRESOLVED" -gt 0 ]; then
  echo ""
  echo "❌ Found ${TOTAL_UNRESOLVED} unresolved comment(s) from Codex in PR #${PR_NUMBER}"
  echo ""
  echo "Codex comments:"

  if [ "$REGULAR_COUNT" -gt 0 ]; then
    # BUGFIX: the GraphQL field is camelCase (.createdAt); the previous
    # .created_at lookup always produced null timestamps.
    echo "$REGULAR_COMMENTS" | jq -r '.[] | "  - [\(.createdAt)] \(.body[0:100] | gsub("\n"; " "))..."'
  fi

  if [ "$UNRESOLVED_COUNT" -gt 0 ]; then
    THREAD_SUMMARY=$(echo "$UNRESOLVED_THREADS" | jq '[.[] | {
      createdAt: .comments.nodes[0].createdAt,
      thread: .id,
      comment: .comments.nodes[0].id,
      path: (.comments.nodes[0].path // "comment"),
      line: (.comments.nodes[0].line // ""),
      snippet: (.comments.nodes[0].body[0:100] | gsub("\n"; " "))
    }]')

    echo "$THREAD_SUMMARY" | jq -r '.[] | "  - [\(.createdAt)] thread=\(.thread) comment=\(.comment) \(.path):\(.line) - \(.snippet)..."'
    echo ""
    echo "Resolve review threads with: ./scripts/resolve_pr_comment.sh <thread_id>"
  fi

  echo ""
  echo "Please address or resolve all Codex comments before merging."
  exit 1
else
  echo "✅ No unresolved Codex comments found"
  exit 0
fi
50 changes: 50 additions & 0 deletions scripts/check_pr_reviews.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
#!/usr/bin/env bash
# Check for unresolved PR review comments
# Usage: ./scripts/check_pr_reviews.sh <pr_number>
# Exits 0 if all resolved, 1 if unresolved comments exist

# Match the sibling scripts: also fail on unset vars and pipeline errors.
set -euo pipefail

# ${1:-} so a missing argument prints usage instead of tripping `set -u`.
if [ -z "${1:-}" ]; then
  echo "Usage: $0 <pr_number>"
  exit 1
fi

PR_NUMBER="$1"

# Parameterized GraphQL query: the PR number is passed as a typed variable
# via -F rather than interpolated into the query text, so malformed input
# fails validation instead of corrupting the query.
GRAPHQL_QUERY='query($pr: Int!) {
  repository(owner: "coder", name: "mux") {
    pullRequest(number: $pr) {
      reviewThreads(first: 100) {
        nodes {
          id
          isResolved
          comments(first: 1) {
            nodes {
              author { login }
              body
              diffHunk
              commit { oid }
            }
          }
        }
      }
    }
  }
}'

# Query for unresolved review threads
UNRESOLVED=$(gh api graphql -f query="$GRAPHQL_QUERY" -F pr="$PR_NUMBER" \
  --jq '.data.repository.pullRequest.reviewThreads.nodes[]
        | select(.isResolved == false)
        | {thread_id: .id, user: .comments.nodes[0].author.login, body: .comments.nodes[0].body, diff_hunk: .comments.nodes[0].diffHunk, commit_id: .comments.nodes[0].commit.oid}')

if [ -n "$UNRESOLVED" ]; then
  echo "❌ Unresolved review comments found:"
  echo "$UNRESOLVED" | jq -r '"  \(.user): \(.body)"'
  echo ""
  echo "To resolve a comment thread, use:"
  echo "$UNRESOLVED" | jq -r '"  ./scripts/resolve_pr_comment.sh \(.thread_id)"'
  echo ""
  echo "View PR: https://github.com/coder/mux/pull/$PR_NUMBER"
  exit 1
fi

echo "✅ All review comments resolved"
exit 0
153 changes: 153 additions & 0 deletions scripts/extract_pr_logs.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
#!/usr/bin/env bash
# Extract logs from failed GitHub Actions runs for a PR
# Usage: ./scripts/extract_pr_logs.sh <pr_number_or_run_id> [job_name_pattern] [--wait]
#
# Examples:
#   ./scripts/extract_pr_logs.sh 329               # Latest failed run for PR #329
#   ./scripts/extract_pr_logs.sh 329 Integration   # Only Integration Test jobs
#   ./scripts/extract_pr_logs.sh 329 --wait        # Wait for logs to be available
#   ./scripts/extract_pr_logs.sh 18640062283       # Specific run ID

set -euo pipefail

INPUT="${1:-}"
JOB_PATTERN="${2:-}"
WAIT_FOR_LOGS=false

# Parse flags: --wait may appear in place of the pattern or after it.
if [[ "$JOB_PATTERN" == "--wait" ]]; then
  WAIT_FOR_LOGS=true
  JOB_PATTERN=""
elif [[ "${3:-}" == "--wait" ]]; then
  WAIT_FOR_LOGS=true
fi

if [[ -z "$INPUT" ]]; then
  echo "❌ Usage: $0 <pr_number_or_run_id> [job_name_pattern]" >&2
  echo "" >&2
  echo "Examples:" >&2
  echo "  $0 329              # Latest failed run for PR #329 (RECOMMENDED)" >&2
  echo "  $0 329 Integration  # Only Integration Test jobs from PR #329" >&2
  echo "  $0 18640062283      # Specific run ID" >&2
  exit 1
fi

# Detect if input is PR number or run ID (run IDs are much longer)
if [[ "$INPUT" =~ ^[0-9]{1,5}$ ]]; then
  PR_NUMBER="$INPUT"
  echo "🔍 Finding latest failed run for PR #$PR_NUMBER..." >&2

  # `gh pr checks` reports normalized upper-case states (e.g. FAILURE);
  # pull the run id out of the first failing check's link.
  RUN_ID=$(gh pr checks "$PR_NUMBER" --json name,link,state \
    --jq '.[] | select(.state == "FAILURE") | .link' \
    | head -1 | sed -E 's|.*/runs/([0-9]+).*|\1|' || echo "")

  if [[ -z "$RUN_ID" ]]; then
    echo "❌ No failed runs found for PR #$PR_NUMBER" >&2
    echo "" >&2
    echo "Current check status:" >&2
    gh pr checks "$PR_NUMBER" 2>&1 || true
    exit 1
  fi

  echo "📋 Found failed run: $RUN_ID" >&2
else
  RUN_ID="$INPUT"
  echo "📋 Fetching logs for run $RUN_ID..." >&2
fi

# Get all jobs for this run. `|| true` keeps `set -e` from killing the
# script before we can print the helpful message below when gh fails
# (previously a bad run id made the script exit silently).
JOBS=$(gh run view "$RUN_ID" --json jobs -q '.jobs[]' 2>/dev/null || true)

if [[ -z "$JOBS" ]]; then
  echo "❌ No jobs found for run $RUN_ID" >&2
  echo "" >&2
  echo "Check if run ID is correct:" >&2
  echo "  gh run list --limit 10" >&2
  exit 1
fi

# Filter to failed jobs only (unless specific pattern requested).
# BUGFIX: the GitHub API reports job conclusions in lowercase ("failure",
# "timed_out", "cancelled"); the old upper-case comparison never matched,
# so the failed-only filter was dead code. Compare case-insensitively.
if [[ -z "$JOB_PATTERN" ]]; then
  FAILED_JOBS=$(echo "$JOBS" | jq -r 'select((.conclusion // "") | ascii_downcase | IN("failure", "timed_out", "cancelled"))')
  if [[ -n "$FAILED_JOBS" ]]; then
    echo "🎯 Showing only failed jobs (use job_pattern to see others)" >&2
    JOBS="$FAILED_JOBS"
  fi
fi

# Parse jobs and filter by pattern if provided. The pattern goes in via
# --arg so user input is never spliced into the jq program text.
if [[ -n "$JOB_PATTERN" ]]; then
  MATCHING_JOBS=$(echo "$JOBS" | jq -r --arg pat "$JOB_PATTERN" 'select(.name | test($pat; "i")) | .databaseId')
  if [[ -z "$MATCHING_JOBS" ]]; then
    echo "❌ No jobs matching pattern '$JOB_PATTERN'" >&2
    echo "" >&2
    echo "Available jobs:" >&2
    echo "$JOBS" | jq -r '.name' >&2
    exit 1
  fi
  JOB_IDS="$MATCHING_JOBS"
else
  JOB_IDS=$(echo "$JOBS" | jq -r '.databaseId')
fi

# Map job names to local commands for reproduction
suggest_local_command() {
  local job_name="$1"
  case "$job_name" in
    *"Static Checks"* | *"lint"* | *"typecheck"* | *"fmt"*)
      echo "💡 Reproduce locally: make static-check"
      ;;
    *"Integration Tests"*)
      echo "💡 Reproduce locally: make test-integration"
      ;;
    *"Test"*)
      echo "💡 Reproduce locally: make test"
      ;;
    *"Build"*)
      echo "💡 Reproduce locally: make build"
      ;;
    *"End-to-End"*)
      echo "💡 Reproduce locally: make test-e2e"
      ;;
  esac
}

# Extract and display logs for each job
for JOB_ID in $JOB_IDS; do
  JOB_INFO=$(echo "$JOBS" | jq -r --argjson id "$JOB_ID" 'select(.databaseId == $id)')
  JOB_NAME=$(echo "$JOB_INFO" | jq -r '.name')
  JOB_STATUS=$(echo "$JOB_INFO" | jq -r '.conclusion // .status')

  echo "" >&2
  echo "════════════════════════════════════════════════════════════" >&2
  echo "Job: $JOB_NAME (ID: $JOB_ID) - $JOB_STATUS" >&2
  echo "════════════════════════════════════════════════════════════" >&2

  # Suggest local reproduction command
  suggest_local_command "$JOB_NAME" >&2
  echo "" >&2

  # Fetch logs with retry logic if --wait flag is set
  MAX_RETRIES=3
  RETRY_COUNT=0

  while [ "$RETRY_COUNT" -lt "$MAX_RETRIES" ]; do
    # Use gh api to fetch logs (works for individual completed jobs even if run is in progress)
    if gh api "/repos/coder/mux/actions/jobs/$JOB_ID/logs" 2>/dev/null; then
      break
    else
      RETRY_COUNT=$((RETRY_COUNT + 1))
      if [ "$RETRY_COUNT" -lt "$MAX_RETRIES" ] && [ "$WAIT_FOR_LOGS" = true ]; then
        echo "⏳ Logs not ready yet, waiting 5 seconds... (attempt $RETRY_COUNT/$MAX_RETRIES)" >&2
        sleep 5
      else
        echo "⚠️  Could not fetch logs for job $JOB_ID" >&2
        if [ "$WAIT_FOR_LOGS" = false ]; then
          echo "   Tip: Use --wait flag to retry if logs are still processing" >&2
        else
          echo "   (logs may have expired or are still processing)" >&2
        fi
        break
      fi
    fi
  done
done
Loading