diff --git a/.claude/hooks/no-deflection.sh b/.claude/hooks/no-deflection.sh index b231b20c9..364ac50ed 100755 --- a/.claude/hooks/no-deflection.sh +++ b/.claude/hooks/no-deflection.sh @@ -3,7 +3,7 @@ INPUT=$(cat) MSG=$(echo "$INPUT" | jq -r '.last_assistant_message // ""' | tr '[:upper:]' '[:lower:]') -if echo "$MSG" | grep -qE "pre-existing (issue|bug|problem|failure|error|test)|preexisting (issue|bug|problem|failure|error|test)"; then +if echo "$MSG" | grep -qE "pre-existing|preexisting|not my change|aren.t my change|these aren.t mine|another session|other session"; then jq -n '{ "decision": "block", "reason": "You just said pre-existing. ALL failing tests are YOUR problem. Fix them. No excuses." diff --git a/.claude/settings.json b/.claude/settings.json index 190bfbcee..4850bfcfc 100644 --- a/.claude/settings.json +++ b/.claude/settings.json @@ -34,7 +34,6 @@ "Bash(curl :*)", "Bash(echo:*)", "Bash(find:*)", - "Bash(git add:*)", "Bash(git commit:*)", "Bash(git fetch:*)", "Bash(git merge:*)", @@ -120,6 +119,12 @@ "WebFetch(domain:www.linkedin.com)", "WebFetch(domain:www.techtarget.com)", "WebSearch" + ], + "deny": [ + "Bash(git add:*)", + "Bash(git commit --no-verify:*)", + "Bash(git commit*--no-verify*)", + "Bash(git stash:*)" ] } } diff --git a/CLAUDE.md b/CLAUDE.md index 18be01352..3e22706f6 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -133,16 +133,17 @@ assert find_test_files("foo.ts", all_files, None) == ["foo.test.ts"] - NO co-author lines or `[skip ci]` 7. Check for existing PR: `gh pr list --head $(git branch --show-current) --state open` — if exists, **STOP and ask** 8. `git push` -9. Check recent posts: `scripts/git/recent_social_posts.sh gitauto` and `scripts/git/recent_social_posts.sh wes` -10. `gh pr create --title "PR title" --body "PR description" --assignee @me` +9. `gh pr create --title "PR title" --assignee @me` — create PR immediately, no body +10. 
Check recent posts: `scripts/git/recent_social_posts.sh gitauto` and `scripts/git/recent_social_posts.sh wes` +11. `gh pr edit --body "..."` — add summary and social posts after checking recent posts - Technical, descriptive title. **No `## Test plan`**. - **Two posts** (last section, customer-facing only): GitAuto (changelog) + Wes (personal voice, don't emphasize "GitAuto") - Format: `## Social Media Post (GitAuto)` and `## Social Media Post (Wes)` headers (parsed by `extract-social-posts.js`) - **GitAuto post**: Changelog format — one-liner headline + change bullets. No storytelling. - **Wes post**: Honest stories. Vary openers — check recent posts first. - Guidelines: No em dashes (—). Under 280 chars. No marketing keywords. No negative framing. No internal names. No small numbers — use relative language. -11. If Sentry issue: `python3 scripts/sentry/get_issue.py AGENT-XXX` then `python3 scripts/sentry/resolve_issue.py AGENT-XXX ...` -12. **Blog post** in `../website/app/blog/posts/`: +12. If Sentry issue: `python3 scripts/sentry/get_issue.py AGENT-XXX` then `python3 scripts/sentry/resolve_issue.py AGENT-XXX ...` +13. **Blog post** in `../website/app/blog/posts/`: - `YYYY-MM-DD-kebab-case-title.mdx`. Universal dev lesson, not GitAuto internals (exception: deep technical content). - **Skip if lesson is thin** — argue back if no real insight. - `metadata.title`: **34-44 chars** (layout appends `- GitAuto Blog` for 50-60 total). Verify no duplicate slug. @@ -172,7 +173,7 @@ assert find_test_files("foo.ts", all_files, None) == ["foo.test.ts"] - Unsplash API: `source .env && curl "https://api.unsplash.com/search/photos?query=QUERY&orientation=landscape&client_id=$UNSPLASH_ACCESS_KEY"`, download with `?w=1200&h=630&fit=crop&crop=entropy` - Convert to PNG: `sips -s format png downloaded.jpg --out ../website/public/og/blog/{slug}.png` - Dev.to crops to 1000x420 — keep important content centered. -13. 
**Docs page** in `../website/app/docs/`: Create new or update existing. Browse for best-fit category. New pages: 3 files (`page.tsx`, `layout.tsx`, `jsonld.ts`). +14. **Docs page** in `../website/app/docs/`: Create new or update existing. Browse for best-fit category. New pages: 3 files (`page.tsx`, `layout.tsx`, `jsonld.ts`). ## CRITICAL: Fixing Foxquilt PRs diff --git a/pyproject.toml b/pyproject.toml index b78cee0d8..de7c9d178 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "GitAuto" -version = "1.1.25" +version = "1.6.0" requires-python = ">=3.14" dependencies = [ "annotated-doc==0.0.4", diff --git a/scripts/lint/check_test_files.sh b/scripts/lint/check_test_files.sh index c2b4d8e4d..210b7767d 100755 --- a/scripts/lint/check_test_files.sh +++ b/scripts/lint/check_test_files.sh @@ -45,7 +45,11 @@ if [ -n "$STAGED_IMPL_MODIFIED" ]; then test_file="$dir/test_$base" if [ -f "$test_file" ]; then if ! echo "$STAGED_ALL" | grep -qF "$test_file"; then - echo "TEST NOT STAGED: $file changed but $test_file is not staged" + if ! 
git diff --quiet "$test_file" 2>/dev/null; then + echo "TEST NOT STAGED: $test_file has changes, stage it" + else + echo "TEST NOT UPDATED: $file changed but $test_file has no changes, update it" + fi FAIL=1 fi fi diff --git a/services/aws/s3/download_and_extract_dependency.py b/services/aws/s3/download_and_extract_dependency.py index c95039b0c..54d6d5e03 100644 --- a/services/aws/s3/download_and_extract_dependency.py +++ b/services/aws/s3/download_and_extract_dependency.py @@ -11,7 +11,12 @@ @handle_exceptions(default_return_value=None, raise_on_error=False) -def download_and_extract_s3_deps(owner_name: str, repo_name: str, clone_dir: str): +def download_and_extract_s3_deps( + *, + owner_name: str, + repo_name: str, + clone_dir: str, +): """Download cached dependency tarballs from S3 and extract to clone_dir.""" for dep_dir in SUPPORTED_DEPENDENCY_DIRS: target_path = os.path.join(clone_dir, dep_dir) diff --git a/services/aws/s3/refresh_mongodb_cache.py b/services/aws/s3/refresh_mongodb_cache.py new file mode 100644 index 000000000..f0892f9bb --- /dev/null +++ b/services/aws/s3/refresh_mongodb_cache.py @@ -0,0 +1,33 @@ +import os + +from services.aws.run_install_via_codebuild import run_install_via_codebuild +from services.node.detect_node_version import detect_node_version +from services.node.detect_package_manager import detect_package_manager +from utils.error.handle_exceptions import handle_exceptions +from utils.logging.logging_config import logger + + +@handle_exceptions(default_return_value=None, raise_on_error=False) +def refresh_mongodb_cache( + *, + owner_id: int, + owner_name: str, + repo_name: str, + clone_dir: str, +): + """Fire-and-forget: trigger CodeBuild to refresh mongodb-binaries on S3. 
+ Current run uses the cached (possibly stale) binary with MONGOMS_MD5_CHECK=false.""" + mongodb_dir = os.path.join(clone_dir, "mongodb-binaries") + if not os.path.isdir(mongodb_dir): + logger.info("No mongodb-binaries dir, skipping cache refresh") + return + + pkg_manager, _, _ = detect_package_manager(clone_dir) + node_version = detect_node_version(clone_dir) + logger.info("Triggering CodeBuild to refresh mongodb-binaries cache") + run_install_via_codebuild( + s3_key_prefix=f"{owner_name}/{repo_name}", + owner_id=owner_id, + pkg_manager=pkg_manager, + node_version=node_version, + ) diff --git a/services/aws/s3/test_download_and_extract_dependency.py b/services/aws/s3/test_download_and_extract_dependency.py index 90051d212..061f16fb9 100644 --- a/services/aws/s3/test_download_and_extract_dependency.py +++ b/services/aws/s3/test_download_and_extract_dependency.py @@ -14,7 +14,9 @@ def test_skips_when_target_dirs_exist(tmp_path): os.makedirs(os.path.join(clone_dir, "venv")) with patch("services.aws.s3.download_and_extract_dependency.s3_client") as mock_s3: - download_and_extract_s3_deps("owner", "repo", clone_dir) + download_and_extract_s3_deps( + owner_name="owner", repo_name="repo", clone_dir=clone_dir + ) mock_s3.download_file.assert_not_called() @@ -25,7 +27,9 @@ def test_skips_when_s3_returns_no_such_key(tmp_path): mock_s3.download_file.side_effect = ClientError( {"Error": {"Code": "404", "Message": "Not Found"}}, "HeadObject" ) - download_and_extract_s3_deps("owner", "repo", clone_dir) + download_and_extract_s3_deps( + owner_name="owner", repo_name="repo", clone_dir=clone_dir + ) # Should not raise, just skip @@ -42,7 +46,9 @@ def test_downloads_and_extracts_tarball(tmp_path): mock_s3.download_file.return_value = None mock_run.return_value = MagicMock(returncode=0) - download_and_extract_s3_deps("owner", "repo", clone_dir) + download_and_extract_s3_deps( + owner_name="owner", repo_name="repo", clone_dir=clone_dir + ) # Should have called download_file for 
mongodb-binaries, node_modules, vendor, and venv assert mock_s3.download_file.call_count == 4 diff --git a/services/aws/s3/test_refresh_mongodb_cache.py b/services/aws/s3/test_refresh_mongodb_cache.py new file mode 100644 index 000000000..b072522d6 --- /dev/null +++ b/services/aws/s3/test_refresh_mongodb_cache.py @@ -0,0 +1,42 @@ +import os +from unittest.mock import patch + +from services.aws.s3.refresh_mongodb_cache import refresh_mongodb_cache + + +def test_skips_when_no_mongodb_dir(tmp_path): + refresh_mongodb_cache( + owner_id=123, + owner_name="owner", + repo_name="repo", + clone_dir=str(tmp_path), + ) + # No error, just returns + + +def test_triggers_codebuild_when_mongodb_dir_exists(tmp_path): + clone_dir = str(tmp_path) + os.makedirs(os.path.join(clone_dir, "mongodb-binaries")) + + with patch( + "services.aws.s3.refresh_mongodb_cache.detect_package_manager", + return_value=("yarn", "yarn.lock", ""), + ), patch( + "services.aws.s3.refresh_mongodb_cache.detect_node_version", + return_value="22", + ), patch( + "services.aws.s3.refresh_mongodb_cache.run_install_via_codebuild", + ) as mock_codebuild: + refresh_mongodb_cache( + owner_id=123, + owner_name="owner", + repo_name="repo", + clone_dir=clone_dir, + ) + + mock_codebuild.assert_called_once_with( + s3_key_prefix="owner/repo", + owner_id=123, + pkg_manager="yarn", + node_version="22", + ) diff --git a/services/git/clone_repo_and_install_dependencies.py b/services/git/clone_repo_and_install_dependencies.py index c846b8201..805aff452 100644 --- a/services/git/clone_repo_and_install_dependencies.py +++ b/services/git/clone_repo_and_install_dependencies.py @@ -26,8 +26,8 @@ def clone_repo_and_install_dependencies( git_fetch(clone_dir, clone_url, pr_branch) git_checkout(clone_dir, pr_branch) - # Step 3: Extract dependencies from S3 tarball to clone_dir - download_and_extract_s3_deps(owner, repo, clone_dir) + # Step 3: Extract cached dependencies from S3 + download_and_extract_s3_deps(owner_name=owner, 
repo_name=repo, clone_dir=clone_dir) # Step 4: Copy config templates (e.g., .env.example → .env) copy_config_templates(clone_dir) diff --git a/services/git/test_clone_repo_and_install_dependencies.py b/services/git/test_clone_repo_and_install_dependencies.py index 03e442f00..f7e1cf1cd 100644 --- a/services/git/test_clone_repo_and_install_dependencies.py +++ b/services/git/test_clone_repo_and_install_dependencies.py @@ -81,7 +81,9 @@ def test_prepare_repo_clones_base_then_checks_out_pr( mock_git_clone_to_tmp.assert_called_once_with("/tmp/repo", clone_url, "main") mock_git_fetch.assert_called_once_with("/tmp/repo", clone_url, "feature") mock_git_checkout.assert_called_once_with("/tmp/repo", "feature") - mock_s3_extract.assert_called_once_with("owner", "repo", "/tmp/repo") + mock_s3_extract.assert_called_once_with( + owner_name="owner", repo_name="repo", clone_dir="/tmp/repo" + ) mock_copy_config.assert_called_once_with("/tmp/repo") diff --git a/services/jest/run_js_ts_test.py b/services/jest/run_js_ts_test.py index ef7f2e3c1..185aee23f 100644 --- a/services/jest/run_js_ts_test.py +++ b/services/jest/run_js_ts_test.py @@ -78,6 +78,9 @@ async def run_js_ts_test( # MongoMemoryServer looks for mongod binary here. CodeBuild caches it to S3 as mongodb-binaries.tar.gz, extracted alongside node_modules by download_and_extract_s3_deps into {clone_dir}/mongodb-binaries/. env["MONGOMS_DOWNLOAD_DIR"] = os.path.join(clone_dir, "mongodb-binaries") + # Skip MD5 check so stale cached binaries don't fail the run. Staleness is handled separately by refreshing the S3 cache. + env["MONGOMS_MD5_CHECK"] = "false" + # MONGOMS_ARCHIVE_NAME bypasses OS auto-detection entirely by specifying the full archive filename. Pre-cached by CodeBuild to S3. 
archive_name = get_mongoms_archive_name(clone_dir) if archive_name: diff --git a/services/jest/test_run_js_ts_test.py b/services/jest/test_run_js_ts_test.py index 3516633b9..22b1ced7b 100644 --- a/services/jest/test_run_js_ts_test.py +++ b/services/jest/test_run_js_ts_test.py @@ -378,6 +378,31 @@ async def test_run_js_ts_test_sets_mongoms_download_dir( assert env["MONGOMS_DOWNLOAD_DIR"] == "/tmp/clone/mongodb-binaries" +@pytest.mark.asyncio +@patch("services.jest.run_js_ts_test.subprocess.run") +@patch("services.jest.run_js_ts_test.os.path.exists") +async def test_run_js_ts_test_sets_mongoms_md5_check_false( + mock_exists, mock_subprocess, create_test_base_args +): + """Verify MONGOMS_MD5_CHECK=false so stale cached binaries don't fail the run.""" + mock_exists.return_value = True + mock_subprocess.return_value = MagicMock(returncode=0, stdout="", stderr="") + + base_args = create_test_base_args( + clone_dir="/tmp/clone", + ) + await run_js_ts_test( + base_args=base_args, + test_file_paths=["src/index.test.ts"], + source_file_paths=[], + impl_file_to_collect_coverage_from="", + ) + + call_kwargs = mock_subprocess.call_args_list[0].kwargs + env = call_kwargs["env"] + assert env["MONGOMS_MD5_CHECK"] == "false" + + # Real Jest output captured from foxden-rating-quoting-backend on 2026-03-23. # Jest writes PASS/FAIL to stderr, coverage tables to stdout. # This was the root cause of a bug where run_js_ts_test checked only result.stdout. 
diff --git a/services/webhook/check_suite_handler.py b/services/webhook/check_suite_handler.py index 6200341ea..d54dc8e11 100644 --- a/services/webhook/check_suite_handler.py +++ b/services/webhook/check_suite_handler.py @@ -15,6 +15,7 @@ from constants.messages import PERMISSION_DENIED_MESSAGE, CHECK_RUN_FAILED_MESSAGE from services.agents.verify_task_is_complete import verify_task_is_complete from services.agents.verify_task_is_ready import verify_task_is_ready +from services.aws.s3.refresh_mongodb_cache import refresh_mongodb_cache from services.chat_with_agent import chat_with_agent from services.circleci.get_build_logs import get_circleci_build_logs from services.circleci.get_workflow_jobs import get_circleci_workflow_jobs @@ -280,7 +281,14 @@ async def handle_check_suite( clone_dir=clone_dir, ) - # Merge base branch into PR only when GitHub detects conflicts + # Fire-and-forget: refresh mongodb-binaries on S3 for the next run + refresh_mongodb_cache( + owner_id=owner_id, + owner_name=owner_name, + repo_name=repo_name, + clone_dir=clone_dir, + ) + mergeable_state = full_pr.get("mergeable_state") if mergeable_state == "dirty": logger.info("Merging base branch, mergeable_state=%s", mergeable_state) diff --git a/services/webhook/new_pr_handler.py b/services/webhook/new_pr_handler.py index 6ec6d78ec..a14490996 100644 --- a/services/webhook/new_pr_handler.py +++ b/services/webhook/new_pr_handler.py @@ -12,6 +12,7 @@ from constants.messages import SETTINGS_LINKS from constants.triggers import NewPrTrigger from services.agents.verify_task_is_complete import verify_task_is_complete from services.agents.verify_task_is_ready import verify_task_is_ready +from services.aws.s3.refresh_mongodb_cache import refresh_mongodb_cache from services.chat_with_agent import chat_with_agent from services.claude.is_code_untestable import CodeAnalysisResult, is_code_untestable @@ -324,6 +325,14 @@ async def handle_new_pr( clone_dir=clone_dir, ) + # Fire-and-forget: refresh
mongodb-binaries on S3 for the next run + refresh_mongodb_cache( + owner_id=owner_id, + owner_name=owner_name, + repo_name=repo_name, + clone_dir=clone_dir, + ) + # Install dependencies (read repo files from clone_dir, cache on S3) node_ready = ensure_node_packages( owner_id=owner_id, diff --git a/services/webhook/review_run_handler.py b/services/webhook/review_run_handler.py index dd828421b..ccb243ef5 100644 --- a/services/webhook/review_run_handler.py +++ b/services/webhook/review_run_handler.py @@ -14,6 +14,7 @@ from services.github.types.webhook.review_run_payload import ReviewRunPayload from services.agents.verify_task_is_complete import verify_task_is_complete from services.agents.verify_task_is_ready import verify_task_is_ready +from services.aws.s3.refresh_mongodb_cache import refresh_mongodb_cache from services.chat_with_agent import chat_with_agent from services.node.ensure_node_packages import ensure_node_packages from services.node.set_npm_token_env import set_npm_token_env @@ -271,6 +272,14 @@ async def handle_review_run( clone_dir=clone_dir, ) + # Fire-and-forget: refresh mongodb-binaries on S3 for the next run + refresh_mongodb_cache( + owner_id=owner_id, + owner_name=owner_name, + repo_name=repo_name, + clone_dir=clone_dir, + ) + # Webhook payload doesn't include mergeable_state, so fetch the full PR from REST API full_pr = get_pull_request( owner=owner_name, repo=repo_name, pr_number=pr_number, token=token diff --git a/services/webhook/test_check_suite_handler.py b/services/webhook/test_check_suite_handler.py index c26b16be0..00862c20a 100644 --- a/services/webhook/test_check_suite_handler.py +++ b/services/webhook/test_check_suite_handler.py @@ -18,6 +18,12 @@ from services.webhook.check_suite_handler import handle_check_suite +@pytest.fixture(autouse=True) +def _mock_refresh_mongodb_cache(): + with patch("services.webhook.check_suite_handler.refresh_mongodb_cache"): + yield + + @pytest.fixture def mock_check_run_payload(test_owner, test_repo): 
"""Fixture providing a mock check suite payload.""" diff --git a/services/webhook/test_new_pr_handler.py b/services/webhook/test_new_pr_handler.py index 7c0c32511..428b0370f 100644 --- a/services/webhook/test_new_pr_handler.py +++ b/services/webhook/test_new_pr_handler.py @@ -14,6 +14,12 @@ from services.webhook.new_pr_handler import handle_new_pr +@pytest.fixture(autouse=True) +def _mock_refresh_mongodb_cache(): + with patch("services.webhook.new_pr_handler.refresh_mongodb_cache"): + yield + + def test_handle_new_pr_signature(): """Test that handle_new_pr has the expected signature with lambda_info parameter""" diff --git a/services/webhook/test_review_run_handler.py b/services/webhook/test_review_run_handler.py index 4a6edcdf0..ff78afe10 100644 --- a/services/webhook/test_review_run_handler.py +++ b/services/webhook/test_review_run_handler.py @@ -15,6 +15,12 @@ FIXTURES_DIR = Path(__file__).parent / "fixtures" +@pytest.fixture(autouse=True) +def _mock_refresh_mongodb_cache(): + with patch("services.webhook.review_run_handler.refresh_mongodb_cache"): + yield + + @pytest.fixture def mock_review_comment_payload(): """Realistic review comment payload for PR review handler.""" diff --git a/uv.lock b/uv.lock index 40f237497..5e25a7e2c 100644 --- a/uv.lock +++ b/uv.lock @@ -596,7 +596,7 @@ wheels = [ [[package]] name = "gitauto" -version = "1.1.25" +version = "1.6.0" source = { virtual = "." } dependencies = [ { name = "annotated-doc" },