diff --git a/.github/workflows/apm-js-mirror-monitor.lock.yml b/.github/workflows/apm-js-mirror-monitor.lock.yml
new file mode 100644
index 00000000000..bdf7c5413d8
--- /dev/null
+++ b/.github/workflows/apm-js-mirror-monitor.lock.yml
@@ -0,0 +1,1390 @@
+# ___ _ _
+# / _ \ | | (_)
+# | |_| | __ _ ___ _ __ | |_ _ ___
+# | _ |/ _` |/ _ \ '_ \| __| |/ __|
+# | | | | (_| | __/ | | | |_| | (__
+# \_| |_/\__, |\___|_| |_|\__|_|\___|
+# __/ |
+# _ _ |___/
+# | | | | / _| |
+# | | | | ___ _ __ _ __| |_| | _____ ____
+# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___|
+# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \
+# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/
+#
+# This file was automatically generated by gh-aw. DO NOT EDIT.
+#
+# To update this file, edit the corresponding .md file and run:
+# gh aw compile
+# Not all edits will cause changes to this file.
+#
+# For more information: https://github.github.com/gh-aw/introduction/overview/
+#
+# Daily monitor that checks the microsoft/APM Python source (packer.py, unpacker.py) for changes and ensures apm_pack.cjs and apm_unpack.cjs stay in sync; creates a PR when updates are needed
+#
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"8bae12c1786de151222414b82f8e9b03260fae96b9beca0caf249e1071ed1183","agent_id":"claude"}
+
+name: "APM JavaScript Mirror Monitor"
+"on":
+ schedule:
+ - cron: "45 22 * * *"
+ # Friendly format: daily (scattered)
+ # skip-if-match: is:pr is:open in:title "[apm-js-mirror]" # Skip-if-match processed as search check in pre-activation job
+ workflow_dispatch:
+ inputs:
+ aw_context:
+ default: ""
+ description: Agent caller context (used internally by Agentic Workflows).
+ required: false
+ type: string
+
+permissions: {}
+
+concurrency:
+ group: "gh-aw-${{ github.workflow }}"
+
+run-name: "APM JavaScript Mirror Monitor"
+
+jobs:
+ activation:
+ needs: pre_activation
+ if: needs.pre_activation.outputs.activated == 'true'
+ runs-on: ubuntu-slim
+ permissions:
+ contents: read
+ outputs:
+ comment_id: ""
+ comment_repo: ""
+ lockdown_check_failed: ${{ steps.generate_aw_info.outputs.lockdown_check_failed == 'true' }}
+ model: ${{ steps.generate_aw_info.outputs.model }}
+ secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Generate agentic run info
+ id: generate_aw_info
+ env:
+ GH_AW_INFO_ENGINE_ID: "claude"
+ GH_AW_INFO_ENGINE_NAME: "Claude Code"
+ GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || 'auto' }}
+ GH_AW_INFO_VERSION: "latest"
+ GH_AW_INFO_AGENT_VERSION: "latest"
+ GH_AW_INFO_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_INFO_EXPERIMENTAL: "false"
+ GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true"
+ GH_AW_INFO_STAGED: "false"
+ GH_AW_INFO_ALLOWED_DOMAINS: '["defaults","github","api.github.com","raw.githubusercontent.com"]'
+ GH_AW_INFO_FIREWALL_ENABLED: "true"
+ GH_AW_INFO_AWF_VERSION: "v0.25.3"
+ GH_AW_INFO_AWMG_VERSION: ""
+ GH_AW_INFO_FIREWALL_TYPE: "squid"
+ GH_AW_COMPILED_STRICT: "false"
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_aw_info.cjs');
+ await main(core, context);
+ - name: Validate ANTHROPIC_API_KEY secret
+ id: validate-secret
+ run: ${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh ANTHROPIC_API_KEY 'Claude Code' https://github.github.com/gh-aw/reference/engines/#anthropic-claude-code
+ env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ - name: Checkout .github and .agents folders
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
+ sparse-checkout: |
+ .github
+ .agents
+ actions/setup
+ sparse-checkout-cone-mode: true
+ fetch-depth: 1
+ - name: Check workflow file timestamps
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_WORKFLOW_FILE: "apm-js-mirror-monitor.lock.yml"
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs');
+ await main();
+ - name: Create prompt with built-in context
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_SAFE_OUTPUTS: ${{ runner.temp }}/gh-aw/safeoutputs/outputs.jsonl
+ GH_AW_GITHUB_ACTOR: ${{ github.actor }}
+ GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }}
+ GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }}
+ GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
+ GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
+ GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ # poutine:ignore untrusted_checkout_exec
+ run: |
+ bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
+ {
+ cat << 'GH_AW_PROMPT_0b6f75bcad32c8ab_EOF'
+
+ GH_AW_PROMPT_0b6f75bcad32c8ab_EOF
+ cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
+ cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
+ cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
+ cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
+ cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
+ cat << 'GH_AW_PROMPT_0b6f75bcad32c8ab_EOF'
+
+ Tools: create_issue, create_pull_request, missing_tool, missing_data, noop
+ GH_AW_PROMPT_0b6f75bcad32c8ab_EOF
+ cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
+ cat << 'GH_AW_PROMPT_0b6f75bcad32c8ab_EOF'
+
+
+ The following GitHub context information is available for this workflow:
+ {{#if __GH_AW_GITHUB_ACTOR__ }}
+ - **actor**: __GH_AW_GITHUB_ACTOR__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_REPOSITORY__ }}
+ - **repository**: __GH_AW_GITHUB_REPOSITORY__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_WORKSPACE__ }}
+ - **workspace**: __GH_AW_GITHUB_WORKSPACE__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }}
+ - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }}
+ - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }}
+ - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }}
+ - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__
+ {{/if}}
+ {{#if __GH_AW_GITHUB_RUN_ID__ }}
+ - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__
+ {{/if}}
+
+
+ GH_AW_PROMPT_0b6f75bcad32c8ab_EOF
+ cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
+ cat << 'GH_AW_PROMPT_0b6f75bcad32c8ab_EOF'
+
+ GH_AW_PROMPT_0b6f75bcad32c8ab_EOF
+ cat << 'GH_AW_PROMPT_0b6f75bcad32c8ab_EOF'
+ {{#runtime-import .github/workflows/apm-js-mirror-monitor.md}}
+ GH_AW_PROMPT_0b6f75bcad32c8ab_EOF
+ } > "$GH_AW_PROMPT"
+ - name: Interpolate variables and render templates
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
+ GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/interpolate_prompt.cjs');
+ await main();
+ - name: Substitute placeholders
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_ALLOWED_EXTENSIONS: ''
+ GH_AW_CACHE_DESCRIPTION: ''
+ GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/'
+ GH_AW_GITHUB_ACTOR: ${{ github.actor }}
+ GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }}
+ GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }}
+ GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
+ GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
+ GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
+ GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
+ GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }}
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+
+ const substitutePlaceholders = require('${{ runner.temp }}/gh-aw/actions/substitute_placeholders.cjs');
+
+ // Call the substitution function
+ return await substitutePlaceholders({
+ file: process.env.GH_AW_PROMPT,
+ substitutions: {
+ GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS,
+ GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION,
+ GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR,
+ GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR,
+ GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID,
+ GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER,
+ GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER,
+ GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER,
+ GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY,
+ GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID,
+ GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE,
+ GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED
+ }
+ });
+ - name: Validate prompt placeholders
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ # poutine:ignore untrusted_checkout_exec
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/validate_prompt_placeholders.sh
+ - name: Print prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ # poutine:ignore untrusted_checkout_exec
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/print_prompt_summary.sh
+ - name: Upload activation artifact
+ if: success()
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: activation
+ path: |
+ /tmp/gh-aw/aw_info.json
+ /tmp/gh-aw/aw-prompts/prompt.txt
+ retention-days: 1
+
+ agent:
+ needs: activation
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ issues: read
+ pull-requests: read
+ concurrency:
+ group: "gh-aw-claude-${{ github.workflow }}"
+ env:
+ DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ""
+ GH_AW_ASSETS_BRANCH: ""
+ GH_AW_ASSETS_MAX_SIZE_KB: 0
+ GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
+ GH_AW_WORKFLOW_ID_SANITIZED: apmjsmirrormonitor
+ outputs:
+ checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }}
+ has_patch: ${{ steps.collect_output.outputs.has_patch }}
+ model: ${{ needs.activation.outputs.model }}
+ output: ${{ steps.collect_output.outputs.output }}
+ output_types: ${{ steps.collect_output.outputs.output_types }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Set runtime paths
+ id: set-runtime-paths
+ run: |
+ echo "GH_AW_SAFE_OUTPUTS=${RUNNER_TEMP}/gh-aw/safeoutputs/outputs.jsonl" >> "$GITHUB_OUTPUT"
+ echo "GH_AW_SAFE_OUTPUTS_CONFIG_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" >> "$GITHUB_OUTPUT"
+ echo "GH_AW_SAFE_OUTPUTS_TOOLS_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/tools.json" >> "$GITHUB_OUTPUT"
+ - name: Checkout repository
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
+ - name: Create gh-aw temp directory
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh
+ - name: Configure gh CLI for GitHub Enterprise
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/configure_gh_for_ghe.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+ # Cache memory file share configuration from frontmatter processed below
+ - name: Create cache-memory directory
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/create_cache_memory_dir.sh
+ - name: Restore cache-memory file share data
+ uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
+ with:
+ key: memory-none-nopolicy-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+ restore-keys: |
+ memory-none-nopolicy-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-
+ - name: Set up cache-memory git repository
+ env:
+ GH_AW_CACHE_DIR: /tmp/gh-aw/cache-memory
+ GH_AW_MIN_INTEGRITY: none
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/setup_cache_memory_git.sh
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git config --global am.keepcr true
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Checkout PR branch
+ id: checkout-pr
+ if: |
+ github.event.pull_request || github.event.issue.pull_request
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs');
+ await main();
+ - name: Setup Node.js
+ uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+ with:
+ node-version: '24'
+ package-manager-cache: false
+ - name: Install AWF binary
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh v0.25.3
+ - name: Install Claude Code CLI
+ run: npm install -g @anthropic-ai/claude-code@latest
+ - name: Determine automatic lockdown mode for GitHub MCP Server
+ id: determine-automatic-lockdown
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }}
+ GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }}
+ with:
+ script: |
+ const determineAutomaticLockdown = require('${{ runner.temp }}/gh-aw/actions/determine_automatic_lockdown.cjs');
+ await determineAutomaticLockdown(github, context, core);
+ - name: Download container images
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.25.3 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.3 ghcr.io/github/gh-aw-firewall/squid:0.25.3 ghcr.io/github/gh-aw-mcpg:v0.2.8 ghcr.io/github/github-mcp-server:v0.32.0 node:lts-alpine
+ - name: Write Safe Outputs Config
+ run: |
+ mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
+ mkdir -p /tmp/gh-aw/safeoutputs
+ mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8458872bb709c2cb_EOF'
+ {"create_issue":{"expires":72,"labels":["automation","dependencies","apm"],"max":1,"title_prefix":"[apm-js-mirror] "},"create_pull_request":{"expires":72,"labels":["automation","dependencies","apm"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS"],"protected_path_prefixes":[".github/",".agents/"],"reviewers":["copilot"],"title_prefix":"[apm-js-mirror] "},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_8458872bb709c2cb_EOF
+ - name: Write Safe Outputs Tools
+ run: |
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_dc79e1e171b93ffd_EOF'
+ {
+ "description_suffixes": {
+ "create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[apm-js-mirror] \". Labels [\"automation\" \"dependencies\" \"apm\"] will be automatically added.",
+ "create_pull_request": " CONSTRAINTS: Maximum 1 pull request(s) can be created. Title will be prefixed with \"[apm-js-mirror] \". Labels [\"automation\" \"dependencies\" \"apm\"] will be automatically added. Reviewers [\"copilot\"] will be assigned."
+ },
+ "repo_params": {},
+ "dynamic_tools": []
+ }
+ GH_AW_SAFE_OUTPUTS_TOOLS_META_dc79e1e171b93ffd_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_7aa880297afd96b2_EOF'
+ {
+ "create_issue": {
+ "defaultMax": 1,
+ "fields": {
+ "body": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 65000
+ },
+ "labels": {
+ "type": "array",
+ "itemType": "string",
+ "itemSanitize": true,
+ "itemMaxLength": 128
+ },
+ "parent": {
+ "issueOrPRNumber": true
+ },
+ "repo": {
+ "type": "string",
+ "maxLength": 256
+ },
+ "temporary_id": {
+ "type": "string"
+ },
+ "title": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 128
+ }
+ }
+ },
+ "create_pull_request": {
+ "defaultMax": 1,
+ "fields": {
+ "body": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 65000
+ },
+ "branch": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ },
+ "draft": {
+ "type": "boolean"
+ },
+ "labels": {
+ "type": "array",
+ "itemType": "string",
+ "itemSanitize": true,
+ "itemMaxLength": 128
+ },
+ "repo": {
+ "type": "string",
+ "maxLength": 256
+ },
+ "title": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 128
+ }
+ }
+ },
+ "missing_data": {
+ "defaultMax": 20,
+ "fields": {
+ "alternatives": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ },
+ "context": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ },
+ "data_type": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 128
+ },
+ "reason": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ }
+ }
+ },
+ "missing_tool": {
+ "defaultMax": 20,
+ "fields": {
+ "alternatives": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 512
+ },
+ "reason": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ },
+ "tool": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 128
+ }
+ }
+ },
+ "noop": {
+ "defaultMax": 1,
+ "fields": {
+ "message": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 65000
+ }
+ }
+ }
+ }
+ GH_AW_SAFE_OUTPUTS_VALIDATION_7aa880297afd96b2_EOF
+ node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs
+ - name: Generate Safe Outputs MCP Server Config
+ id: safe-outputs-config
+ run: |
+ # Generate a secure random API key (360 bits of entropy, 40+ chars)
+ # Mask immediately so the key is redacted from any subsequent workflow log output
+ API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
+ echo "::add-mask::${API_KEY}"
+
+ PORT=3001
+
+ # Set outputs for next steps
+ {
+ echo "safe_outputs_api_key=${API_KEY}"
+ echo "safe_outputs_port=${PORT}"
+ } >> "$GITHUB_OUTPUT"
+
+ echo "Safe Outputs MCP server will run on port ${PORT}"
+
+ - name: Start Safe Outputs MCP HTTP Server
+ id: safe-outputs-start
+ env:
+ DEBUG: '*'
+ GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }}
+ GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }}
+ GH_AW_SAFE_OUTPUTS_TOOLS_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/tools.json
+ GH_AW_SAFE_OUTPUTS_CONFIG_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/config.json
+ GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
+ run: |
+ # Environment variables are set above to prevent template injection
+ export DEBUG
+ export GH_AW_SAFE_OUTPUTS_PORT
+ export GH_AW_SAFE_OUTPUTS_API_KEY
+ export GH_AW_SAFE_OUTPUTS_TOOLS_PATH
+ export GH_AW_SAFE_OUTPUTS_CONFIG_PATH
+ export GH_AW_MCP_LOG_DIR
+
+ bash ${RUNNER_TEMP}/gh-aw/actions/start_safe_outputs_server.sh
+
+ - name: Start MCP Gateway
+ id: start-mcp-gateway
+ env:
+ GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }}
+ GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }}
+ GITHUB_MCP_GUARD_MIN_INTEGRITY: ${{ steps.determine-automatic-lockdown.outputs.min_integrity }}
+ GITHUB_MCP_GUARD_REPOS: ${{ steps.determine-automatic-lockdown.outputs.repos }}
+ GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ run: |
+ set -eo pipefail
+ mkdir -p /tmp/gh-aw/mcp-config
+
+ # Export gateway environment variables for MCP config and gateway script
+ export MCP_GATEWAY_PORT="80"
+ export MCP_GATEWAY_DOMAIN="host.docker.internal"
+ MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
+ echo "::add-mask::${MCP_GATEWAY_API_KEY}"
+ export MCP_GATEWAY_API_KEY
+ export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads"
+ mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}"
+ export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288"
+ export DEBUG="*"
+
+ export GH_AW_ENGINE="claude"
+ export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.8'
+
+ cat << GH_AW_MCP_CONFIG_0b687154616ed8db_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ {
+ "mcpServers": {
+ "github": {
+ "container": "ghcr.io/github/github-mcp-server:v0.32.0",
+ "env": {
+ "GITHUB_HOST": "$GITHUB_SERVER_URL",
+ "GITHUB_PERSONAL_ACCESS_TOKEN": "$GITHUB_MCP_SERVER_TOKEN",
+ "GITHUB_READ_ONLY": "1",
+ "GITHUB_TOOLSETS": "repos,pull_requests"
+ },
+ "guard-policies": {
+ "allow-only": {
+ "min-integrity": "$GITHUB_MCP_GUARD_MIN_INTEGRITY",
+ "repos": "$GITHUB_MCP_GUARD_REPOS"
+ }
+ }
+ },
+ "safeoutputs": {
+ "type": "http",
+ "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT",
+ "headers": {
+ "Authorization": "$GH_AW_SAFE_OUTPUTS_API_KEY"
+ },
+ "guard-policies": {
+ "write-sink": {
+ "accept": [
+ "*"
+ ]
+ }
+ }
+ }
+ },
+ "gateway": {
+ "port": $MCP_GATEWAY_PORT,
+ "domain": "${MCP_GATEWAY_DOMAIN}",
+ "apiKey": "${MCP_GATEWAY_API_KEY}",
+ "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
+ }
+ }
+ GH_AW_MCP_CONFIG_0b687154616ed8db_EOF
+ - name: Download activation artifact
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: activation
+ path: /tmp/gh-aw
+ - name: Clean git credentials
+ continue-on-error: true
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh
+ - name: Execute Claude Code CLI
+ id: agentic_execution
+ # Allowed tools (sorted):
+ # - Bash
+ # - BashOutput
+ # - Edit
+ # - Edit(/tmp/gh-aw/cache-memory/*)
+ # - ExitPlanMode
+ # - Glob
+ # - Grep
+ # - KillBash
+ # - LS
+ # - MultiEdit
+ # - MultiEdit(/tmp/gh-aw/cache-memory/*)
+ # - NotebookEdit
+ # - NotebookRead
+ # - Read
+ # - Read(/tmp/gh-aw/cache-memory/*)
+ # - Task
+ # - TodoWrite
+ # - WebFetch
+ # - Write
+ # - Write(/tmp/gh-aw/cache-memory/*)
+ # - mcp__github__download_workflow_run_artifact
+ # - mcp__github__get_code_scanning_alert
+ # - mcp__github__get_commit
+ # - mcp__github__get_dependabot_alert
+ # - mcp__github__get_discussion
+ # - mcp__github__get_discussion_comments
+ # - mcp__github__get_file_contents
+ # - mcp__github__get_job_logs
+ # - mcp__github__get_label
+ # - mcp__github__get_latest_release
+ # - mcp__github__get_me
+ # - mcp__github__get_notification_details
+ # - mcp__github__get_pull_request
+ # - mcp__github__get_pull_request_comments
+ # - mcp__github__get_pull_request_diff
+ # - mcp__github__get_pull_request_files
+ # - mcp__github__get_pull_request_review_comments
+ # - mcp__github__get_pull_request_reviews
+ # - mcp__github__get_pull_request_status
+ # - mcp__github__get_release_by_tag
+ # - mcp__github__get_secret_scanning_alert
+ # - mcp__github__get_tag
+ # - mcp__github__get_workflow_run
+ # - mcp__github__get_workflow_run_logs
+ # - mcp__github__get_workflow_run_usage
+ # - mcp__github__issue_read
+ # - mcp__github__list_branches
+ # - mcp__github__list_code_scanning_alerts
+ # - mcp__github__list_commits
+ # - mcp__github__list_dependabot_alerts
+ # - mcp__github__list_discussion_categories
+ # - mcp__github__list_discussions
+ # - mcp__github__list_issue_types
+ # - mcp__github__list_issues
+ # - mcp__github__list_label
+ # - mcp__github__list_notifications
+ # - mcp__github__list_pull_requests
+ # - mcp__github__list_releases
+ # - mcp__github__list_secret_scanning_alerts
+ # - mcp__github__list_starred_repositories
+ # - mcp__github__list_tags
+ # - mcp__github__list_workflow_jobs
+ # - mcp__github__list_workflow_run_artifacts
+ # - mcp__github__list_workflow_runs
+ # - mcp__github__list_workflows
+ # - mcp__github__pull_request_read
+ # - mcp__github__search_code
+ # - mcp__github__search_issues
+ # - mcp__github__search_orgs
+ # - mcp__github__search_pull_requests
+ # - mcp__github__search_repositories
+ # - mcp__github__search_users
+ timeout-minutes: 30
+ run: |
+ set -o pipefail
+ touch /tmp/gh-aw/agent-step-summary.md
+ # shellcheck disable=SC1003
+ sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.3 --skip-pull --enable-api-proxy \
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,WebFetch,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_wo
rkflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ BASH_DEFAULT_TIMEOUT_MS: 60000
+ BASH_MAX_TIMEOUT_MS: 60000
+ DISABLE_BUG_COMMAND: 1
+ DISABLE_ERROR_REPORTING: 1
+ DISABLE_TELEMETRY: 1
+ GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
+ GH_AW_MODEL_AGENT_CLAUDE: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || '' }}
+ GH_AW_PHASE: agent
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_VERSION: dev
+ GITHUB_AW: true
+ GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com
+ GIT_AUTHOR_NAME: github-actions[bot]
+ GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com
+ GIT_COMMITTER_NAME: github-actions[bot]
+ MCP_TIMEOUT: 120000
+ MCP_TOOL_TIMEOUT: 60000
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git config --global am.keepcr true
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Stop MCP Gateway
+ if: always()
+ continue-on-error: true
+ env:
+ MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }}
+ MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }}
+ GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }}
+ run: |
+ bash ${RUNNER_TEMP}/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID"
+ - name: Redact secrets in logs
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs');
+ await main();
+ env:
+ GH_AW_SECRET_NAMES: 'ANTHROPIC_API_KEY,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN'
+ SECRET_ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }}
+ SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }}
+ SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Append agent step summary
+ if: always()
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/append_agent_step_summary.sh
+ - name: Copy Safe Outputs
+ if: always()
+ env:
+ GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
+ run: |
+ mkdir -p /tmp/gh-aw
+ cp "$GH_AW_SAFE_OUTPUTS" /tmp/gh-aw/safeoutputs.jsonl 2>/dev/null || true
+ - name: Ingest agent output
+ id: collect_output
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
+ GITHUB_SERVER_URL: ${{ github.server_url }}
+ GITHUB_API_URL: ${{ github.api_url }}
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/collect_ndjson_output.cjs');
+ await main();
+ - name: Parse agent logs for step summary
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: /tmp/gh-aw/agent-stdio.log
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_claude_log.cjs');
+ await main();
+ - name: Parse MCP Gateway logs for step summary
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_mcp_gateway_log.cjs');
+ await main();
+ - name: Print firewall logs
+ if: always()
+ continue-on-error: true
+ env:
+ AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs
+ run: |
+ # Fix permissions on firewall logs so they can be uploaded as artifacts
+ # AWF runs with sudo, creating files owned by root
+ sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true
+ # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step)
+ if command -v awf &> /dev/null; then
+ awf logs summary | tee -a "$GITHUB_STEP_SUMMARY"
+ else
+ echo 'AWF binary not installed, skipping firewall log summary'
+ fi
+ - name: Write agent output placeholder if missing
+ if: always()
+ run: |
+ if [ ! -f /tmp/gh-aw/agent_output.json ]; then
+ echo '{"items":[]}' > /tmp/gh-aw/agent_output.json
+ fi
+ - name: Commit cache-memory changes
+ if: always()
+ env:
+ GH_AW_CACHE_DIR: /tmp/gh-aw/cache-memory
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/commit_cache_memory_git.sh
+ - name: Upload cache-memory data as artifact
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ if: always()
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Upload agent artifacts
+ if: always()
+ continue-on-error: true
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: agent
+ path: |
+ /tmp/gh-aw/aw-prompts/prompt.txt
+ /tmp/gh-aw/mcp-logs/
+ /tmp/gh-aw/agent-stdio.log
+ /tmp/gh-aw/agent/
+ /tmp/gh-aw/safeoutputs.jsonl
+ /tmp/gh-aw/agent_output.json
+ /tmp/gh-aw/aw-*.patch
+ /tmp/gh-aw/aw-*.bundle
+ if-no-files-found: ignore
+ - name: Upload firewall audit logs
+ if: always()
+ continue-on-error: true
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: firewall-audit-logs
+ path: |
+ /tmp/gh-aw/sandbox/firewall/logs/
+ /tmp/gh-aw/sandbox/firewall/audit/
+ if-no-files-found: ignore
+
+ conclusion:
+ needs:
+ - activation
+ - agent
+ - detection
+ - safe_outputs
+ - update_cache_memory
+ if: always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true')
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ concurrency:
+ group: "gh-aw-conclusion-apm-js-mirror-monitor"
+ cancel-in-progress: false
+ outputs:
+ noop_message: ${{ steps.noop.outputs.noop_message }}
+ tools_reported: ${{ steps.missing_tool.outputs.tools_reported }}
+ total_count: ${{ steps.missing_tool.outputs.total_count }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Download agent output artifact
+ id: download-agent-output
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
+ - name: Setup agent output environment variable
+ id: setup-agent-output-env
+ if: steps.download-agent-output.outcome == 'success'
+ run: |
+ mkdir -p /tmp/gh-aw/
+ find "/tmp/gh-aw/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
+ - name: Process No-Op Messages
+ id: noop
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_NOOP_MAX: "1"
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/noop.cjs');
+ await main();
+ - name: Record Missing Tool
+ id: missing_tool
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_MISSING_TOOL_CREATE_ISSUE: "true"
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/missing_tool.cjs');
+ await main();
+ - name: Handle Agent Failure
+ id: handle_agent_failure
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
+ GH_AW_WORKFLOW_ID: "apm-js-mirror-monitor"
+ GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }}
+ GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }}
+ GH_AW_CODE_PUSH_FAILURE_ERRORS: ${{ needs.safe_outputs.outputs.code_push_failure_errors }}
+ GH_AW_CODE_PUSH_FAILURE_COUNT: ${{ needs.safe_outputs.outputs.code_push_failure_count }}
+ GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }}
+ GH_AW_GROUP_REPORTS: "false"
+ GH_AW_FAILURE_REPORT_AS_ISSUE: "true"
+ GH_AW_TIMEOUT_MINUTES: "30"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_agent_failure.cjs');
+ await main();
+ - name: Handle No-Op Message
+ id: handle_noop_message
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
+ GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }}
+ GH_AW_NOOP_REPORT_AS_ISSUE: "true"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_noop_message.cjs');
+ await main();
+ - name: Handle Create Pull Request Error
+ id: handle_create_pr_error
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_create_pr_error.cjs');
+ await main();
+
+ detection:
+ needs: agent
+ if: >
+ always() && needs.agent.result != 'skipped' && (needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true')
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ outputs:
+ detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }}
+ detection_success: ${{ steps.detection_conclusion.outputs.success }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Download agent output artifact
+ id: download-agent-output
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
+ - name: Setup agent output environment variable
+ id: setup-agent-output-env
+ if: steps.download-agent-output.outcome == 'success'
+ run: |
+ mkdir -p /tmp/gh-aw/
+ find "/tmp/gh-aw/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
+ # --- Threat Detection ---
+ - name: Download container images
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.25.3 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.3 ghcr.io/github/gh-aw-firewall/squid:0.25.3
+ - name: Check if detection needed
+ id: detection_guard
+ if: always()
+ env:
+ OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }}
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }}
+ run: |
+ if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then
+ echo "run_detection=true" >> "$GITHUB_OUTPUT"
+ echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH"
+ else
+ echo "run_detection=false" >> "$GITHUB_OUTPUT"
+ echo "Detection skipped: no agent outputs or patches to analyze"
+ fi
+ - name: Clear MCP configuration for detection
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ run: |
+ rm -f /tmp/gh-aw/mcp-config/mcp-servers.json
+ rm -f /home/runner/.copilot/mcp-config.json
+ rm -f "$GITHUB_WORKSPACE/.gemini/settings.json"
+ - name: Prepare threat detection files
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ run: |
+ mkdir -p /tmp/gh-aw/threat-detection/aw-prompts
+ cp /tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true
+ cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true
+ for f in /tmp/gh-aw/aw-*.patch; do
+ [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true
+ done
+ for f in /tmp/gh-aw/aw-*.bundle; do
+ [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true
+ done
+ echo "Prepared threat detection files:"
+ ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true
+ - name: Setup threat detection
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ WORKFLOW_DESCRIPTION: "Daily monitor that checks the microsoft/APM Python source (packer.py, unpacker.py) for changes and ensures apm_pack.cjs and apm_unpack.cjs stay in sync; creates a PR when updates are needed"
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }}
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/setup_threat_detection.cjs');
+ await main();
+ - name: Ensure threat-detection directory and log
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ run: |
+ mkdir -p /tmp/gh-aw/threat-detection
+ touch /tmp/gh-aw/threat-detection/detection.log
+ - name: Setup Node.js
+ uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+ with:
+ node-version: '24'
+ package-manager-cache: false
+ - name: Install AWF binary
+ run: bash ${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh v0.25.3
+ - name: Install Claude Code CLI
+ run: npm install -g @anthropic-ai/claude-code@latest
+ - name: Execute Claude Code CLI
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ id: detection_agentic_execution
+ # Allowed tools (sorted):
+ # - Bash
+ # - BashOutput
+ # - ExitPlanMode
+ # - Glob
+ # - Grep
+ # - KillBash
+ # - LS
+ # - NotebookRead
+ # - Read
+ # - Task
+ # - TodoWrite
+ timeout-minutes: 20
+ run: |
+ set -o pipefail
+ touch /tmp/gh-aw/agent-step-summary.md
+ # shellcheck disable=SC1003
+ sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.3 --skip-pull --enable-api-proxy \
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --allowed-tools Bash,BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite --debug-file /tmp/gh-aw/threat-detection/detection.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_CLAUDE:+ --model "$GH_AW_MODEL_DETECTION_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
+ env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ BASH_DEFAULT_TIMEOUT_MS: 60000
+ BASH_MAX_TIMEOUT_MS: 60000
+ DISABLE_BUG_COMMAND: 1
+ DISABLE_ERROR_REPORTING: 1
+ DISABLE_TELEMETRY: 1
+ GH_AW_MODEL_DETECTION_CLAUDE: ${{ vars.GH_AW_MODEL_DETECTION_CLAUDE || '' }}
+ GH_AW_PHASE: detection
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_VERSION: dev
+ GITHUB_AW: true
+ GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com
+ GIT_AUTHOR_NAME: github-actions[bot]
+ GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com
+ GIT_COMMITTER_NAME: github-actions[bot]
+ MCP_TIMEOUT: 120000
+ MCP_TOOL_TIMEOUT: 60000
+ - name: Upload threat detection log
+ if: always() && steps.detection_guard.outputs.run_detection == 'true'
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: detection
+ path: /tmp/gh-aw/threat-detection/detection.log
+ if-no-files-found: ignore
+ - name: Parse and conclude threat detection
+ id: detection_conclusion
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }}
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_threat_detection_results.cjs');
+ await main();
+
+ pre_activation:
+ runs-on: ubuntu-slim
+ permissions:
+ contents: read
+ outputs:
+ activated: ${{ steps.check_membership.outputs.is_team_member == 'true' && steps.check_skip_if_match.outputs.skip_check_ok == 'true' }}
+ matched_command: ''
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Check team membership for workflow
+ id: check_membership
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_REQUIRED_ROLES: "admin,maintainer,write"
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/check_membership.cjs');
+ await main();
+ - name: Check skip-if-match query
+ id: check_skip_if_match
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_SKIP_QUERY: "is:pr is:open in:title \"[apm-js-mirror]\""
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ GH_AW_SKIP_MAX_MATCHES: "1"
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/check_skip_if_match.cjs');
+ await main();
+
+ safe_outputs:
+ needs:
+ - activation
+ - agent
+ - detection
+ if: (!cancelled()) && needs.agent.result != 'skipped' && needs.detection.result == 'success'
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ timeout-minutes: 15
+ env:
+ GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/apm-js-mirror-monitor"
+ GH_AW_ENGINE_ID: "claude"
+ GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
+ GH_AW_TRACKER_ID: "apm-js-mirror-monitor"
+ GH_AW_WORKFLOW_ID: "apm-js-mirror-monitor"
+ GH_AW_WORKFLOW_NAME: "APM JavaScript Mirror Monitor"
+ outputs:
+ code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }}
+ code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }}
+ create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }}
+ create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }}
+ created_issue_number: ${{ steps.process_safe_outputs.outputs.created_issue_number }}
+ created_issue_url: ${{ steps.process_safe_outputs.outputs.created_issue_url }}
+ created_pr_number: ${{ steps.process_safe_outputs.outputs.created_pr_number }}
+ created_pr_url: ${{ steps.process_safe_outputs.outputs.created_pr_url }}
+ process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }}
+ process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Download agent output artifact
+ id: download-agent-output
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
+ - name: Setup agent output environment variable
+ id: setup-agent-output-env
+ if: steps.download-agent-output.outcome == 'success'
+ run: |
+ mkdir -p /tmp/gh-aw/
+ find "/tmp/gh-aw/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
+ - name: Download patch artifact
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
+ - name: Checkout repository
+ if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'create_pull_request')
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ ref: ${{ github.base_ref || github.event.pull_request.base.ref || github.ref_name || github.event.repository.default_branch }}
+ token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ persist-credentials: false
+ fetch-depth: 1
+ - name: Configure Git credentials
+ if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'create_pull_request')
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ GIT_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git config --global am.keepcr true
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${GIT_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Configure GH_HOST for enterprise compatibility
+ id: ghes-host-config
+ shell: bash
+ run: |
+ # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct
+ # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op.
+ GH_HOST="${GITHUB_SERVER_URL#https://}"
+ GH_HOST="${GH_HOST#http://}"
+ echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV"
+ - name: Process Safe Outputs
+ id: process_safe_outputs
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
+ GITHUB_SERVER_URL: ${{ github.server_url }}
+ GITHUB_API_URL: ${{ github.api_url }}
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"expires\":72,\"labels\":[\"automation\",\"dependencies\",\"apm\"],\"max\":1,\"title_prefix\":\"[apm-js-mirror] \"},\"create_pull_request\":{\"expires\":72,\"labels\":[\"automation\",\"dependencies\",\"apm\"],\"max\":1,\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"CLAUDE.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\",\".claude/\"],\"reviewers\":[\"copilot\"],\"title_prefix\":\"[apm-js-mirror] \"},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"}}"
+ GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs');
+ await main();
+ - name: Upload Safe Output Items
+ if: always()
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: safe-output-items
+ path: /tmp/gh-aw/safe-output-items.jsonl
+ if-no-files-found: ignore
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && (needs.detection.result == 'success' || needs.detection.result == 'skipped')
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ env:
+ GH_AW_WORKFLOW_ID_SANITIZED: apmjsmirrormonitor
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ - name: Download cache-memory artifact (default)
+ id: download_cache_default
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Check if cache-memory folder has content (default)
+ id: check_cache_default
+ shell: bash
+ run: |
+ if [ -d "/tmp/gh-aw/cache-memory" ] && [ "$(ls -A /tmp/gh-aw/cache-memory 2>/dev/null)" ]; then
+ echo "has_content=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "has_content=false" >> "$GITHUB_OUTPUT"
+ fi
+ - name: Save cache-memory to cache (default)
+ if: steps.check_cache_default.outputs.has_content == 'true'
+ uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
+ with:
+ key: memory-none-nopolicy-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
diff --git a/.github/workflows/apm-js-mirror-monitor.md b/.github/workflows/apm-js-mirror-monitor.md
new file mode 100644
index 00000000000..2ef73ae415f
--- /dev/null
+++ b/.github/workflows/apm-js-mirror-monitor.md
@@ -0,0 +1,268 @@
+---
+description: Daily monitor that checks the microsoft/APM Python source (packer.py, unpacker.py) for changes and ensures apm_pack.cjs and apm_unpack.cjs stay in sync; creates a PR when updates are needed
+on:
+ schedule: daily
+ workflow_dispatch:
+ skip-if-match: 'is:pr is:open in:title "[apm-js-mirror]"'
+permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+tracker-id: apm-js-mirror-monitor
+engine: claude
+strict: false
+network:
+ allowed:
+ - defaults
+ - github
+ - "api.github.com"
+ - "raw.githubusercontent.com"
+tools:
+ cache-memory: true
+ web-fetch:
+ github:
+ toolsets: [repos, pull_requests]
+ bash:
+ - "*"
+ edit:
+safe-outputs:
+ create-pull-request:
+ title-prefix: "[apm-js-mirror] "
+ labels: [automation, dependencies, apm]
+ reviewers: [copilot]
+ expires: 3d
+ create-issue:
+ expires: 3d
+ title-prefix: "[apm-js-mirror] "
+ labels: [automation, dependencies, apm]
+ noop:
+timeout-minutes: 30
+---
+
+# APM JavaScript Mirror Monitor
+
+You are an expert JavaScript developer who maintains the gh-aw JavaScript reimplementations of the `microsoft/APM` Python package. Your job is to watch for changes to the upstream Python source and update the JS files when needed.
+
+## Current Context
+
+- **Repository**: ${{ github.repository }}
+- **Run**: ${{ github.run_id }}
+
+## Background
+
+gh-aw maintains two JavaScript files that mirror the Python implementations in [microsoft/APM](https://github.com/microsoft/APM):
+
+| JS file (in `actions/setup/js/`) | Python source (in `microsoft/APM`) | Purpose |
+|---|---|---|
+| `apm_unpack.cjs` | `src/apm/unpacker.py` | Extracts and deploys APM bundles |
+| `apm_pack.cjs` | `src/apm/packer.py` + `src/apm/lockfile_enrichment.py` | Packs workspace into a `.tar.gz` bundle |
+
+The JS files must stay functionally equivalent to their Python counterparts. Critical areas to keep in sync:
+- `TARGET_PREFIXES` map (target → deployed-file path prefixes)
+- `CROSS_TARGET_MAPS` map (cross-target path equivalences)
+- Pack/unpack algorithm steps and security checks
+- Lockfile YAML format (`apm.lock.yaml` structure)
+- New fields added to `LockedDependency`
+
+## Phase 1: Check Cache and Decide Whether to Proceed
+
+**Read cache-memory first** at `/tmp/gh-aw/cache-memory/apm-js-mirror/`:
+
+```bash
+ls /tmp/gh-aw/cache-memory/apm-js-mirror/ 2>/dev/null || echo "No cache found"
+cat /tmp/gh-aw/cache-memory/apm-js-mirror/state.json 2>/dev/null || echo "No state found"
+```
+
+The state file tracks:
+- `last_checked_at` — timestamp of the last check, in the hyphenated filesystem-safe format described under "Note on timestamps" below
+- `packer_sha` — last known commit SHA of `src/apm/packer.py`
+- `unpacker_sha` — last known commit SHA of `src/apm/unpacker.py`
+- `enrichment_sha` — last known commit SHA of `src/apm/lockfile_enrichment.py`
+- `apm_version` — last known APM release version
+- `js_in_sync` — boolean, whether JS files were in sync at last check
+
+**If** the cache shows a check within the last 20 hours AND `js_in_sync` is `true`, verify by quickly comparing the stored SHAs against current upstream. If unchanged, save a new timestamp and exit with noop.
+
+## Phase 2: Fetch Upstream Python Source
+
+Fetch release/commit metadata from the GitHub API (via curl, as shown below) and the Python source files from the `microsoft/APM` repository (via web-fetch).
+
+### 2.1 Get latest release version and commit SHAs
+
+```bash
+# Fetch latest APM release
+curl -s "https://api.github.com/repos/microsoft/APM/releases/latest" \
+ -H "Accept: application/vnd.github.v3+json"
+```
+
+Also fetch the commit history for each Python file to get its latest SHA:
+
+```bash
+# Latest commit for each relevant file
+curl -s "https://api.github.com/repos/microsoft/APM/commits?path=src/apm/packer.py&per_page=1" \
+ -H "Accept: application/vnd.github.v3+json"
+curl -s "https://api.github.com/repos/microsoft/APM/commits?path=src/apm/unpacker.py&per_page=1" \
+ -H "Accept: application/vnd.github.v3+json"
+curl -s "https://api.github.com/repos/microsoft/APM/commits?path=src/apm/lockfile_enrichment.py&per_page=1" \
+ -H "Accept: application/vnd.github.v3+json"
+```
+
+### 2.2 Compare SHAs with cached values
+
+If all three SHAs match the cached values and the cache is recent, there are no upstream changes. Save updated timestamp and exit with noop.
+
+### 2.3 Fetch Python source content
+
+If SHAs differ (or no cache), fetch the full source:
+
+Use web-fetch to retrieve:
+1. `https://raw.githubusercontent.com/microsoft/APM/main/src/apm/packer.py`
+2. `https://raw.githubusercontent.com/microsoft/APM/main/src/apm/unpacker.py`
+3. `https://raw.githubusercontent.com/microsoft/APM/main/src/apm/lockfile_enrichment.py`
+
+Save them locally for analysis:
+
+```bash
+mkdir -p /tmp/apm-upstream
+# Save fetched content to:
+# /tmp/apm-upstream/packer.py
+# /tmp/apm-upstream/unpacker.py
+# /tmp/apm-upstream/lockfile_enrichment.py
+```
+
+## Phase 3: Analyze Differences
+
+### 3.1 Read the current JS files
+
+```bash
+cat actions/setup/js/apm_pack.cjs
+cat actions/setup/js/apm_unpack.cjs
+```
+
+### 3.2 Compare TARGET_PREFIXES
+
+In `lockfile_enrichment.py`, look for the `TARGET_PREFIXES` dict (or equivalent constant). Compare with `TARGET_PREFIXES` in `apm_pack.cjs`. Flag any differences.
+
+### 3.3 Compare CROSS_TARGET_MAPS
+
+In `lockfile_enrichment.py`, look for the cross-target mapping (maps like `{".github/skills/": ".claude/skills/", ...}` per target). Compare with `CROSS_TARGET_MAPS` in `apm_pack.cjs`. Flag any differences.
+
+### 3.4 Compare pack algorithm steps
+
+In `packer.py`, look for `pack_bundle()` or equivalent. Compare the algorithm steps with `packBundle()` in `apm_pack.cjs`:
+1. Read apm.yml for name/version
+2. Read apm.lock.yaml
+3. Detect target
+4. Filter deployed_files by target
+5. Verify files exist
+6. Copy files (skip symlinks)
+7. Write enriched lockfile with pack: header
+8. Create tar.gz archive
+
+Note any new steps or changed semantics.
+
+### 3.5 Compare unpack algorithm steps
+
+In `unpacker.py`, look for `unpack_bundle()` or equivalent. Compare with `unpackBundle()` in `apm_unpack.cjs`:
+1. Find tar.gz bundle
+2. Extract to temp directory
+3. Find inner bundle directory
+4. Read lockfile
+5. Collect deployed_files
+6. Verify bundle completeness
+7. Copy files to output directory
+8. Clean up temp directory
+
+Note any new steps or changed semantics.
+
+### 3.6 Compare LockedDependency fields
+
+In `unpacker.py` or a shared model file, find the fields of the lock file dependency object. Compare with the `LockedDependency` typedef in `apm_unpack.cjs`. Flag any new or removed fields.
+
+### 3.7 Compare lockfile YAML format
+
+Look for changes in how PyYAML serializes the lockfile (field order, quoting conventions). Compare with `serializeLockfileYaml()` in `apm_pack.cjs`.
+
+## Phase 4: Produce Updates or Report
+
+### Case A: No functional differences
+
+If the analysis finds only cosmetic differences (comments, whitespace, variable names) with no functional impact:
+
+1. Update cache with new SHAs and `js_in_sync: true`
+2. Exit with noop:
+
+```json
+{"noop": {"message": "APM JS mirror is up to date. Checked packer.py (SHA: ), unpacker.py (SHA: ), lockfile_enrichment.py (SHA: ). No functional differences found."}}
+```
+
+### Case B: Functional differences found — create a PR
+
+When the analysis identifies functional differences that require JS updates:
+
+1. **Make the changes** to `actions/setup/js/apm_pack.cjs` and/or `actions/setup/js/apm_unpack.cjs` to mirror the upstream Python changes.
+ - Update `TARGET_PREFIXES` if target mappings changed
+ - Update `CROSS_TARGET_MAPS` if cross-target mappings changed
+ - Update algorithm steps if pack/unpack logic changed
+ - Add/remove `LockedDependency` fields if the lockfile schema changed
+ - Update `serializeLockfileYaml()` if lockfile format changed
+ - Update `parseAPMLockfile()` if the YAML parser needs changes
+
+2. **Run the JS tests** to verify nothing is broken:
+ ```bash
+ cd actions/setup/js && npm ci --silent && npx vitest run --no-file-parallelism apm_pack.test.cjs apm_unpack.test.cjs
+ ```
+ If tests fail, update them to reflect the new behavior (the tests should match the Python reference behavior).
+
+3. **Format** the modified files:
+ ```bash
+ cd actions/setup/js && npx prettier --write 'apm_pack.cjs' 'apm_unpack.cjs' 'apm_pack.test.cjs' 'apm_unpack.test.cjs' --ignore-path ../../../.prettierignore
+ ```
+
+4. Update the cache state with new SHAs and `js_in_sync: true`.
+
+5. Create a pull request with all modified files. The PR description must include:
+ - Which Python files changed (with links to the commits)
+ - What functional differences were found
+ - What was updated in the JS files
+ - Test results
+
+### Case C: Breaking changes that cannot be auto-fixed
+
+If the Python source has changed in a way that is too complex to automatically mirror (e.g., major algorithmic refactor, new external dependencies):
+
+1. Update cache with new SHAs and `js_in_sync: false`.
+
+2. Create an issue describing:
+ - What changed in the upstream Python source
+ - What needs to be updated in the JS files
+ - Suggested approach for the manual update
+
+## Cache State
+
+Save updated state after every run (success or failure):
+
+```bash
+mkdir -p /tmp/gh-aw/cache-memory/apm-js-mirror
+cat > /tmp/gh-aw/cache-memory/apm-js-mirror/state.json << EOF
+{
+ "last_checked_at": "$(date -u +%Y-%m-%dT%H-%M-%S-000Z)",
+ "packer_sha": "",
+ "unpacker_sha": "",
+ "enrichment_sha": "",
+ "apm_version": "",
+ "js_in_sync": true
+}
+EOF
+```
+
+**Note on timestamps**: Use `YYYY-MM-DDTHH-MM-SS-mmmZ` format (hyphens instead of colons) to comply with filesystem naming restrictions for cache-memory artifact uploads.
+
+## Guidelines
+
+- Always check cache first to avoid redundant upstream API calls
+- Only create a PR if there are genuine functional differences
+- Keep JS files functionally equivalent but not necessarily structurally identical to Python
+- Preserve JSDoc comments and the existing code style
+- Never remove security checks (path-traversal, symlink skipping, boundary checks)
+- If the Python source is unreachable, save error to cache and exit with noop
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bf9ad979874..8edc874f03f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -953,28 +953,20 @@ jobs:
echo "✨ Live API test completed successfully" >> $GITHUB_STEP_SUMMARY
fi
- js-apm-unpack-integration:
- name: APM Pack/Unpack Integration (Python vs JS)
+ js-apm-integration:
+ name: APM JS Pack/Unpack Integration
runs-on: ubuntu-latest
timeout-minutes: 15
needs: validate-yaml
permissions:
contents: read
concurrency:
- group: ci-${{ github.ref }}-js-apm-unpack-integration
+ group: ci-${{ github.ref }}-js-apm-integration
cancel-in-progress: true
steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- - name: Set up Python
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
- with:
- python-version: "3.12"
-
- - name: Install APM CLI
- run: pip install --quiet apm-cli
-
- name: Set up Node.js
id: setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
@@ -1000,6 +992,8 @@ jobs:
APMEOF
# apm.lock.yaml — two dependencies, mixed files and a directory entry
+ # (apm_install.cjs would normally produce this from real GitHub repos;
+ # here we create it manually so the CI test runs without GitHub API access)
cat > apm.lock.yaml << 'APMEOF'
lockfile_version: '1'
apm_version: '0.8.5'
@@ -1034,68 +1028,264 @@ jobs:
echo "✅ APM test project created at $APM_PROJECT"
find "$APM_PROJECT" -type f | sort
- - name: Pack APM bundle
+ - name: Pack APM bundle with JavaScript (apm_pack.cjs)
+ env:
+ APM_WORKSPACE: /tmp/apm-test-project
+ APM_BUNDLE_OUTPUT: /tmp/apm-bundle-js
+ APM_TARGET: all
run: |
set -e
- cd /tmp/apm-test-project
- mkdir -p /tmp/apm-bundle
- apm pack --archive -o /tmp/apm-bundle
+ mkdir -p /tmp/apm-bundle-js
+ node actions/setup/js/run_apm_pack.cjs
echo ""
- echo "✅ Bundle created:"
- ls -lh /tmp/apm-bundle/*.tar.gz
+ echo "✅ JavaScript bundle created:"
+ ls -lh /tmp/apm-bundle-js/*.tar.gz
- - name: Unpack with Python (microsoft/apm reference)
+ - name: Unpack JavaScript bundle with JavaScript (apm_unpack.cjs)
+ env:
+ APM_BUNDLE_DIR: /tmp/apm-bundle-js
+ OUTPUT_DIR: /tmp/apm-out-js-js
run: |
set -e
- mkdir -p /tmp/apm-out-python
- BUNDLE=$(ls /tmp/apm-bundle/*.tar.gz)
- apm unpack "$BUNDLE" -o /tmp/apm-out-python
+ mkdir -p /tmp/apm-out-js-js
+ node actions/setup/js/run_apm_unpack.cjs
echo ""
- echo "=== Python unpack result ==="
- find /tmp/apm-out-python -type f | sort
+ echo "=== JavaScript pack + JavaScript unpack ==="
+ find /tmp/apm-out-js-js -type f | sort
+
+ - name: Verify JS pack/unpack round-trip output
+ run: |
+ set -e
+ echo "## APM JS Pack/Unpack Integration Test" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ PASS=true
+
+ check_file() {
+ local file="$1"
+ local expected_content="$2"
+ if [ -f "/tmp/apm-out-js-js/$file" ]; then
+ if [ -n "$expected_content" ]; then
+ actual=$(cat "/tmp/apm-out-js-js/$file")
+ if echo "$actual" | grep -qF "$expected_content"; then
+ echo "✅ $file — content ok" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "❌ $file — unexpected content" >> $GITHUB_STEP_SUMMARY
+ echo " expected: $expected_content" >> $GITHUB_STEP_SUMMARY
+ echo " actual: $actual" >> $GITHUB_STEP_SUMMARY
+ PASS=false
+ fi
+ else
+ echo "✅ $file — present" >> $GITHUB_STEP_SUMMARY
+ fi
+ else
+ echo "❌ $file — missing" >> $GITHUB_STEP_SUMMARY
+ PASS=false
+ fi
+ }
+
+ check_file ".github/skills/skill-a/skill.md" "Skill A"
+ check_file ".github/skills/skill-a/notes.txt" "Skill A helper notes"
+ check_file ".github/copilot-instructions.md" "Copilot Instructions"
+ check_file ".github/skills/skill-b/skill.md" "Skill B"
+ check_file ".github/agents.md" "Agent configuration"
+
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### Files in output:" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ find /tmp/apm-out-js-js -type f | sort | sed "s|/tmp/apm-out-js-js/||" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+
+ if [ "$PASS" = "true" ]; then
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### ✅ JS pack/unpack round-trip passed" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### ❌ JS pack/unpack round-trip failed" >> $GITHUB_STEP_SUMMARY
+ exit 1
+ fi
+
+
+ apm-parity-test:
+ name: APM JS/Python Parity Test (microsoft/apm-sample-package)
+ runs-on: ubuntu-latest
+ timeout-minutes: 15
+ needs: validate-yaml
+ permissions:
+ contents: read
+ concurrency:
+ group: ci-${{ github.ref }}-apm-parity-test
+ cancel-in-progress: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+
+ - name: Set up Python
+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
+ with:
+ python-version: "3.x"
+
+ - name: Set up Node.js
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ with:
+ node-version: "24"
+ cache: npm
+ cache-dependency-path: actions/setup/js/package-lock.json
- - name: Unpack with JavaScript (apm_unpack.cjs)
+ - name: Install npm dependencies
+ run: cd actions/setup/js && npm ci
+
+ - name: Install apm-cli
+ run: pip install apm-cli
+
+ - name: Check if GitHub PAT is available
+ id: check-pat
env:
- APM_BUNDLE_DIR: /tmp/apm-bundle
- OUTPUT_DIR: /tmp/apm-out-js
+ GITHUB_APM_PAT: ${{ secrets.GH_AW_PLUGINS_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ run: |
+ if [ -z "$GITHUB_APM_PAT" ]; then
+ echo "skip=true" >> "$GITHUB_OUTPUT"
+ echo "⚠️ No GITHUB_APM_PAT available — skipping parity test" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "skip=false" >> "$GITHUB_OUTPUT"
+ echo "✅ GITHUB_APM_PAT available" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ # ── Python path ──────────────────────────────────────────────────────────
+
+ - name: Install package with Python apm-cli
+ if: steps.check-pat.outputs.skip == 'false'
+ env:
+ GITHUB_APM_PAT: ${{ secrets.GH_AW_PLUGINS_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
run: |
set -e
- mkdir -p /tmp/apm-out-js
- node actions/setup/js/run_apm_unpack.cjs
- echo ""
- echo "=== JavaScript unpack result ==="
+ mkdir -p /tmp/apm-workspace-python
+ cd /tmp/apm-workspace-python
+ cat > apm.yml << 'APMEOF'
+ name: apm-parity-test
+ version: 0.0.0
+ dependencies:
+ apm:
+ - microsoft/apm-sample-package
+ APMEOF
+ apm install
+ echo "=== Python workspace after apm install ==="
+ find /tmp/apm-workspace-python -type f | sort
+
+ - name: Pack with Python apm-cli
+ if: steps.check-pat.outputs.skip == 'false'
+ run: |
+ set -e
+ mkdir -p /tmp/apm-bundle-python
+ cd /tmp/apm-workspace-python
+ apm pack --archive --output /tmp/apm-bundle-python
+ echo "=== Python bundle ==="
+ ls -lh /tmp/apm-bundle-python/*.tar.gz
+
+ # ── JavaScript path ──────────────────────────────────────────────────────
+
+ - name: Install package with JavaScript (apm_install.cjs)
+ if: steps.check-pat.outputs.skip == 'false'
+ env:
+ GITHUB_APM_PAT: ${{ secrets.GH_AW_PLUGINS_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ APM_PACKAGES: '["microsoft/apm-sample-package"]'
+ APM_WORKSPACE: /tmp/apm-workspace-js
+ run: |
+ set -e
+ mkdir -p /tmp/apm-workspace-js
+ node actions/setup/js/run_apm_install.cjs
+ echo "=== JS workspace after apm_install.cjs ==="
+ find /tmp/apm-workspace-js -type f | sort
+
+ - name: Pack with JavaScript (apm_pack.cjs)
+ if: steps.check-pat.outputs.skip == 'false'
+ env:
+ APM_WORKSPACE: /tmp/apm-workspace-js
+ APM_BUNDLE_OUTPUT: /tmp/apm-bundle-js
+ run: |
+ set -e
+ mkdir -p /tmp/apm-bundle-js
+ node actions/setup/js/run_apm_pack.cjs
+ echo "=== JS bundle ==="
+ ls -lh /tmp/apm-bundle-js/*.tar.gz
+
+ # ── Comparison ───────────────────────────────────────────────────────────
+
+ - name: Unpack both bundles with JavaScript (apm_unpack.cjs)
+ if: steps.check-pat.outputs.skip == 'false'
+ run: |
+ set -e
+ mkdir -p /tmp/apm-out-python /tmp/apm-out-js
+ APM_BUNDLE_DIR=/tmp/apm-bundle-python OUTPUT_DIR=/tmp/apm-out-python \
+ node actions/setup/js/run_apm_unpack.cjs
+ APM_BUNDLE_DIR=/tmp/apm-bundle-js OUTPUT_DIR=/tmp/apm-out-js \
+ node actions/setup/js/run_apm_unpack.cjs
+ echo "=== Python bundle unpacked ==="
+ find /tmp/apm-out-python -type f | sort
+ echo "=== JS bundle unpacked ==="
find /tmp/apm-out-js -type f | sort
- - name: Compare Python vs JavaScript unpack outputs
+ - name: Compare Python and JavaScript outputs
+ if: steps.check-pat.outputs.skip == 'false'
run: |
set -e
- echo "## APM Unpack Integration Test" >> $GITHUB_STEP_SUMMARY
+ echo "## APM JS/Python Parity Test" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "Comparing pack output for \`microsoft/apm-sample-package\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
- echo "### Files unpacked by Python (reference)" >> $GITHUB_STEP_SUMMARY
- echo '```' >> $GITHUB_STEP_SUMMARY
- find /tmp/apm-out-python -type f | sort | sed "s|/tmp/apm-out-python/||" >> $GITHUB_STEP_SUMMARY
- echo '```' >> $GITHUB_STEP_SUMMARY
+ PASS=true
- echo "### Files unpacked by JavaScript" >> $GITHUB_STEP_SUMMARY
- echo '```' >> $GITHUB_STEP_SUMMARY
- find /tmp/apm-out-js -type f | sort | sed "s|/tmp/apm-out-js/||" >> $GITHUB_STEP_SUMMARY
- echo '```' >> $GITHUB_STEP_SUMMARY
+ # Compare file lists, excluding apm.lock.yaml which has minor metadata
+ # differences between implementations (resolved_by, timestamps, etc.)
+ python_files=$(find /tmp/apm-out-python -type f | sort | sed "s|/tmp/apm-out-python/||" | grep -v "^apm\.lock\.yaml$")
+ js_files=$(find /tmp/apm-out-js -type f | sort | sed "s|/tmp/apm-out-js/||" | grep -v "^apm\.lock\.yaml$")
- if diff -rq /tmp/apm-out-python /tmp/apm-out-js > /tmp/apm-diff.txt 2>&1; then
- echo "### ✅ Outputs are identical" >> $GITHUB_STEP_SUMMARY
- echo "✅ Python and JavaScript unpack results match"
+ if [ "$python_files" = "$js_files" ]; then
+ echo "✅ File lists match" >> $GITHUB_STEP_SUMMARY
else
- echo "### ❌ Outputs differ" >> $GITHUB_STEP_SUMMARY
+ echo "❌ File lists differ" >> $GITHUB_STEP_SUMMARY
echo '```diff' >> $GITHUB_STEP_SUMMARY
- diff -r /tmp/apm-out-python /tmp/apm-out-js >> $GITHUB_STEP_SUMMARY 2>&1 || true
+ diff <(echo "$python_files") <(echo "$js_files") >> $GITHUB_STEP_SUMMARY || true
echo '```' >> $GITHUB_STEP_SUMMARY
- echo "❌ Python and JavaScript unpack results differ:"
- cat /tmp/apm-diff.txt
- diff -r /tmp/apm-out-python /tmp/apm-out-js || true
+ PASS=false
+ fi
+
+ # Compare content of each matching file
+ CONTENT_DIFFS=0
+ for rel_path in $python_files; do
+ py_file="/tmp/apm-out-python/$rel_path"
+ js_file="/tmp/apm-out-js/$rel_path"
+ if [ ! -f "$js_file" ]; then
+ continue
+ fi
+ if ! diff -q "$py_file" "$js_file" > /dev/null 2>&1; then
+ echo "❌ Content differs: $rel_path" >> $GITHUB_STEP_SUMMARY
+ echo '```diff' >> $GITHUB_STEP_SUMMARY
+ diff "$py_file" "$js_file" >> $GITHUB_STEP_SUMMARY || true
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ CONTENT_DIFFS=$((CONTENT_DIFFS + 1))
+ PASS=false
+ fi
+ done
+ if [ "$CONTENT_DIFFS" -eq 0 ] && [ "$PASS" = "true" ]; then
+ echo "✅ All file contents match" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### Files in Python output:" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ echo "$python_files" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+
+ if [ "$PASS" = "true" ]; then
+ echo "### ✅ JS and Python outputs are identical" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "### ❌ JS and Python outputs differ" >> $GITHUB_STEP_SUMMARY
exit 1
fi
+
bench:
# Only run benchmarks on main branch for performance tracking
if: github.ref == 'refs/heads/main'
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index 8b2ab211bc3..4c0949008c2 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -164,7 +164,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
cat << 'GH_AW_PROMPT_595ce40f90294a31_EOF'
- Tools: create_discussion, missing_tool, missing_data, noop
+ Tools: create_discussion, missing_tool, missing_data, noop, notion_add_comment, post_to_slack_channel
The following GitHub context information is available for this workflow:
diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 4e0070840ab..038841568cd 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -212,7 +212,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
cat << 'GH_AW_PROMPT_c07dfaff6885ae7d_EOF'
- Tools: add_comment(max:2), create_issue, close_pull_request, update_pull_request, create_pull_request_review_comment(max:5), submit_pull_request_review, resolve_pull_request_review_thread(max:5), add_labels, add_reviewer(max:2), push_to_pull_request_branch, missing_tool, missing_data, noop
+ Tools: add_comment(max:2), create_issue, close_pull_request, update_pull_request, create_pull_request_review_comment(max:5), submit_pull_request_review, resolve_pull_request_review_thread(max:5), add_labels, add_reviewer(max:2), push_to_pull_request_branch, missing_tool, missing_data, noop, post_slack_message
GH_AW_PROMPT_c07dfaff6885ae7d_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_push_to_pr_branch.md"
cat << 'GH_AW_PROMPT_c07dfaff6885ae7d_EOF'
@@ -2307,24 +2307,40 @@ jobs:
apm:
needs: activation
runs-on: ubuntu-slim
- permissions: {}
+ permissions:
+ contents: read
env:
GH_AW_INFO_APM_VERSION: v0.8.6
steps:
- - name: Install and pack APM dependencies
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ safe-output-custom-tokens: 'true'
+ - name: Install and pack APM bundle
id: apm_pack
- uses: microsoft/apm-action@a190b0b1a91031057144dc136acf9757a59c9e4d # v1.4.1
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
- GITHUB_TOKEN: ${{ secrets.GH_AW_PLUGINS_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ GITHUB_APM_PAT: ${{ secrets.GH_AW_PLUGINS_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ APM_PACKAGES: '["microsoft/apm-sample-package"]'
+ APM_WORKSPACE: /tmp/gh-aw/apm-workspace
+ APM_BUNDLE_OUTPUT: /tmp/gh-aw/apm-bundle-output
+ APM_TARGET: claude
with:
- dependencies: |
- - microsoft/apm-sample-package
- isolated: 'true'
- pack: 'true'
- archive: 'true'
- target: claude
- working-directory: /tmp/gh-aw/apm-workspace
- apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main: apmInstall } = require('${{ runner.temp }}/gh-aw/actions/apm_install.cjs');
+ await apmInstall();
+ const { main: apmPack } = require('${{ runner.temp }}/gh-aw/actions/apm_pack.cjs');
+ await apmPack();
- name: Upload APM bundle artifact
if: success()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index 24cfc051efa..302eb683f9a 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -206,7 +206,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
cat << 'GH_AW_PROMPT_20c992e85e9b3ade_EOF'
- Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop
+ Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop, add_smoked_label
The following GitHub context information is available for this workflow:
diff --git a/.github/workflows/smoke-copilot-arm.lock.yml b/.github/workflows/smoke-copilot-arm.lock.yml
index b8efd704e47..f0d5c80a195 100644
--- a/.github/workflows/smoke-copilot-arm.lock.yml
+++ b/.github/workflows/smoke-copilot-arm.lock.yml
@@ -203,7 +203,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
cat << 'GH_AW_PROMPT_1fdc0a0cb4a09695_EOF'
- Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, add_labels, remove_labels, dispatch_workflow, missing_tool, missing_data, noop
+ Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, add_labels, remove_labels, dispatch_workflow, missing_tool, missing_data, noop, send_slack_message
The following GitHub context information is available for this workflow:
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index c9dd1595d80..bc49dd5ca57 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -209,7 +209,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
cat << 'GH_AW_PROMPT_7b74fbc49451ebfd_EOF'
- Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, reply_to_pull_request_review_comment(max:5), add_labels, remove_labels, set_issue_type, dispatch_workflow, missing_tool, missing_data, noop
+ Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, reply_to_pull_request_review_comment(max:5), add_labels, remove_labels, set_issue_type, dispatch_workflow, missing_tool, missing_data, noop, send_slack_message
The following GitHub context information is available for this workflow:
diff --git a/Makefile b/Makefile
index d9dd91b5a6d..fc8b868da6f 100644
--- a/Makefile
+++ b/Makefile
@@ -216,6 +216,11 @@ security-govulncheck:
test-js: build-js
cd actions/setup/js && npm run test:js -- --no-file-parallelism
+# Test APM JavaScript support (apm_install, apm_pack, apm_unpack)
+.PHONY: test-apm
+test-apm: build-js
+ cd actions/setup/js && npx vitest run --no-file-parallelism --reporter=verbose apm_install.test.cjs apm_pack.test.cjs apm_unpack.test.cjs
+
# Install JavaScript dependencies
.PHONY: deps-js
deps-js: check-node-version
@@ -760,6 +765,7 @@ help:
@echo " test-unit - Run Go unit tests only (faster)"
@echo " test-security - Run security regression tests"
@echo " test-js - Run JavaScript tests"
+ @echo " test-apm - Run APM JavaScript tests (apm_install, apm_pack, apm_unpack)"
@echo " test-all - Run all tests (Go, JavaScript, and wasm golden)"
@echo " test-wasm-golden - Run wasm golden tests (Go string API path)"
@echo " test-wasm - Build wasm and run Node.js golden comparison test"
diff --git a/actions/setup/js/apm_install.cjs b/actions/setup/js/apm_install.cjs
new file mode 100644
index 00000000000..e79472cf400
--- /dev/null
+++ b/actions/setup/js/apm_install.cjs
@@ -0,0 +1,499 @@
+// @ts-check
+///
+
+/**
+ * APM Package Installer
+ *
+ * JavaScript reimplementation of `apm install`. Downloads APM packages
+ * from GitHub and creates the installed workspace used by `apm pack`.
+ *
+ * Algorithm:
+ * 1. Parse APM_PACKAGES (JSON array of package slugs) from the environment
+ * 2. For each package slug:
+ * a. Parse the slug: owner/repo[/subpath][#ref]
+ * b. Resolve the ref (branch/tag/SHA) to a full commit SHA
+ * c. Scan the repo tree recursively for deployable files
+ * - Full package (no subpath): files under .github/, .claude/, .cursor/, .opencode/
+ * - Individual primitive (with subpath): files under {target_dir}/{subpath}/
+ * d. Download each file and write to APM_WORKSPACE at its original path
+ * e. Record the resolved dependency in the lockfile
+ * 3. Write apm.yml (workspace metadata for the packer) to APM_WORKSPACE
+ * 4. Write apm.lock.yaml (resolved dependency manifest) to APM_WORKSPACE
+ *
+ * Environment variables:
+ * GITHUB_APM_PAT – GitHub token for API access (required for private repos;
+ * falls back to GITHUB_TOKEN if not set)
+ * APM_PACKAGES – JSON array of package slugs,
+ * e.g. '["microsoft/apm-sample-package","org/repo/skills/foo#v2"]'
+ * APM_WORKSPACE – destination directory for downloaded files + lockfile
+ * (default: /tmp/gh-aw/apm-workspace)
+ *
+ * @module apm_install
+ */
+
+"use strict";
+
+const fs = require("fs");
+const path = require("path");
+
+/** Lockfile filename written to the workspace. */
+const LOCKFILE_NAME = "apm.lock.yaml";
+
+/** apm.yml filename for workspace metadata (consumed by apm_pack). */
+const APM_YML_NAME = "apm.yml";
+
+/** Directories that contain deployable APM primitives. */
+const TARGET_DIRS = [".github/", ".claude/", ".cursor/", ".opencode/"];
+
+// ---------------------------------------------------------------------------
+// Package slug parser
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} PackageRef
+ * @property {string} owner - GitHub org / user
+ * @property {string} repo - Repository name
+ * @property {string | null} subpath - Path within the repo (e.g. "skills/foo"); null = full package
+ * @property {string | null} ref - Git ref (branch, tag, SHA); null = default branch
+ */
+
/**
 * Split an APM package slug into owner, repo, optional subpath, and optional ref.
 *
 * Accepted shapes:
 *   owner/repo
 *   owner/repo#ref
 *   owner/repo/path/to/primitive
 *   owner/repo/path/to/primitive#ref
 *
 * @param {string} slug
 * @returns {PackageRef}
 */
function parsePackageSlug(slug) {
  if (typeof slug !== "string" || slug === "") {
    throw new Error(`Invalid package slug: ${JSON.stringify(slug)}`);
  }

  // Separate the optional "#ref" suffix from the path portion. An empty
  // suffix ("owner/repo#") is treated the same as no ref at all.
  let ref = null;
  let pathPart = slug;
  const hashIdx = slug.indexOf("#");
  if (hashIdx >= 0) {
    ref = slug.slice(hashIdx + 1) || null;
    pathPart = slug.slice(0, hashIdx);
  }

  const segments = pathPart.split("/");
  const [owner, repo] = segments;
  if (segments.length < 2 || !owner || !repo) {
    throw new Error(`Invalid package slug (expected owner/repo[/subpath][#ref]): ${JSON.stringify(slug)}`);
  }

  // Everything after owner/repo is the primitive subpath (may contain slashes).
  const subpath = segments.length > 2 ? segments.slice(2).join("/") : null;
  return { owner, repo, subpath, ref };
}
+
+// ---------------------------------------------------------------------------
+// YAML scalar serializer (inline copy — avoids cross-module circular dep)
+// ---------------------------------------------------------------------------
+
/**
 * Render a scalar as a YAML value, single-quoting strings that a YAML parser
 * would otherwise coerce to another type (keywords, numbers, ISO timestamps).
 * Mirrors PyYAML safe_dump quoting.
 *
 * @param {string | number | boolean | null | undefined} value
 * @returns {string}
 */
function scalarToYaml(value) {
  if (value === null || value === undefined) return "null";
  switch (typeof value) {
    case "boolean":
      return value ? "true" : "false";
    case "number":
      return String(value);
  }
  const text = String(value);
  // String values that YAML would reinterpret unless quoted.
  const keywords = ["", "null", "~", "true", "false", "yes", "no", "on", "off"];
  const looksTyped =
    keywords.includes(text) ||
    /^-?\d+$/.test(text) || // integer literal
    /^-?\d+\.\d+$/.test(text) || // float literal
    /^\d{4}-\d{2}-\d{2}T/.test(text); // ISO 8601 datetime
  // Single quotes in YAML are escaped by doubling them.
  return looksTyped ? `'${text.replace(/'/g, "''")}'` : text;
}
+
+// ---------------------------------------------------------------------------
+// Lockfile writer
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} InstalledDependency
+ * @property {string} repo_url
+ * @property {string} resolved_commit
+ * @property {string} resolved_ref
+ * @property {string[]} deployed_files
+ */
+
/**
 * Write apm.lock.yaml into the workspace, describing each resolved dependency
 * in the fixed field order the APM packer expects.
 *
 * @param {string} workspaceDir
 * @param {InstalledDependency[]} dependencies
 */
function writeWorkspaceLockfile(workspaceDir, dependencies) {
  // Header: version marker, generation timestamp, then the dependency list.
  const out = [`lockfile_version: ${scalarToYaml("1")}`, `generated_at: ${scalarToYaml(new Date().toISOString())}`, "apm_version: null", "dependencies:"];

  for (const dep of dependencies) {
    out.push(
      `- repo_url: ${scalarToYaml(dep.repo_url)}`,
      "  host: github.com",
      `  resolved_commit: ${scalarToYaml(dep.resolved_commit)}`,
      `  resolved_ref: ${scalarToYaml(dep.resolved_ref)}`,
      "  version: null",
      "  virtual_path: null",
      "  is_virtual: false",
      "  depth: 1",
      "  resolved_by: apm_install.cjs",
      "  package_type: apm",
      "  source: null",
      "  local_path: null",
      "  content_hash: null",
      "  is_dev: false",
      "  deployed_files:"
    );
    for (const file of dep.deployed_files) {
      out.push(`  - ${scalarToYaml(file)}`);
    }
  }

  fs.writeFileSync(path.join(workspaceDir, LOCKFILE_NAME), out.join("\n") + "\n", "utf-8");
}
+
/**
 * Write the minimal workspace apm.yml (read by apm_pack to name the bundle).
 *
 * @param {string} workspaceDir
 */
function writeWorkspaceApmYml(workspaceDir) {
  const body = ["name: gh-aw-workspace", "version: 0.0.0", ""].join("\n");
  fs.writeFileSync(path.join(workspaceDir, APM_YML_NAME), body, "utf-8");
}
+
+// ---------------------------------------------------------------------------
+// GitHub API client factory
+// ---------------------------------------------------------------------------
+
/**
 * Create an authenticated Octokit client.
 *
 * Priority:
 *   1. Custom token in GITHUB_APM_PAT (may differ from GITHUB_TOKEN for private repos)
 *   2. GITHUB_TOKEN (workflow token, public repos + repos accessible to workflow)
 *
 * When running via actions/github-script, global.github is available but is
 * authenticated with GITHUB_TOKEN. We create a dedicated instance with
 * GITHUB_APM_PAT so private package repos are accessible.
 *
 * Implementation resolution order (decided at runtime via require()):
 *   1. @actions/github `getOctokit` — bundled with actions/github-script in
 *      CJS-compatible versions.
 *   2. @octokit/core composed with the REST-endpoint + pagination plugins —
 *      fallback when @actions/github is v9+ (ESM-only) and require() throws.
 *
 * @param {string} token - GitHub PAT or workflow token
 * @returns Octokit instance
 */
function createOctokit(token) {
  // @actions/github is bundled with actions/github-script and available in
  // older CJS-compatible versions. When running standalone with @actions/github
  // v9+ (ESM-only), fall back to @octokit/core + plugins.
  try {
    // @ts-ignore – dynamic require at runtime
    const { getOctokit } = require("@actions/github");
    return getOctokit(token);
  } catch {
    // @actions/github v9+ is ESM-only; use @octokit/core + rest-endpoint-methods
    // so the returned client still exposes the same octokit.rest.* surface.
    // @ts-ignore – dynamic require at runtime
    const { Octokit } = require("@octokit/core");
    // @ts-ignore – dynamic require at runtime
    const { restEndpointMethods } = require("@octokit/plugin-rest-endpoint-methods");
    // @ts-ignore – dynamic require at runtime
    const { paginateRest } = require("@octokit/plugin-paginate-rest");
    const MyOctokit = Octokit.plugin(restEndpointMethods, paginateRest);
    return new MyOctokit({ auth: token });
  }
}
+
+// ---------------------------------------------------------------------------
+// GitHub REST helpers
+// ---------------------------------------------------------------------------
+
/**
 * Resolve a git ref (branch name, tag, SHA, etc.) to a full commit SHA plus
 * the commit's root tree SHA. When ref is null, the repository's default
 * branch is used.
 *
 * @param {*} octokit
 * @param {string} owner
 * @param {string} repo
 * @param {string | null} ref
 * @returns {Promise<{commitSha: string, resolvedRef: string, treeSha: string}>}
 */
async function resolveCommit(octokit, owner, repo, ref) {
  let targetRef = ref;
  if (!targetRef) {
    // No explicit ref supplied: ask the API for the default branch.
    const repoInfo = await octokit.rest.repos.get({ owner, repo });
    targetRef = repoInfo.data.default_branch;
  }

  const commitResp = await octokit.rest.repos.getCommit({
    owner,
    repo,
    ref: targetRef,
  });

  return {
    commitSha: commitResp.data.sha,
    // targetRef is always set by this point; the ?? guard satisfies ts-check.
    resolvedRef: targetRef ?? "",
    treeSha: commitResp.data.commit.tree.sha,
  };
}
+
/**
 * Get the recursive file tree for a commit.
 * Returns only blob (file) entries with their paths and blob SHAs.
 *
 * @param {*} octokit
 * @param {string} owner
 * @param {string} repo
 * @param {string} treeSha
 * @returns {Promise<Array<{path: string, sha: string}>>}
 */
async function getFileTree(octokit, owner, repo, treeSha) {
  const { data } = await octokit.rest.git.getTree({
    owner,
    repo,
    tree_sha: treeSha,
    recursive: "1",
  });
  // data.tree may be absent (e.g. empty repository); treat as no files.
  const blobs = (data.tree || []).filter(entry => entry.type === "blob" && entry.path);
  return blobs.map(entry => ({
    // JSDoc casts only apply to a parenthesized expression; without the
    // parentheses the cast was a no-op under ts-check.
    path: /** @type {string} */ (entry.path),
    sha: entry.sha || "",
  }));
}
+
/**
 * Download raw file content from a GitHub repo at a specific commit SHA.
 *
 * @param {*} octokit
 * @param {string} owner
 * @param {string} repo
 * @param {string} filePath
 * @param {string} ref
 * @returns {Promise<Buffer>}
 */
async function downloadFileContent(octokit, owner, repo, filePath, ref) {
  const { data } = await octokit.rest.repos.getContent({
    owner,
    repo,
    path: filePath,
    ref,
  });

  // getContent returns different shapes; we only handle file blobs here
  if (Array.isArray(data)) {
    throw new Error(`Expected a file at '${filePath}' in ${owner}/${repo} but got a directory listing`);
  }
  if (!("content" in data) || !("encoding" in data)) {
    throw new Error(`Unexpected content shape for '${filePath}' in ${owner}/${repo}`);
  }

  // Content is base64-encoded (the default encoding from the API).
  // JSDoc casts require a parenthesized expression; without the parentheses
  // the original cast was a no-op under ts-check.
  const content = /** @type {{ content: string; encoding: string }} */ (data);
  if (content.encoding !== "base64") {
    throw new Error(`Unexpected encoding '${content.encoding}' for '${filePath}'`);
  }
  // The API wraps base64 payloads with newlines; strip them before decoding.
  return Buffer.from(content.content.replace(/\n/g, ""), "base64");
}
+
+// ---------------------------------------------------------------------------
+// Package installation
+// ---------------------------------------------------------------------------
+
/**
 * Determine which files from a repo tree should be installed for a given package ref.
 *
 * - Full package (no subpath): all files under .github/, .claude/, .cursor/, .opencode/
 * - Primitive subpath: files under {target_dir}/{subpath}/ for every target_dir,
 *   files directly under {subpath}/, and exact single-file matches with or
 *   without a target-dir prefix.
 *
 * @param {Array<{path: string, sha: string}>} tree
 * @param {string | null} subpath
 * @returns {Array<{path: string, sha: string}>}
 */
function selectDeployableFiles(tree, subpath) {
  if (!subpath) {
    // Full package — include all files under any known target directory
    return tree.filter(entry => TARGET_DIRS.some(tdir => entry.path.startsWith(tdir)));
  }

  // Individual primitive — look for the subpath under every target directory,
  // plus the subpath itself if files live directly at it (no target prefix)
  const normalizedSubpath = subpath.endsWith("/") ? subpath : subpath + "/";
  return tree.filter(entry => {
    // Case 1: file already has a target dir prefix (common for APM package repos)
    if (TARGET_DIRS.some(tdir => entry.path.startsWith(tdir + normalizedSubpath))) return true;
    // Case 2: file is directly under the subpath (without target dir prefix)
    if (entry.path.startsWith(normalizedSubpath)) return true;
    // Case 3: exact match (e.g. subpath points to a single file)
    if (entry.path === subpath) return true;
    // Case 4: subpath names a single file stored under a target dir
    // (e.g. subpath "agents/bar.md" matching ".github/agents/bar.md");
    // previously such files were silently dropped.
    if (TARGET_DIRS.some(tdir => entry.path === tdir + subpath)) return true;
    return false;
  });
}
+
/**
 * Install a single APM package into the workspace.
 *
 * Resolves the ref to a commit, lists the repo tree, filters to deployable
 * files, and downloads each file to its original path under workspaceDir.
 * Unsafe paths (absolute, or containing a ".." segment) are skipped with a
 * warning rather than aborting the whole install.
 *
 * @param {*} octokit - Authenticated Octokit instance
 * @param {PackageRef} pkgRef - Parsed package reference
 * @param {string} workspaceDir - Absolute path to workspace
 * @returns {Promise<InstalledDependency>}
 */
async function installPackage(octokit, pkgRef, workspaceDir) {
  const { owner, repo, subpath, ref } = pkgRef;
  const repoUrl = `https://github.com/${owner}/${repo}`;

  core.info(`[APM Install] Installing ${owner}/${repo}${subpath ? `/${subpath}` : ""}${ref ? `#${ref}` : ""}`);

  // Resolve ref → commit SHA + tree SHA
  const { commitSha, resolvedRef, treeSha } = await resolveCommit(octokit, owner, repo, ref);
  core.info(`[APM Install] ref: ${resolvedRef} → ${commitSha.slice(0, 12)}`);

  // Get recursive file tree
  const tree = await getFileTree(octokit, owner, repo, treeSha);
  core.info(`[APM Install] repo tree: ${tree.length} blob(s)`);

  // Filter to deployable files
  const deployable = selectDeployableFiles(tree, subpath);
  if (deployable.length === 0) {
    // Fix: the original message appended the subpath directly onto the last
    // target dir (producing e.g. "….opencode/skills/foo/"); report it separately.
    core.warning(`[APM Install] No deployable files found in ${owner}/${repo}${subpath ? `/${subpath}` : ""}. ` + `Checked for files under ${TARGET_DIRS.join(", ")}${subpath ? ` (subpath: ${subpath}/)` : ""}.`);
  }
  core.info(`[APM Install] deployable: ${deployable.length} file(s)`);

  // Download and write each file
  const deployedFiles = [];
  const workspaceDirResolved = path.resolve(workspaceDir);

  for (let i = 0; i < deployable.length; i++) {
    const entry = deployable[i];
    const filePath = entry.path;

    // Security: reject absolute paths and any ".." path segment.
    // A segment check (instead of a raw substring test) avoids false positives
    // on legitimate names such as "notes..draft.md" while still rejecting
    // traversal like ".github/../../etc/passwd".
    if (path.isAbsolute(filePath) || filePath.split("/").includes("..")) {
      core.warning(`[APM Install] Skipping unsafe path from ${owner}/${repo}: ${JSON.stringify(filePath)}`);
      continue;
    }

    const destAbsPath = path.resolve(path.join(workspaceDir, filePath));
    // Guard: destination must stay inside workspace
    if (!destAbsPath.startsWith(workspaceDirResolved + path.sep) && destAbsPath !== workspaceDirResolved) {
      core.warning(`[APM Install] Skipping path that escapes workspace: ${JSON.stringify(filePath)}`);
      continue;
    }

    fs.mkdirSync(path.dirname(destAbsPath), { recursive: true });
    const content = await downloadFileContent(octokit, owner, repo, filePath, commitSha);
    fs.writeFileSync(destAbsPath, content);
    deployedFiles.push(filePath);

    // Periodic progress line every 10 files, plus a final one.
    if ((i + 1) % 10 === 0 || i + 1 === deployable.length) {
      core.info(`[APM Install] progress: ${i + 1}/${deployable.length} downloaded`);
    }
  }

  core.info(`[APM Install] ✓ ${owner}/${repo}: ${deployedFiles.length} file(s) installed`);

  return {
    repo_url: repoUrl,
    resolved_commit: commitSha,
    resolved_ref: resolvedRef,
    deployed_files: deployedFiles,
  };
}
+
+// ---------------------------------------------------------------------------
+// Entry point
+// ---------------------------------------------------------------------------
+
/**
 * Main entry point: install every configured package into the workspace,
 * then write apm.yml plus apm.lock.yaml.
 *
 * Accepts an optional options object for dependency injection (used in tests).
 *
 * @param {object} [opts]
 * @param {*} [opts.octokitOverride] - Override Octokit client (for testing)
 * @param {string} [opts.workspaceDir] - Override workspace directory (for testing)
 * @param {string[]} [opts.packages] - Override packages list (for testing)
 * @param {string} [opts.token] - Override auth token (for testing)
 */
async function main(opts = {}) {
  // Resolve each option from opts first, then fall back to the environment.
  const workspaceDir = opts.workspaceDir !== undefined ? opts.workspaceDir : process.env.APM_WORKSPACE || "/tmp/gh-aw/apm-workspace";
  const packages = opts.packages !== undefined ? opts.packages : parsePackagesFromEnv();
  const token = opts.token !== undefined ? opts.token : process.env.GITHUB_APM_PAT || process.env.GITHUB_TOKEN || "";
  const octokitOverride = opts.octokitOverride !== undefined ? opts.octokitOverride : null;

  core.info("=== APM Package Install ===");
  core.info(`[APM Install] Workspace directory: ${workspaceDir}`);
  core.info(`[APM Install] Packages : ${packages.length}`);

  if (packages.length === 0) {
    // Nothing to install — still emit an empty workspace so downstream
    // steps (apm_pack) find a valid apm.yml and lockfile.
    core.warning("[APM Install] No packages to install (APM_PACKAGES is empty)");
    fs.mkdirSync(workspaceDir, { recursive: true });
    writeWorkspaceApmYml(workspaceDir);
    writeWorkspaceLockfile(workspaceDir, []);
    return;
  }

  const octokit = octokitOverride || createOctokit(token);

  fs.mkdirSync(workspaceDir, { recursive: true });

  /** @type {InstalledDependency[]} */
  const dependencies = [];

  for (const slug of packages) {
    core.info(`[APM Install] ── ${slug}`);
    dependencies.push(await installPackage(octokit, parsePackageSlug(slug), workspaceDir));
  }

  writeWorkspaceApmYml(workspaceDir);
  writeWorkspaceLockfile(workspaceDir, dependencies);

  core.info(`[APM Install] ✅ Installed ${dependencies.length} package(s)`);
  core.info(`[APM Install] Workspace: ${workspaceDir}`);
}
+
/**
 * Read APM_PACKAGES from the environment and return the package slugs.
 * The variable must hold a JSON array, e.g. '["owner/repo","owner/repo2"]'.
 * Blank entries are dropped; an unset or blank variable yields [].
 *
 * @returns {string[]}
 */
function parsePackagesFromEnv() {
  const raw = process.env.APM_PACKAGES;
  if (!raw || raw.trim() === "") return [];

  let parsed;
  try {
    parsed = JSON.parse(raw);
    if (!Array.isArray(parsed)) throw new Error("APM_PACKAGES must be a JSON array");
  } catch (e) {
    // Wrap both JSON syntax errors and the non-array case with usage guidance.
    throw new Error(`[APM Install] Failed to parse APM_PACKAGES env var: ${e instanceof Error ? e.message : String(e)}\n` + ` Expected a JSON array, e.g.: ["owner/repo", "owner/repo/skills/foo#v2"]`);
  }
  return parsed.filter(p => typeof p === "string" && p.trim() !== "");
}
+
// Public API. `main` is the runtime entry point; the remaining exports let
// the unit tests exercise each piece in isolation.
module.exports = {
  main,
  parsePackageSlug,
  selectDeployableFiles,
  writeWorkspaceLockfile,
  writeWorkspaceApmYml,
  parsePackagesFromEnv,
  // Exported for tests only
  resolveCommit,
  installPackage,
  createOctokit,
  scalarToYaml,
};
diff --git a/actions/setup/js/apm_install.test.cjs b/actions/setup/js/apm_install.test.cjs
new file mode 100644
index 00000000000..590dc7106fb
--- /dev/null
+++ b/actions/setup/js/apm_install.test.cjs
@@ -0,0 +1,419 @@
+// @ts-check
+///
+
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+
+// ---------------------------------------------------------------------------
+// Global mock setup — must be done before requiring apm_install.cjs
+// ---------------------------------------------------------------------------
+
// Minimal stub of the `core` logger (normally injected by actions/github-script);
// each method is a vi.fn() so tests can assert on log output without printing.
const mockCore = {
  info: vi.fn(),
  warning: vi.fn(),
  error: vi.fn(),
  setFailed: vi.fn(),
  setOutput: vi.fn(),
};

// apm_install.cjs reads `core` as a global, not an import — install the stub
// before the module under test is loaded.
global.core = mockCore;

// apm_install.cjs calls require('@actions/github') only inside createOctokit(),
// which is only reached when octokitOverride is null. All unit tests inject
// octokitOverride, so we never actually load @actions/github here.

const { parsePackageSlug, selectDeployableFiles, writeWorkspaceLockfile, writeWorkspaceApmYml, parsePackagesFromEnv, scalarToYaml, main } = require("./apm_install.cjs");
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/** Create a temp directory and return its path. */
+function makeTempDir() {
+ return fs.mkdtempSync(path.join(os.tmpdir(), "apm-install-test-"));
+}
+
+/** Remove a temp directory (best-effort). */
+function removeTempDir(dir) {
+ if (dir && fs.existsSync(dir)) {
+ fs.rmSync(dir, { recursive: true, force: true });
+ }
+}
+
/** Build a minimal mock Octokit that returns the given tree and content map. */
function makeMockOctokit({ defaultBranch = "main", commitSha = "aabbcc00", treeSha = "tree00", tree = [], contentMap = {} } = {}) {
  // Only the endpoints apm_install.cjs actually calls are stubbed:
  // repos.get, repos.getCommit, repos.getContent, and git.getTree.
  return {
    rest: {
      repos: {
        get: vi.fn().mockResolvedValue({ data: { default_branch: defaultBranch } }),
        getCommit: vi.fn().mockResolvedValue({
          data: { sha: commitSha, commit: { tree: { sha: treeSha } } },
        }),
        getContent: vi.fn().mockImplementation(({ path: filePath }) => {
          // Files absent from contentMap behave like the real API: a 404 rejection.
          const text = contentMap[filePath];
          if (text === undefined) {
            return Promise.reject(Object.assign(new Error("Not Found"), { status: 404 }));
          }
          // Mirror the real API shape: base64-encoded file blob.
          return Promise.resolve({
            data: {
              type: "file",
              encoding: "base64",
              content: Buffer.from(text).toString("base64"),
            },
          });
        }),
      },
      git: {
        getTree: vi.fn().mockResolvedValue({ data: { tree } }),
      },
    },
  };
}
+
+// ---------------------------------------------------------------------------
+// parsePackageSlug
+// ---------------------------------------------------------------------------
+
// Slug parser coverage: all four accepted shapes plus rejection of malformed input.
describe("parsePackageSlug", () => {
  it("parses owner/repo", () => {
    expect(parsePackageSlug("microsoft/apm-sample-package")).toEqual({
      owner: "microsoft",
      repo: "apm-sample-package",
      subpath: null,
      ref: null,
    });
  });

  it("parses owner/repo#ref", () => {
    expect(parsePackageSlug("microsoft/apm-sample-package#v2.0")).toEqual({
      owner: "microsoft",
      repo: "apm-sample-package",
      subpath: null,
      ref: "v2.0",
    });
  });

  it("parses owner/repo/subpath", () => {
    expect(parsePackageSlug("github/awesome-copilot/skills/review-and-refactor")).toEqual({
      owner: "github",
      repo: "awesome-copilot",
      subpath: "skills/review-and-refactor",
      ref: null,
    });
  });

  it("parses owner/repo/subpath#ref", () => {
    expect(parsePackageSlug("org/repo/skills/foo#main")).toEqual({
      owner: "org",
      repo: "repo",
      subpath: "skills/foo",
      ref: "main",
    });
  });

  it("handles a commit SHA as ref", () => {
    const { ref } = parsePackageSlug("org/repo#abc123def456");
    expect(ref).toBe("abc123def456");
  });

  it("throws for slug without slash", () => {
    expect(() => parsePackageSlug("just-one-part")).toThrow(/Invalid package slug/);
  });

  it("throws for empty string", () => {
    expect(() => parsePackageSlug("")).toThrow(/Invalid package slug/);
  });

  it("throws for missing repo component", () => {
    expect(() => parsePackageSlug("owner/")).toThrow(/Invalid package slug/);
  });
});
+
+// ---------------------------------------------------------------------------
+// selectDeployableFiles – full package (no subpath)
+// ---------------------------------------------------------------------------
+
// Full-package selection: no subpath means "everything under a target dir".
describe("selectDeployableFiles – full package", () => {
  /** @type {Array<{path: string, sha: string}>} */
  const tree = [
    { path: ".github/skills/foo/skill.md", sha: "a1" },
    { path: ".github/agents/bar.md", sha: "a2" },
    { path: ".claude/skills/foo/skill.md", sha: "a3" },
    { path: ".cursor/rules/style.md", sha: "a4" },
    { path: ".opencode/instructions.md", sha: "a5" },
    { path: "README.md", sha: "a6" }, // repo docs — not deployable
    { path: "apm.yml", sha: "a7" }, // package metadata — not deployable
  ];

  it("includes files under all target dirs", () => {
    const result = selectDeployableFiles(tree, null).map(e => e.path);
    expect(result).toContain(".github/skills/foo/skill.md");
    expect(result).toContain(".github/agents/bar.md");
    expect(result).toContain(".claude/skills/foo/skill.md");
    expect(result).toContain(".cursor/rules/style.md");
    expect(result).toContain(".opencode/instructions.md");
  });

  it("excludes non-target files", () => {
    const result = selectDeployableFiles(tree, null).map(e => e.path);
    expect(result).not.toContain("README.md");
    expect(result).not.toContain("apm.yml");
  });
});
+
+// ---------------------------------------------------------------------------
+// selectDeployableFiles – individual primitive (with subpath)
+// ---------------------------------------------------------------------------
+
// Primitive selection: a subpath restricts the install to one skill/agent,
// matched under every target dir, excluding siblings.
describe("selectDeployableFiles – primitive subpath", () => {
  /** @type {Array<{path: string, sha: string}>} */
  const tree = [
    { path: ".github/skills/review-and-refactor/skill.md", sha: "b1" },
    { path: ".github/skills/review-and-refactor/notes.txt", sha: "b2" },
    { path: ".claude/skills/review-and-refactor/skill.md", sha: "b3" },
    { path: ".github/skills/other-skill/skill.md", sha: "b4" },
    { path: ".github/agents/agent.md", sha: "b5" },
    { path: "README.md", sha: "b6" },
  ];

  it("selects files under the subpath in all target dirs", () => {
    const result = selectDeployableFiles(tree, "skills/review-and-refactor").map(e => e.path);
    expect(result).toContain(".github/skills/review-and-refactor/skill.md");
    expect(result).toContain(".github/skills/review-and-refactor/notes.txt");
    expect(result).toContain(".claude/skills/review-and-refactor/skill.md");
  });

  it("excludes sibling skills", () => {
    const result = selectDeployableFiles(tree, "skills/review-and-refactor").map(e => e.path);
    expect(result).not.toContain(".github/skills/other-skill/skill.md");
  });

  it("excludes files from unrelated directories", () => {
    const result = selectDeployableFiles(tree, "skills/review-and-refactor").map(e => e.path);
    expect(result).not.toContain(".github/agents/agent.md");
    expect(result).not.toContain("README.md");
  });

  it("returns empty array when no files match subpath", () => {
    expect(selectDeployableFiles(tree, "skills/nonexistent")).toHaveLength(0);
  });
});
+
+// ---------------------------------------------------------------------------
+// writeWorkspaceLockfile
+// ---------------------------------------------------------------------------
+
// Lockfile writer: header fields, YAML quoting of timestamps, one entry per
// dependency, and round-trip compatibility with the apm_unpack parser.
describe("writeWorkspaceLockfile", () => {
  /** @type {string} */
  let tmpDir;

  beforeEach(() => {
    tmpDir = makeTempDir();
  });
  afterEach(() => removeTempDir(tmpDir));

  it("writes lockfile_version, generated_at, dependencies", () => {
    writeWorkspaceLockfile(tmpDir, []);
    const content = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    expect(content).toContain("lockfile_version: '1'");
    expect(content).toContain("generated_at:");
    expect(content).toContain("dependencies:");
  });

  it("quotes ISO timestamp in generated_at so YAML parsers keep string type", () => {
    writeWorkspaceLockfile(tmpDir, []);
    const content = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    expect(content).toMatch(/generated_at: '\d{4}-\d{2}-\d{2}T/);
  });

  it("writes one dependency entry", () => {
    writeWorkspaceLockfile(tmpDir, [
      {
        repo_url: "https://github.com/owner/pkg",
        resolved_commit: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        resolved_ref: "main",
        deployed_files: [".github/skills/skill-a/skill.md"],
      },
    ]);
    const content = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    expect(content).toContain("- repo_url: https://github.com/owner/pkg");
    expect(content).toContain("  resolved_commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
    expect(content).toContain("  resolved_ref: main");
    expect(content).toContain("  - .github/skills/skill-a/skill.md");
  });

  it("round-trips through parseAPMLockfile from apm_unpack", () => {
    const { parseAPMLockfile } = require("./apm_unpack.cjs");
    writeWorkspaceLockfile(tmpDir, [
      {
        repo_url: "https://github.com/o/r",
        resolved_commit: "deadbeef",
        resolved_ref: "v1.0",
        deployed_files: [".github/skills/foo/skill.md"],
      },
    ]);
    const content = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    const parsed = parseAPMLockfile(content);
    expect(parsed.lockfile_version).toBe("1");
    expect(parsed.dependencies).toHaveLength(1);
    expect(parsed.dependencies[0].repo_url).toBe("https://github.com/o/r");
    expect(parsed.dependencies[0].resolved_commit).toBe("deadbeef");
    expect(parsed.dependencies[0].deployed_files).toContain(".github/skills/foo/skill.md");
  });
});
+
+// ---------------------------------------------------------------------------
+// writeWorkspaceApmYml
+// ---------------------------------------------------------------------------
+
// apm.yml writer: emits the fixed workspace name/version consumed by apm_pack.
describe("writeWorkspaceApmYml", () => {
  /** @type {string} */
  let tmpDir;

  beforeEach(() => {
    tmpDir = makeTempDir();
  });
  afterEach(() => removeTempDir(tmpDir));

  it("writes apm.yml with name and version", () => {
    writeWorkspaceApmYml(tmpDir);
    const content = fs.readFileSync(path.join(tmpDir, "apm.yml"), "utf-8");
    expect(content).toContain("name: gh-aw-workspace");
    expect(content).toContain("version: 0.0.0");
  });
});
+
+// ---------------------------------------------------------------------------
+// parsePackagesFromEnv
+// ---------------------------------------------------------------------------
+
// Env parsing: valid JSON arrays, blank-entry filtering, and error reporting.
describe("parsePackagesFromEnv", () => {
  afterEach(() => {
    // Keep the environment clean for other suites.
    delete process.env.APM_PACKAGES;
  });

  it("returns empty array when APM_PACKAGES is not set", () => {
    delete process.env.APM_PACKAGES;
    expect(parsePackagesFromEnv()).toEqual([]);
  });

  it("parses a JSON array of package slugs", () => {
    process.env.APM_PACKAGES = '["microsoft/apm-sample-package","github/awesome-copilot/skills/foo"]';
    expect(parsePackagesFromEnv()).toEqual(["microsoft/apm-sample-package", "github/awesome-copilot/skills/foo"]);
  });

  it("filters out empty strings", () => {
    process.env.APM_PACKAGES = '["pkg-a","","pkg-b"]';
    expect(parsePackagesFromEnv()).toEqual(["pkg-a", "pkg-b"]);
  });

  it("throws on non-array JSON", () => {
    process.env.APM_PACKAGES = '"single-string"';
    expect(() => parsePackagesFromEnv()).toThrow(/JSON array/);
  });

  it("throws on malformed JSON", () => {
    process.env.APM_PACKAGES = "[not valid json";
    expect(() => parsePackagesFromEnv()).toThrow(/parse APM_PACKAGES/);
  });
});
+
+// ---------------------------------------------------------------------------
+// main() – mocked Octokit integration tests
+// ---------------------------------------------------------------------------
+
// End-to-end install flow against a mocked Octokit: empty package list,
// happy path, traversal rejection, and round-trip with the unpack parser.
describe("main() – mocked Octokit", () => {
  /** @type {string} */
  let tmpDir;

  beforeEach(() => {
    tmpDir = makeTempDir();
    vi.clearAllMocks();
  });
  afterEach(() => {
    removeTempDir(tmpDir);
    delete process.env.APM_PACKAGES;
  });

  it("writes apm.yml and empty lockfile when packages list is empty", async () => {
    await main({ octokitOverride: {}, workspaceDir: tmpDir, packages: [] });
    expect(fs.existsSync(path.join(tmpDir, "apm.yml"))).toBe(true);
    const lockContent = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    expect(lockContent).toContain("lockfile_version: '1'");
  });

  it("downloads files for a single package and writes lockfile", async () => {
    const octokit = makeMockOctokit({
      commitSha: "abcdef1234567890abcdef1234567890abcdef12",
      tree: [
        { type: "blob", path: ".github/skills/test-skill/skill.md", sha: "s1" },
        { type: "blob", path: ".github/copilot-instructions.md", sha: "s2" },
        { type: "blob", path: "README.md", sha: "s3" }, // non-target, should be skipped
      ],
      contentMap: {
        ".github/skills/test-skill/skill.md": "# Test Skill",
        ".github/copilot-instructions.md": "# Instructions",
      },
    });

    await main({ octokitOverride: octokit, workspaceDir: tmpDir, packages: ["test-org/test-pkg"] });

    expect(fs.existsSync(path.join(tmpDir, ".github/skills/test-skill/skill.md"))).toBe(true);
    expect(fs.existsSync(path.join(tmpDir, ".github/copilot-instructions.md"))).toBe(true);
    expect(fs.existsSync(path.join(tmpDir, "README.md"))).toBe(false);

    const lockContent = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    expect(lockContent).toContain("https://github.com/test-org/test-pkg");
    expect(lockContent).toContain("abcdef1234567890abcdef1234567890abcdef12");
    expect(lockContent).toContain(".github/skills/test-skill/skill.md");
  });

  it("skips path-traversal entries from the GitHub tree", async () => {
    const octokit = makeMockOctokit({
      tree: [
        { type: "blob", path: ".github/../../../etc/passwd", sha: "evil" },
        { type: "blob", path: ".github/skills/safe/skill.md", sha: "safe" },
      ],
      contentMap: {
        ".github/skills/safe/skill.md": "safe content",
      },
    });

    await main({ octokitOverride: octokit, workspaceDir: tmpDir, packages: ["attacker/repo"] });

    expect(fs.existsSync(path.join(tmpDir, ".github/skills/safe/skill.md"))).toBe(true);
    expect(fs.existsSync(path.join(tmpDir, "etc/passwd"))).toBe(false);
  });

  it("lockfile is valid input for apm_pack (round-trip fixture)", async () => {
    const { parseAPMLockfile } = require("./apm_unpack.cjs");

    const octokit = makeMockOctokit({
      commitSha: "cccc0000000000000000000000000000cccccccc",
      tree: [
        { type: "blob", path: ".github/skills/my-skill/skill.md", sha: "f1" },
        { type: "blob", path: ".claude/agents/my-agent.md", sha: "f2" },
      ],
      contentMap: {
        ".github/skills/my-skill/skill.md": "# My Skill",
        ".claude/agents/my-agent.md": "# My Agent",
      },
    });

    await main({ octokitOverride: octokit, workspaceDir: tmpDir, packages: ["my-org/my-pkg"] });

    const lockContent = fs.readFileSync(path.join(tmpDir, "apm.lock.yaml"), "utf-8");
    const parsed = parseAPMLockfile(lockContent);

    expect(parsed.lockfile_version).toBe("1");
    expect(parsed.dependencies).toHaveLength(1);
    expect(parsed.dependencies[0].deployed_files).toContain(".github/skills/my-skill/skill.md");
    expect(parsed.dependencies[0].deployed_files).toContain(".claude/agents/my-agent.md");

    // Verify installed files are actually present on disk
    expect(fs.existsSync(path.join(tmpDir, ".github/skills/my-skill/skill.md"))).toBe(true);
    expect(fs.existsSync(path.join(tmpDir, ".claude/agents/my-agent.md"))).toBe(true);
  });
});
diff --git a/actions/setup/js/apm_pack.cjs b/actions/setup/js/apm_pack.cjs
new file mode 100644
index 00000000000..94fdb090049
--- /dev/null
+++ b/actions/setup/js/apm_pack.cjs
@@ -0,0 +1,726 @@
+// @ts-check
+///
+
+/**
+ * APM Bundle Packer
+ *
+ * JavaScript implementation of the APM (Agent Package Manager) bundle pack
+ * algorithm, equivalent to microsoft/apm packer.py.
+ *
+ * This module creates a self-contained .tar.gz bundle from an already-installed
+ * APM workspace (produced by `apm install`). It replaces the `microsoft/apm-action`
+ * pack step in the APM job, removing the external dependency for packing.
+ *
+ * Algorithm (mirrors packer.py):
+ * 1. Read apm.yml for package name and version (used for bundle directory name)
+ * 2. Read apm.lock.yaml from the workspace
+ * 3. Resolve the effective target (explicit > auto-detect from folder structure)
+ * 4. Collect deployed_files from all dependencies, filtered by target with
+ * cross-target mapping (e.g. .github/skills/ → .claude/skills/ for claude target)
+ * 5. Verify all referenced files exist on disk
+ * 6. Copy files (skip symlinks) to output/-/ preserving structure
+ * 7. Write an enriched apm.lock.yaml with a pack: header to the bundle directory
+ * 8. Create a .tar.gz archive and remove the bundle directory
+ * 9. Emit bundle-path output via core.setOutput
+ *
+ * Environment variables:
+ * APM_WORKSPACE – project root with apm.lock.yaml and installed files
+ * (default: /tmp/gh-aw/apm-workspace)
+ * APM_BUNDLE_OUTPUT – directory where the bundle archive is created
+ * (default: /tmp/gh-aw/apm-bundle-output)
+ * APM_TARGET – pack target: claude, copilot/vscode, cursor, opencode, all
+ * (default: auto-detect from workspace folder structure)
+ *
+ * @module apm_pack
+ */
+
+"use strict";
+
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+
+/** Lockfile filename used by current APM versions. */
+const LOCKFILE_NAME = "apm.lock.yaml";
+
+/** apm.yml filename for package metadata. */
+const APM_YML_NAME = "apm.yml";
+
+// Import shared parsing utilities from apm_unpack to avoid duplication.
+// Globals (core, exec) must be set before this module is loaded, because
+// requiring apm_unpack.cjs evaluates it immediately.
+const { parseAPMLockfile, unquoteYaml } = require("./apm_unpack.cjs");
+
+// ---------------------------------------------------------------------------
+// Target / cross-target mapping constants (mirrors lockfile_enrichment.py)
+// ---------------------------------------------------------------------------
+
+/**
+ * Authoritative mapping of target names to deployed-file path prefixes.
+ * Note: "copilot" and "vscode" are aliases — both deploy under .github/.
+ * @type {Record<string, string[]>}
+ */
+const TARGET_PREFIXES = {
+  copilot: [".github/"],
+  vscode: [".github/"],
+  claude: [".claude/"],
+  cursor: [".cursor/"],
+  opencode: [".opencode/"],
+  all: [".github/", ".claude/", ".cursor/", ".opencode/"],
+};
+
+/**
+ * Cross-target path equivalences for skills/ and agents/ directories.
+ * Maps srcPrefix (disk/deployed_files path) → dstPrefix (bundle path) for a given target,
+ * as used by filterFilesByTarget(). Only skills/ and agents/ are semantically identical
+ * across targets.
+ * @type {Record<string, Record<string, string>>}
+ */
+const CROSS_TARGET_MAPS = {
+  claude: {
+    ".github/skills/": ".claude/skills/",
+    ".github/agents/": ".claude/agents/",
+  },
+  vscode: {
+    ".claude/skills/": ".github/skills/",
+    ".claude/agents/": ".github/agents/",
+  },
+  copilot: {
+    ".claude/skills/": ".github/skills/",
+    ".claude/agents/": ".github/agents/",
+  },
+  cursor: {
+    ".github/skills/": ".cursor/skills/",
+    ".github/agents/": ".cursor/agents/",
+  },
+  opencode: {
+    ".github/skills/": ".opencode/skills/",
+    ".github/agents/": ".opencode/agents/",
+  },
+};
+
+// ---------------------------------------------------------------------------
+// apm.yml parser
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} ApmYmlInfo
+ * @property {string} name - Package name (defaults to directory name if missing)
+ * @property {string} version - Package version (defaults to "0.0.0" if missing)
+ */
+
+/**
+ * Parse an apm.yml file to extract package name and version.
+ * These are used to name the bundle directory: -.
+ *
+ * @param {string} content - Raw YAML content of apm.yml
+ * @param {string} [fallbackName] - Fallback name if not found in content
+ * @returns {ApmYmlInfo}
+ */
+function parseApmYml(content, fallbackName = "bundle") {
+ let name = fallbackName;
+ let version = "0.0.0";
+
+ for (const line of content.split("\n")) {
+ const m = line.match(/^(name|version):\s*(.*)$/);
+ if (m) {
+ const v = unquoteYaml(m[2]);
+ if (m[1] === "name" && v !== null && String(v).trim() !== "") {
+ name = String(v).trim();
+ } else if (m[1] === "version" && v !== null && String(v).trim() !== "") {
+ version = String(v).trim();
+ }
+ }
+ }
+
+ return { name, version };
+}
+
+// ---------------------------------------------------------------------------
+// Target detection
+// ---------------------------------------------------------------------------
+
+/**
+ * Detect the effective pack target.
+ *
+ * Priority:
+ * 1. Explicit target (from APM_TARGET environment variable)
+ * 2. Auto-detect from workspace folder structure
+ *
+ * @param {string} workspaceDir - Project root to inspect for target folders
+ * @param {string | null | undefined} explicitTarget - Explicit target string
+ * @returns {string} Normalised target string
+ */
+function detectTarget(workspaceDir, explicitTarget) {
+ if (explicitTarget) {
+ const t = explicitTarget.trim().toLowerCase();
+ if (t === "copilot" || t === "vscode" || t === "agents") return "vscode";
+ if (t === "claude") return "claude";
+ if (t === "cursor") return "cursor";
+ if (t === "opencode") return "opencode";
+ if (t === "all") return "all";
+ core.warning(`[APM Pack] Unknown target '${t}' — falling back to 'all'`);
+ return "all";
+ }
+
+ // Auto-detect from folder structure
+ const githubDir = path.join(workspaceDir, ".github");
+ const claudeDir = path.join(workspaceDir, ".claude");
+ const cursorDir = path.join(workspaceDir, ".cursor");
+ const opencodeDir = path.join(workspaceDir, ".opencode");
+ const hasGitHub = fs.existsSync(githubDir) && fs.lstatSync(githubDir).isDirectory();
+ const hasClaude = fs.existsSync(claudeDir) && fs.lstatSync(claudeDir).isDirectory();
+ const hasCursor = fs.existsSync(cursorDir) && fs.lstatSync(cursorDir).isDirectory();
+ const hasOpencode = fs.existsSync(opencodeDir) && fs.lstatSync(opencodeDir).isDirectory();
+
+ const detected = [hasGitHub && ".github/", hasClaude && ".claude/", hasCursor && ".cursor/", hasOpencode && ".opencode/"].filter(Boolean);
+
+ if (detected.length >= 2) {
+ core.info(`[APM Pack] Auto-detected target: all (found ${detected.join(" and ")})`);
+ return "all";
+ }
+ if (hasGitHub) {
+ core.info("[APM Pack] Auto-detected target: vscode (found .github/ folder)");
+ return "vscode";
+ }
+ if (hasClaude) {
+ core.info("[APM Pack] Auto-detected target: claude (found .claude/ folder)");
+ return "claude";
+ }
+ if (hasCursor) {
+ core.info("[APM Pack] Auto-detected target: cursor (found .cursor/ folder)");
+ return "cursor";
+ }
+ if (hasOpencode) {
+ core.info("[APM Pack] Auto-detected target: opencode (found .opencode/ folder)");
+ return "opencode";
+ }
+ core.info("[APM Pack] No target folders found — using 'all'");
+ return "all";
+}
+
+// ---------------------------------------------------------------------------
+// File filtering with cross-target mapping
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} FilterResult
+ * @property {string[]} files - Filtered (and cross-target mapped) file paths for the bundle.
+ * @property {Record} pathMappings - Maps bundle_path → disk_path for cross-target remaps.
+ */
+
+/**
+ * Filter deployed file paths by target prefix, with cross-target mapping.
+ *
+ * When files are deployed under one target prefix (e.g. .github/skills/)
+ * but the pack target is different (e.g. claude), skills and agents are
+ * remapped to the equivalent target path. Commands, instructions, and
+ * hooks are NOT remapped — they are target-specific.
+ *
+ * Mirrors _filter_files_by_target in lockfile_enrichment.py exactly.
+ *
+ * @param {string[]} deployedFiles - List of relative file paths from deployed_files.
+ * @param {string} target - Normalised target string.
+ * @returns {FilterResult}
+ */
+function filterFilesByTarget(deployedFiles, target) {
+ const prefixes = TARGET_PREFIXES[target] || TARGET_PREFIXES["all"];
+ // Direct matches: files that start with a target prefix
+ const direct = deployedFiles.filter(f => prefixes.some(p => f.startsWith(p)));
+
+ /** @type {Record} */
+ const pathMappings = {};
+ const crossMap = CROSS_TARGET_MAPS[target] || {};
+
+ if (Object.keys(crossMap).length > 0) {
+ const directSet = new Set(direct);
+ for (const f of deployedFiles) {
+ if (directSet.has(f)) continue;
+ for (const [srcPrefix, dstPrefix] of Object.entries(crossMap)) {
+ if (f.startsWith(srcPrefix)) {
+ const mapped = dstPrefix + f.slice(srcPrefix.length);
+ if (!directSet.has(mapped)) {
+ direct.push(mapped);
+ directSet.add(mapped);
+ pathMappings[mapped] = f; // bundle_path → disk_path
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ return { files: direct, pathMappings };
+}
+
+// ---------------------------------------------------------------------------
+// YAML serialization for the enriched lockfile
+// ---------------------------------------------------------------------------
+
+/**
+ * Serialize a scalar value to a YAML string, matching PyYAML safe_dump style.
+ *
+ * Strings that YAML 1.1 would parse as non-strings (null/bool keywords,
+ * integers, floats, ISO 8601 timestamps) are single-quoted so they
+ * round-trip as strings. Other strings are returned as-is. Booleans and
+ * numbers are serialized without quotes. Null (and undefined) become the
+ * literal string "null".
+ *
+ * @param {string | number | boolean | null | undefined} value
+ * @returns {string}
+ */
+function scalarToYaml(value) {
+  if (value === null || value === undefined) return "null";
+  if (typeof value === "boolean") return value ? "true" : "false";
+  if (typeof value === "number") return String(value);
+  const s = String(value);
+  // Quote strings that YAML would parse as non-strings (mirrors PyYAML safe_dump quoting).
+  // The keyword test is case-insensitive: YAML 1.1 resolvers treat e.g.
+  // "True", "NO", and "Off" as booleans, so quoting only the lowercase
+  // spellings would corrupt those values on re-parse. Over-quoting is
+  // always safe — a quoted string parses back to the same string.
+  const needsQuoting =
+    s === "" ||
+    /^(?:null|~|true|false|yes|no|on|off)$/i.test(s) ||
+    /^-?\d+$/.test(s) ||
+    /^-?\d+\.\d+$/.test(s) ||
+    // YAML 1.1 parses ISO 8601 timestamps as datetime objects; quote to preserve string type
+    /^\d{4}-\d{2}-\d{2}T/.test(s) ||
+    // Plain scalars drop leading/trailing whitespace; quote to preserve it.
+    s !== s.trim();
+  if (needsQuoting) {
+    return `'${s.replace(/'/g, "''")}'`;
+  }
+  return s;
+}
+
+/**
+ * Serialize an enriched APM lockfile to YAML.
+ *
+ * The output format matches PyYAML safe_dump: the pack: section is
+ * prepended, followed by top-level metadata, then the dependencies
+ * sequence with filtered deployed_files.
+ *
+ * This output is parseable by both:
+ * - Python yaml.safe_load (used by apm unpack)
+ * - Our parseAPMLockfile (used by apm_unpack.cjs)
+ *
+ * @param {import("./apm_unpack.cjs").APMLockfile} lockfile - Parsed lockfile
+ * @param {import("./apm_unpack.cjs").LockedDependency[]} filteredDeps - Deps with filtered deployed_files
+ * @param {{ format: string, target: string, packed_at: string, mapped_from?: string[] }} packMeta
+ * @returns {string} YAML string
+ */
+function serializeLockfileYaml(lockfile, filteredDeps, packMeta) {
+ const lines = [];
+
+ // Pack metadata section (prepended first, as in Python's enrich_lockfile_for_pack)
+ lines.push("pack:");
+ lines.push(` format: ${scalarToYaml(packMeta.format)}`);
+ lines.push(` target: ${scalarToYaml(packMeta.target)}`);
+ lines.push(` packed_at: ${scalarToYaml(packMeta.packed_at)}`);
+ if (packMeta.mapped_from && packMeta.mapped_from.length > 0) {
+ lines.push(" mapped_from:");
+ for (const prefix of packMeta.mapped_from) {
+ lines.push(` - ${prefix}`);
+ }
+ }
+
+ // Top-level metadata fields
+ if (lockfile.lockfile_version !== null) {
+ lines.push(`lockfile_version: ${scalarToYaml(lockfile.lockfile_version)}`);
+ }
+ if (lockfile.generated_at !== null) {
+ lines.push(`generated_at: ${scalarToYaml(lockfile.generated_at)}`);
+ }
+ if (lockfile.apm_version !== null) {
+ lines.push(`apm_version: ${scalarToYaml(lockfile.apm_version)}`);
+ }
+
+ // Dependencies sequence
+ lines.push("dependencies:");
+ for (const dep of filteredDeps) {
+ lines.push(`- repo_url: ${scalarToYaml(dep.repo_url)}`);
+ if (dep.host !== null) lines.push(` host: ${scalarToYaml(dep.host)}`);
+ else lines.push(` host: null`);
+ if (dep.resolved_commit !== null) lines.push(` resolved_commit: ${scalarToYaml(dep.resolved_commit)}`);
+ else lines.push(` resolved_commit: null`);
+ if (dep.resolved_ref !== null) lines.push(` resolved_ref: ${scalarToYaml(dep.resolved_ref)}`);
+ else lines.push(` resolved_ref: null`);
+ if (dep.version !== null) lines.push(` version: ${scalarToYaml(dep.version)}`);
+ else lines.push(` version: null`);
+ if (dep.virtual_path !== null) lines.push(` virtual_path: ${scalarToYaml(dep.virtual_path)}`);
+ else lines.push(` virtual_path: null`);
+ lines.push(` is_virtual: ${dep.is_virtual ? "true" : "false"}`);
+ lines.push(` depth: ${dep.depth}`);
+ if (dep.resolved_by !== null) lines.push(` resolved_by: ${scalarToYaml(dep.resolved_by)}`);
+ else lines.push(` resolved_by: null`);
+ if (dep.package_type !== null) lines.push(` package_type: ${scalarToYaml(dep.package_type)}`);
+ else lines.push(` package_type: null`);
+ if (dep.source !== null) lines.push(` source: ${scalarToYaml(dep.source)}`);
+ else lines.push(` source: null`);
+ if (dep.local_path !== null) lines.push(` local_path: ${scalarToYaml(dep.local_path)}`);
+ else lines.push(` local_path: null`);
+ if (dep.content_hash !== null) lines.push(` content_hash: ${scalarToYaml(dep.content_hash)}`);
+ else lines.push(` content_hash: null`);
+ lines.push(` is_dev: ${dep.is_dev ? "true" : "false"}`);
+ // Preserve unknown fields so the enriched lockfile is non-destructive
+ for (const [k, v] of Object.entries(dep.extra || {})) {
+ lines.push(` ${k}: ${scalarToYaml(v)}`);
+ }
+ lines.push(" deployed_files:");
+ for (const f of dep.deployed_files) {
+ lines.push(` - ${scalarToYaml(f)}`);
+ }
+ }
+
+ return lines.join("\n") + "\n";
+}
+
+// ---------------------------------------------------------------------------
+// Security helpers (mirrors assertSafePath / assertDestInsideOutput in unpacker)
+// ---------------------------------------------------------------------------
+
+/**
+ * Validate that a relative path from the lockfile is safe to pack.
+ * Rejects absolute paths and path-traversal attempts.
+ *
+ * @param {string} relPath - Relative path string from deployed_files.
+ * @throws {Error} If the path is unsafe.
+ */
+function assertSafePackPath(relPath) {
+ if (path.isAbsolute(relPath) || relPath.startsWith("/")) {
+ throw new Error(`Refusing to pack unsafe absolute path from lockfile: ${JSON.stringify(relPath)}`);
+ }
+ const parts = relPath.split(/[\\/]/);
+ if (parts.includes("..")) {
+ throw new Error(`Refusing to pack path-traversal entry from lockfile: ${JSON.stringify(relPath)}`);
+ }
+}
+
+/**
+ * Verify the resolved destination path stays within the bundle directory.
+ *
+ * @param {string} destPath - Absolute destination path.
+ * @param {string} bundleDirResolved - Resolved absolute bundle directory.
+ * @throws {Error} If the dest escapes the bundle directory.
+ */
+function assertPackDestInside(destPath, bundleDirResolved) {
+ const resolved = path.resolve(destPath);
+ if (!resolved.startsWith(bundleDirResolved + path.sep) && resolved !== bundleDirResolved) {
+ throw new Error(`Refusing to pack path that escapes the bundle directory: ${JSON.stringify(destPath)}`);
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Copy helpers (mirrors copyDirRecursive / listDirRecursive in apm_unpack)
+// ---------------------------------------------------------------------------
+
+/**
+ * Recursively copy a directory tree from src to dest, skipping symbolic links.
+ *
+ * @param {string} src - Source directory.
+ * @param {string} dest - Destination directory.
+ * @returns {number} Number of files copied.
+ */
+function copyDirForPack(src, dest) {
+ let count = 0;
+ const entries = fs.readdirSync(src, { withFileTypes: true });
+ for (const entry of entries) {
+ const srcPath = path.join(src, entry.name);
+ const destPath = path.join(dest, entry.name);
+ if (entry.isSymbolicLink()) {
+ core.warning(`[APM Pack] Skipping symlink: ${srcPath}`);
+ continue;
+ }
+ if (entry.isDirectory()) {
+ fs.mkdirSync(destPath, { recursive: true });
+ count += copyDirForPack(srcPath, destPath);
+ } else if (entry.isFile()) {
+ fs.mkdirSync(path.dirname(destPath), { recursive: true });
+ fs.copyFileSync(srcPath, destPath);
+ count++;
+ }
+ }
+ return count;
+}
+
+/**
+ * List all file paths recursively under dir, relative to dir. Symbolic links skipped.
+ *
+ * @param {string} dir
+ * @returns {string[]}
+ */
+function listDirForPack(dir) {
+ /** @type {string[]} */
+ const result = [];
+ try {
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.isSymbolicLink()) continue;
+ if (entry.isDirectory()) {
+ const sub = listDirForPack(path.join(dir, entry.name));
+ result.push(...sub.map(s => entry.name + "/" + s));
+ } else {
+ result.push(entry.name);
+ }
+ }
+ } catch {
+ // Best-effort listing
+ }
+ return result;
+}
+
+// ---------------------------------------------------------------------------
+// Main pack function
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} PackResult
+ * @property {string} bundlePath - Absolute path to the created .tar.gz archive.
+ * @property {string[]} files - Unique list of bundle file paths (filtered by target).
+ * @property {string} target - Effective target used for packing.
+ * @property {Record<string, string>} pathMappings - Cross-target path mappings used.
+ */
+
+/**
+ * Create a self-contained APM bundle from an installed workspace.
+ *
+ * Mirrors pack_bundle() in packer.py.
+ *
+ * @param {object} params
+ * @param {string} params.workspaceDir - Project root with apm.lock.yaml + installed files.
+ * @param {string} params.outputDir - Directory where the bundle archive will be written.
+ * @param {string | null} [params.target] - Explicit target, or null to auto-detect.
+ * @param {string} [params.format] - Bundle format (default: "apm").
+ * @returns {Promise<PackResult>}
+ */
+async function packBundle({ workspaceDir, outputDir, target = null, format = "apm" }) {
+  core.info("=== APM Bundle Pack ===");
+  core.info(`[APM Pack] Workspace directory : ${workspaceDir}`);
+  core.info(`[APM Pack] Output directory : ${outputDir}`);
+
+  if (!fs.existsSync(workspaceDir)) {
+    throw new Error(`APM workspace directory not found: ${workspaceDir}`);
+  }
+
+  // 1. Read apm.yml for package name / version
+  const apmYmlPath = path.join(workspaceDir, APM_YML_NAME);
+  let pkgName = path.basename(workspaceDir);
+  let pkgVersion = "0.0.0";
+  if (fs.existsSync(apmYmlPath)) {
+    const apmYmlContent = fs.readFileSync(apmYmlPath, "utf-8");
+    const info = parseApmYml(apmYmlContent, pkgName);
+    pkgName = info.name;
+    pkgVersion = info.version;
+    core.info(`[APM Pack] Package : ${pkgName}@${pkgVersion}`);
+  } else {
+    core.warning(`[APM Pack] ${APM_YML_NAME} not found — using directory name and version 0.0.0`);
+  }
+
+  // 2. Read apm.lock.yaml
+  const lockfilePath = path.join(workspaceDir, LOCKFILE_NAME);
+  if (!fs.existsSync(lockfilePath)) {
+    throw new Error(`${LOCKFILE_NAME} not found in workspace: ${workspaceDir}\n` + "Run 'apm install' first to resolve dependencies.");
+  }
+  const lockfileContent = fs.readFileSync(lockfilePath, "utf-8");
+  core.info(`[APM Pack] Lockfile size: ${lockfileContent.length} bytes`);
+
+  // 3. Parse lockfile
+  const lockfile = parseAPMLockfile(lockfileContent);
+  core.info(`[APM Pack] Lockfile version : ${lockfile.lockfile_version}`);
+  core.info(`[APM Pack] APM version : ${lockfile.apm_version}`);
+  core.info(`[APM Pack] Dependencies : ${lockfile.dependencies.length}`);
+
+  // 4. Resolve effective target
+  const effectiveTarget = detectTarget(workspaceDir, target);
+  core.info(`[APM Pack] Target : ${effectiveTarget}`);
+
+  // 5. Collect deployed_files from all dependencies, filtered by target
+  /** @type {string[]} */
+  const allDeployed = [];
+  for (const dep of lockfile.dependencies) {
+    allDeployed.push(...dep.deployed_files);
+  }
+  const { files: filteredFiles, pathMappings } = filterFilesByTarget(allDeployed, effectiveTarget);
+
+  // Deduplicate while preserving order (mirrors Python's seen set)
+  /** @type {Set<string>} */
+  const seen = new Set();
+  /** @type {string[]} */
+  const uniqueFiles = [];
+  for (const f of filteredFiles) {
+    if (!seen.has(f)) {
+      seen.add(f);
+      uniqueFiles.push(f);
+    }
+  }
+  core.info(`[APM Pack] Files to bundle (after filter + dedup): ${uniqueFiles.length}`);
+
+  // 6. Verify each path is safe and exists on disk
+  const workspaceDirResolved = path.resolve(workspaceDir);
+  /** @type {string[]} */
+  const missing = [];
+  for (const bundlePath of uniqueFiles) {
+    assertSafePackPath(bundlePath);
+    // For cross-target mapped files, verify the ORIGINAL (on-disk) path
+    const diskRelPath = pathMappings[bundlePath] || bundlePath;
+    // Strip trailing slash for existence check
+    const diskRelPathClean = diskRelPath.endsWith("/") ? diskRelPath.slice(0, -1) : diskRelPath;
+    const absPath = path.join(workspaceDirResolved, diskRelPathClean);
+    // Guard: destination must stay inside workspace
+    const resolvedAbs = path.resolve(absPath);
+    if (!resolvedAbs.startsWith(workspaceDirResolved + path.sep) && resolvedAbs !== workspaceDirResolved) {
+      throw new Error(`Refusing to pack path that escapes workspace directory: ${JSON.stringify(diskRelPath)}`);
+    }
+    if (!fs.existsSync(absPath)) {
+      missing.push(diskRelPath);
+    }
+  }
+  if (missing.length > 0) {
+    throw new Error(`The following deployed files are missing on disk — run 'apm install' to restore them:\n` + missing.map(m => ` - ${m}`).join("\n"));
+  }
+  core.info(`[APM Pack] All ${uniqueFiles.length} file(s) verified on disk`);
+
+  // 7. Build bundle directory: output/<name>-<version>/
+  const bundleDirName = `${pkgName}-${pkgVersion}`;
+  const bundleDir = path.join(path.resolve(outputDir), bundleDirName);
+  const bundleDirResolved = path.resolve(bundleDir);
+  fs.mkdirSync(bundleDir, { recursive: true });
+  core.info(`[APM Pack] Bundle directory : ${bundleDir}`);
+
+  // 8. Copy files preserving directory structure (skip symlinks)
+  let copied = 0;
+  for (const bundleRelPath of uniqueFiles) {
+    const diskRelPath = pathMappings[bundleRelPath] || bundleRelPath;
+    const diskRelPathClean = diskRelPath.endsWith("/") ? diskRelPath.slice(0, -1) : diskRelPath;
+    const bundleRelPathClean = bundleRelPath.endsWith("/") ? bundleRelPath.slice(0, -1) : bundleRelPath;
+    const src = path.join(workspaceDirResolved, diskRelPathClean);
+    const dest = path.join(bundleDir, bundleRelPathClean);
+
+    // Defense-in-depth: verify mapped destination stays inside bundle
+    assertPackDestInside(dest, bundleDirResolved);
+
+    // Defensive re-check: step 6 already confirmed existence for these paths.
+    if (!fs.existsSync(src)) continue;
+    const srcLstat = fs.lstatSync(src);
+    if (srcLstat.isSymbolicLink()) {
+      core.warning(`[APM Pack] Skipping symlink: ${diskRelPath}`);
+      continue;
+    }
+
+    // An entry written with a trailing "/" in deployed_files is treated as
+    // a directory even before consulting lstat.
+    if (srcLstat.isDirectory() || bundleRelPath.endsWith("/")) {
+      core.info(`[APM Pack] Copying directory: ${diskRelPath}${pathMappings[bundleRelPath] ? ` → ${bundleRelPathClean}` : ""}`);
+      fs.mkdirSync(dest, { recursive: true });
+      const n = copyDirForPack(src, dest);
+      core.info(`[APM Pack] → Copied ${n} file(s) from ${diskRelPath}`);
+      copied += n;
+    } else {
+      core.info(`[APM Pack] Copying file: ${diskRelPath}${pathMappings[bundleRelPath] ? ` → ${bundleRelPathClean}` : ""}`);
+      fs.mkdirSync(path.dirname(dest), { recursive: true });
+      fs.copyFileSync(src, dest, 0 /* no COPYFILE_EXCL */);
+      copied++;
+    }
+  }
+  core.info(`[APM Pack] Done copying: ${copied} file(s)`);
+
+  // 9. Compute mapped_from for pack: header (source prefixes used in cross-target mapping)
+  const crossMap = CROSS_TARGET_MAPS[effectiveTarget] || {};
+  /** @type {Set<string>} */
+  const usedSrcPrefixes = new Set();
+  for (const original of Object.values(pathMappings)) {
+    for (const srcPrefix of Object.keys(crossMap)) {
+      if (original.startsWith(srcPrefix)) {
+        usedSrcPrefixes.add(srcPrefix);
+        break;
+      }
+    }
+  }
+
+  // Build per-dep filtered dep list (each dep gets deployed_files filtered by target)
+  const filteredDeps = lockfile.dependencies.map(dep => {
+    const { files: depFiles } = filterFilesByTarget(dep.deployed_files, effectiveTarget);
+    return { ...dep, deployed_files: depFiles };
+  });
+
+  // 10. Write enriched apm.lock.yaml to bundle directory
+  const packMeta = {
+    format,
+    target: effectiveTarget,
+    packed_at: new Date().toISOString(),
+    mapped_from: Array.from(usedSrcPrefixes).sort(),
+  };
+  const enrichedLockfile = serializeLockfileYaml(lockfile, filteredDeps, packMeta);
+  const bundleLockfilePath = path.join(bundleDir, LOCKFILE_NAME);
+  fs.writeFileSync(bundleLockfilePath, enrichedLockfile, "utf-8");
+  core.info(`[APM Pack] Wrote enriched lockfile: ${bundleLockfilePath}`);
+
+  // Log bundle contents
+  const bundleFiles = listDirForPack(bundleDir);
+  core.info(`[APM Pack] Bundle contains ${bundleFiles.length} file(s):`);
+  bundleFiles.slice(0, 30).forEach(f => core.info(`  ${f}`));
+  if (bundleFiles.length > 30) core.info(`  ... and ${bundleFiles.length - 30} more`);
+
+  // 11. Create .tar.gz archive and remove bundle directory
+  const archiveName = `${bundleDirName}.tar.gz`;
+  const archivePath = path.join(path.resolve(outputDir), archiveName);
+  core.info(`[APM Pack] Creating archive: ${archivePath}`);
+  // -C keeps archive entries relative to the output dir, so the bundle
+  // directory name is the archive's single root entry.
+  await exec.exec("tar", ["-czf", archivePath, "-C", path.resolve(outputDir), bundleDirName]);
+  fs.rmSync(bundleDir, { recursive: true, force: true });
+  core.info(`[APM Pack] Archive created: ${archivePath}`);
+
+  return {
+    bundlePath: archivePath,
+    files: uniqueFiles,
+    target: effectiveTarget,
+    pathMappings,
+  };
+}
+
+// ---------------------------------------------------------------------------
+// Entry point
+// ---------------------------------------------------------------------------
+
+/**
+ * Main entry point called by the github-script step.
+ *
+ * Reads configuration from environment variables:
+ * APM_WORKSPACE – project root with apm.lock.yaml and installed files
+ * (default: /tmp/gh-aw/apm-workspace)
+ * APM_BUNDLE_OUTPUT – directory where the bundle archive is created
+ * (default: /tmp/gh-aw/apm-bundle-output)
+ * APM_TARGET – pack target (default: auto-detect)
+ */
+async function main() {
+ const workspaceDir = process.env.APM_WORKSPACE || "/tmp/gh-aw/apm-workspace";
+ const outputDir = process.env.APM_BUNDLE_OUTPUT || "/tmp/gh-aw/apm-bundle-output";
+ const target = process.env.APM_TARGET || null;
+
+ core.info("[APM Pack] Starting APM bundle packing");
+ core.info(`[APM Pack] APM_WORKSPACE : ${workspaceDir}`);
+ core.info(`[APM Pack] APM_BUNDLE_OUTPUT : ${outputDir}`);
+ core.info(`[APM Pack] APM_TARGET : ${target || "(auto-detect)"}`);
+
+ try {
+ fs.mkdirSync(outputDir, { recursive: true });
+ const result = await packBundle({ workspaceDir, outputDir, target });
+
+ core.info("[APM Pack] ✅ APM bundle packed successfully");
+ core.info(`[APM Pack] Bundle path : ${result.bundlePath}`);
+ core.info(`[APM Pack] Files bundled : ${result.files.length}`);
+ core.info(`[APM Pack] Target : ${result.target}`);
+
+ core.setOutput("bundle-path", result.bundlePath);
+ } catch (err) {
+ const msg = err instanceof Error ? err.message : String(err);
+ core.error(`[APM Pack] ❌ Failed to pack APM bundle: ${msg}`);
+ throw err;
+ }
+}
+
+// Public surface: main is the github-script entry point; the remaining
+// exports exist so the unit tests can exercise each step in isolation.
+module.exports = {
+  main,
+  packBundle,
+  parseApmYml,
+  detectTarget,
+  filterFilesByTarget,
+  serializeLockfileYaml,
+  scalarToYaml,
+  assertSafePackPath,
+  assertPackDestInside,
+  copyDirForPack,
+  listDirForPack,
+};
diff --git a/actions/setup/js/apm_pack.test.cjs b/actions/setup/js/apm_pack.test.cjs
new file mode 100644
index 00000000000..f5e32082b1d
--- /dev/null
+++ b/actions/setup/js/apm_pack.test.cjs
@@ -0,0 +1,740 @@
+// @ts-check
+// NOTE(review): dangling "///" — looks like a truncated triple-slash reference directive; confirm intended contents against sibling test files or remove
+
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+
+// ---------------------------------------------------------------------------
+// Global mock setup
+// ---------------------------------------------------------------------------
+
+const mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ setFailed: vi.fn(),
+ setOutput: vi.fn(),
+};
+
+const mockExec = {
+ exec: vi.fn(),
+};
+
+// Establish globals before requiring the modules
+global.core = mockCore;
+global.exec = mockExec;
+
+const { parseApmYml, detectTarget, filterFilesByTarget, serializeLockfileYaml, scalarToYaml, assertSafePackPath, assertPackDestInside, packBundle } = require("./apm_pack.cjs");
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/** Create a temp directory and return its path. */
+function makeTempDir() {
+ return fs.mkdtempSync(path.join(os.tmpdir(), "apm-pack-test-"));
+}
+
+/** Remove a directory recursively (best-effort). */
+function removeTempDir(dir) {
+ if (dir && fs.existsSync(dir)) {
+ fs.rmSync(dir, { recursive: true, force: true });
+ }
+}
+
+/** Write a file, creating parent directories as needed. Returns absolute path. */
+function writeFile(dir, relPath, content = "content") {
+ const full = path.join(dir, relPath);
+ fs.mkdirSync(path.dirname(full), { recursive: true });
+ fs.writeFileSync(full, content, "utf-8");
+ return full;
+}
+
+/**
+ * Build a minimal apm.lock.yaml string with given dependencies.
+ * @param {Array<{repoUrl: string, files: string[]}>} deps
+ */
+function buildLockfile(deps) {
+ const lines = ["lockfile_version: '1'", "apm_version: 0.8.5", "dependencies:"];
+ for (const dep of deps) {
+ lines.push(`- repo_url: ${dep.repoUrl}`);
+ lines.push(` host: github.com`);
+ lines.push(` resolved_commit: abc123`);
+ lines.push(` resolved_ref: main`);
+ lines.push(` depth: 1`);
+ lines.push(` deployed_files:`);
+ for (const f of dep.files) {
+ lines.push(` - ${f}`);
+ }
+ }
+ return lines.join("\n") + "\n";
+}
+
+// ---------------------------------------------------------------------------
+// parseApmYml
+// ---------------------------------------------------------------------------
+
+describe("parseApmYml", () => {
+ it("parses name and version from valid apm.yml", () => {
+ const content = `name: my-package\nversion: 1.2.3\n`;
+ const result = parseApmYml(content);
+ expect(result.name).toBe("my-package");
+ expect(result.version).toBe("1.2.3");
+ });
+
+ it("uses fallback name when name is missing", () => {
+ const content = `version: 1.0.0\n`;
+ const result = parseApmYml(content, "fallback-dir");
+ expect(result.name).toBe("fallback-dir");
+ expect(result.version).toBe("1.0.0");
+ });
+
+ it("uses default version 0.0.0 when version is missing", () => {
+ const content = `name: pkg\n`;
+ const result = parseApmYml(content);
+ expect(result.name).toBe("pkg");
+ expect(result.version).toBe("0.0.0");
+ });
+
+ it("uses defaults when content is empty", () => {
+ const result = parseApmYml("", "my-fallback");
+ expect(result.name).toBe("my-fallback");
+ expect(result.version).toBe("0.0.0");
+ });
+
+ it("handles single-quoted values", () => {
+ const content = `name: 'my-pkg'\nversion: '2.0.0'\n`;
+ const result = parseApmYml(content);
+ expect(result.name).toBe("my-pkg");
+ expect(result.version).toBe("2.0.0");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// detectTarget
+// ---------------------------------------------------------------------------
+
+describe("detectTarget", () => {
+ let tmpDir;
+
+ beforeEach(() => {
+ tmpDir = makeTempDir();
+ });
+ afterEach(() => {
+ removeTempDir(tmpDir);
+ });
+
+ it("returns explicit target normalized to vscode for copilot", () => {
+ expect(detectTarget(tmpDir, "copilot")).toBe("vscode");
+ });
+
+ it("returns explicit target normalized to vscode for vscode", () => {
+ expect(detectTarget(tmpDir, "vscode")).toBe("vscode");
+ });
+
+ it("returns explicit target normalized to vscode for agents", () => {
+ expect(detectTarget(tmpDir, "agents")).toBe("vscode");
+ });
+
+ it("returns claude for explicit claude target", () => {
+ expect(detectTarget(tmpDir, "claude")).toBe("claude");
+ });
+
+ it("returns cursor for explicit cursor target", () => {
+ expect(detectTarget(tmpDir, "cursor")).toBe("cursor");
+ });
+
+ it("returns opencode for explicit opencode target", () => {
+ expect(detectTarget(tmpDir, "opencode")).toBe("opencode");
+ });
+
+ it("returns all for explicit all target", () => {
+ expect(detectTarget(tmpDir, "all")).toBe("all");
+ });
+
+ it("auto-detects vscode when .github/ folder exists", () => {
+ fs.mkdirSync(path.join(tmpDir, ".github"));
+ expect(detectTarget(tmpDir, null)).toBe("vscode");
+ });
+
+ it("auto-detects claude when .claude/ folder exists", () => {
+ fs.mkdirSync(path.join(tmpDir, ".claude"));
+ expect(detectTarget(tmpDir, null)).toBe("claude");
+ });
+
+ it("auto-detects all when both .github/ and .claude/ exist", () => {
+ fs.mkdirSync(path.join(tmpDir, ".github"));
+ fs.mkdirSync(path.join(tmpDir, ".claude"));
+ expect(detectTarget(tmpDir, null)).toBe("all");
+ });
+
+ it("defaults to all when no target folders found", () => {
+ expect(detectTarget(tmpDir, null)).toBe("all");
+ });
+
+ it("explicit target wins over auto-detection", () => {
+ fs.mkdirSync(path.join(tmpDir, ".github"));
+ fs.mkdirSync(path.join(tmpDir, ".claude"));
+ expect(detectTarget(tmpDir, "claude")).toBe("claude");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// filterFilesByTarget
+// ---------------------------------------------------------------------------
+
+describe("filterFilesByTarget – direct matches", () => {
+ it("copilot target includes .github/ files only", () => {
+ const files = [".github/skills/foo/", ".claude/skills/foo/", ".cursor/rules/bar.md"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "copilot");
+ expect(result).toContain(".github/skills/foo/");
+ expect(result).not.toContain(".claude/skills/foo/");
+ expect(result).not.toContain(".cursor/rules/bar.md");
+ expect(Object.keys(pathMappings)).toHaveLength(0);
+ });
+
+ it("claude target includes .claude/ files only (no cross-map if source is already .claude/)", () => {
+ const files = [".claude/skills/foo/", ".claude/commands/cmd.md"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "claude");
+ expect(result).toContain(".claude/skills/foo/");
+ expect(result).toContain(".claude/commands/cmd.md");
+ expect(Object.keys(pathMappings)).toHaveLength(0);
+ });
+
+ it("all target includes all target directories", () => {
+ const files = [".github/skills/foo/", ".claude/skills/bar/", ".cursor/rules/x.md"];
+ const { files: result } = filterFilesByTarget(files, "all");
+ expect(result).toContain(".github/skills/foo/");
+ expect(result).toContain(".claude/skills/bar/");
+ expect(result).toContain(".cursor/rules/x.md");
+ });
+
+ it("applies cross-target mapping when no direct-match files exist", () => {
+ const files = [".github/skills/foo/"];
+ const { files: result } = filterFilesByTarget(files, "claude");
+ // No direct matches, but .github/skills/ → .claude/skills/ cross-map applies
+ expect(result).toContain(".claude/skills/foo/");
+ });
+});
+
+describe("filterFilesByTarget – cross-target mapping", () => {
+ it("claude target maps .github/skills/ to .claude/skills/", () => {
+ const files = [".github/skills/my-skill/"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "claude");
+ expect(result).toContain(".claude/skills/my-skill/");
+ expect(result).not.toContain(".github/skills/my-skill/");
+ expect(pathMappings[".claude/skills/my-skill/"]).toBe(".github/skills/my-skill/");
+ });
+
+ it("claude target maps .github/agents/ to .claude/agents/", () => {
+ const files = [".github/agents/my-agent.md"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "claude");
+ expect(result).toContain(".claude/agents/my-agent.md");
+ expect(pathMappings[".claude/agents/my-agent.md"]).toBe(".github/agents/my-agent.md");
+ });
+
+ it("claude target does NOT map .github/instructions/ (target-specific, not remapped)", () => {
+ const files = [".github/copilot-instructions.md"];
+ const { files: result } = filterFilesByTarget(files, "claude");
+ // .github/copilot-instructions.md does not start with .claude/ and has no cross-map
+ expect(result).not.toContain(".github/copilot-instructions.md");
+ });
+
+ it("copilot target maps .claude/skills/ to .github/skills/", () => {
+ const files = [".claude/skills/my-skill/"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "copilot");
+ expect(result).toContain(".github/skills/my-skill/");
+ expect(pathMappings[".github/skills/my-skill/"]).toBe(".claude/skills/my-skill/");
+ });
+
+ it("cursor target maps .github/skills/ to .cursor/skills/", () => {
+ const files = [".github/skills/my-skill/"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "cursor");
+ expect(result).toContain(".cursor/skills/my-skill/");
+ expect(pathMappings[".cursor/skills/my-skill/"]).toBe(".github/skills/my-skill/");
+ });
+
+ it("cross-mapped path is not included twice if already directly present", () => {
+ // File already exists under .claude/ AND under .github/ — should not duplicate
+ const files = [".claude/skills/foo/", ".github/skills/foo/"];
+ const { files: result } = filterFilesByTarget(files, "claude");
+ const claudeSkills = result.filter(f => f === ".claude/skills/foo/");
+ expect(claudeSkills).toHaveLength(1); // No duplicates
+ });
+
+ it("all target has no cross-target mappings needed (prefixes cover all dirs)", () => {
+ const files = [".github/skills/foo/", ".claude/skills/bar/"];
+ const { files: result, pathMappings } = filterFilesByTarget(files, "all");
+ expect(result).toContain(".github/skills/foo/");
+ expect(result).toContain(".claude/skills/bar/");
+ expect(Object.keys(pathMappings)).toHaveLength(0); // No mapping needed for "all"
+ });
+});
+
+// ---------------------------------------------------------------------------
+// scalarToYaml
+// ---------------------------------------------------------------------------
+
+describe("scalarToYaml", () => {
+ it("returns 'null' for null/undefined", () => {
+ expect(scalarToYaml(null)).toBe("null");
+ expect(scalarToYaml(undefined)).toBe("null");
+ });
+
+ it("returns true/false for booleans", () => {
+ expect(scalarToYaml(true)).toBe("true");
+ expect(scalarToYaml(false)).toBe("false");
+ });
+
+ it("returns number as string", () => {
+ expect(scalarToYaml(42)).toBe("42");
+ expect(scalarToYaml(0)).toBe("0");
+ });
+
+ it("single-quotes strings that look like YAML keywords", () => {
+ expect(scalarToYaml("null")).toBe("'null'");
+ expect(scalarToYaml("true")).toBe("'true'");
+ expect(scalarToYaml("false")).toBe("'false'");
+ expect(scalarToYaml("yes")).toBe("'yes'");
+ expect(scalarToYaml("no")).toBe("'no'");
+ expect(scalarToYaml("on")).toBe("'on'");
+ expect(scalarToYaml("off")).toBe("'off'");
+ expect(scalarToYaml("")).toBe("''");
+ });
+
+ it("single-quotes strings that look like numbers", () => {
+ expect(scalarToYaml("1")).toBe("'1'");
+ expect(scalarToYaml("42")).toBe("'42'");
+ expect(scalarToYaml("3.14")).toBe("'3.14'");
+ });
+
+ it("single-quotes ISO datetime strings (YAML 1.1 parses them as datetime)", () => {
+ expect(scalarToYaml("2024-01-15T10:00:00.000Z")).toBe("'2024-01-15T10:00:00.000Z'");
+ expect(scalarToYaml("2024-03-29T11:13:45.004Z")).toBe("'2024-03-29T11:13:45.004Z'");
+ });
+
+ it("returns regular strings as-is", () => {
+ expect(scalarToYaml("https://github.com/owner/repo")).toBe("https://github.com/owner/repo");
+ expect(scalarToYaml("main")).toBe("main");
+ expect(scalarToYaml("github.com")).toBe("github.com");
+ expect(scalarToYaml("abc123")).toBe("abc123");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// serializeLockfileYaml
+// ---------------------------------------------------------------------------
+
+describe("serializeLockfileYaml", () => {
+ const baseLockfile = {
+ lockfile_version: "1",
+ generated_at: "2024-01-15T10:00:00.000000+00:00",
+ apm_version: "0.8.5",
+ dependencies: [],
+ pack: {},
+ };
+
+ it("includes pack: section with format, target, packed_at", () => {
+ const yaml = serializeLockfileYaml(baseLockfile, [], {
+ format: "apm",
+ target: "claude",
+ packed_at: "2024-01-15T10:00:00.000Z",
+ });
+ expect(yaml).toContain("pack:");
+ expect(yaml).toContain(" format: apm");
+ expect(yaml).toContain(" target: claude");
+ expect(yaml).toContain(" packed_at: '2024-01-15T10:00:00.000Z'");
+ });
+
+ it("includes mapped_from when cross-target mappings were used", () => {
+ const yaml = serializeLockfileYaml(baseLockfile, [], {
+ format: "apm",
+ target: "claude",
+ packed_at: "2024-01-15T10:00:00.000Z",
+ mapped_from: [".github/agents/", ".github/skills/"],
+ });
+ expect(yaml).toContain(" mapped_from:");
+ expect(yaml).toContain(" - .github/agents/");
+ expect(yaml).toContain(" - .github/skills/");
+ });
+
+ it("omits mapped_from when no cross-target mappings", () => {
+ const yaml = serializeLockfileYaml(baseLockfile, [], {
+ format: "apm",
+ target: "all",
+ packed_at: "2024-01-15T10:00:00.000Z",
+ mapped_from: [],
+ });
+ expect(yaml).not.toContain("mapped_from");
+ });
+
+ it("includes lockfile_version and apm_version", () => {
+ const yaml = serializeLockfileYaml(baseLockfile, [], {
+ format: "apm",
+ target: "all",
+ packed_at: "t",
+ });
+ expect(yaml).toContain("lockfile_version: '1'");
+ expect(yaml).toContain("apm_version: 0.8.5");
+ });
+
+ it("includes filtered deployed_files for each dependency", () => {
+ const dep = {
+ repo_url: "https://github.com/owner/pkg",
+ host: "github.com",
+ resolved_commit: "abc123",
+ resolved_ref: "main",
+ version: null,
+ virtual_path: null,
+ is_virtual: false,
+ depth: 1,
+ resolved_by: null,
+ package_type: null,
+ source: null,
+ local_path: null,
+ content_hash: null,
+ is_dev: false,
+ deployed_files: [".claude/skills/foo/", ".claude/agents/bar.md"],
+ };
+ const yaml = serializeLockfileYaml(baseLockfile, [dep], {
+ format: "apm",
+ target: "claude",
+ packed_at: "t",
+ });
+ expect(yaml).toContain("- repo_url: https://github.com/owner/pkg");
+ expect(yaml).toContain(" - .claude/skills/foo/");
+ expect(yaml).toContain(" - .claude/agents/bar.md");
+ });
+
+ it("pack: section comes before lockfile_version (prepended)", () => {
+ const yaml = serializeLockfileYaml(baseLockfile, [], {
+ format: "apm",
+ target: "all",
+ packed_at: "t",
+ });
+ const packIdx = yaml.indexOf("pack:");
+ const versionIdx = yaml.indexOf("lockfile_version:");
+ expect(packIdx).toBeLessThan(versionIdx);
+ });
+
+ it("output is parseable by parseAPMLockfile from apm_unpack", () => {
+ const { parseAPMLockfile } = require("./apm_unpack.cjs");
+ const dep = {
+ repo_url: "https://github.com/owner/pkg",
+ host: "github.com",
+ resolved_commit: "abc",
+ resolved_ref: "main",
+ version: "1.0.0",
+ virtual_path: null,
+ is_virtual: false,
+ depth: 1,
+ resolved_by: null,
+ package_type: null,
+ source: null,
+ local_path: null,
+ content_hash: null,
+ is_dev: false,
+ deployed_files: [".claude/skills/foo/"],
+ };
+ const yaml = serializeLockfileYaml(baseLockfile, [dep], {
+ format: "apm",
+ target: "claude",
+ packed_at: "2024-01-15T10:00:00.000Z",
+ });
+ const parsed = parseAPMLockfile(yaml);
+ expect(parsed.lockfile_version).toBe("1");
+ expect(parsed.pack.target).toBe("claude");
+ expect(parsed.pack.format).toBe("apm");
+ expect(parsed.dependencies).toHaveLength(1);
+ expect(parsed.dependencies[0].deployed_files).toContain(".claude/skills/foo/");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// assertSafePackPath
+// ---------------------------------------------------------------------------
+
+describe("assertSafePackPath", () => {
+ it("accepts safe relative paths", () => {
+ expect(() => assertSafePackPath(".github/skills/foo/")).not.toThrow();
+ expect(() => assertSafePackPath(".claude/agents/bar.md")).not.toThrow();
+ });
+
+ it("rejects absolute paths", () => {
+ expect(() => assertSafePackPath("/etc/passwd")).toThrow(/unsafe absolute path/);
+ expect(() => assertSafePackPath("/tmp/secret")).toThrow(/unsafe absolute path/);
+ });
+
+ it("rejects path traversal entries", () => {
+ expect(() => assertSafePackPath("../etc/passwd")).toThrow(/path-traversal/);
+ expect(() => assertSafePackPath(".github/../../../etc/passwd")).toThrow(/path-traversal/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// assertPackDestInside
+// ---------------------------------------------------------------------------
+
+describe("assertPackDestInside", () => {
+ it("accepts paths inside the bundle directory", () => {
+ const bundleDir = "/tmp/my-bundle";
+ expect(() => assertPackDestInside("/tmp/my-bundle/file.txt", bundleDir)).not.toThrow();
+ expect(() => assertPackDestInside("/tmp/my-bundle/subdir/file.txt", bundleDir)).not.toThrow();
+ });
+
+ it("rejects paths that escape the bundle directory", () => {
+ const bundleDir = "/tmp/my-bundle";
+ expect(() => assertPackDestInside("/tmp/other/file.txt", bundleDir)).toThrow(/escapes the bundle/);
+ expect(() => assertPackDestInside("/etc/passwd", bundleDir)).toThrow(/escapes the bundle/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// packBundle – integration tests with real file system
+// ---------------------------------------------------------------------------
+
+describe("packBundle – integration", () => {
+ let workspaceDir;
+ let outputDir;
+
+ beforeEach(() => {
+ workspaceDir = makeTempDir();
+ outputDir = makeTempDir();
+
+ // Wire up exec.exec to run real tar
+ const { spawnSync } = require("child_process");
+ mockExec.exec.mockImplementation(async (cmd, args = []) => {
+ const result = spawnSync(cmd, args, { stdio: "inherit" });
+ if (result.status !== 0) {
+ throw new Error(`Command failed: ${cmd} ${args.join(" ")} (exit ${result.status})`);
+ }
+ return result.status;
+ });
+ });
+
+ afterEach(() => {
+ removeTempDir(workspaceDir);
+ removeTempDir(outputDir);
+ vi.clearAllMocks();
+ });
+
+ it("packs a simple bundle with .github/ files", async () => {
+ // Create workspace
+ writeFile(workspaceDir, "apm.yml", "name: test-pkg\nversion: 1.0.0\n");
+ const lockfileContent = buildLockfile([
+ {
+ repoUrl: "https://github.com/owner/skill-a",
+ files: [".github/skills/skill-a/"],
+ },
+ ]);
+ writeFile(workspaceDir, "apm.lock.yaml", lockfileContent);
+ writeFile(workspaceDir, ".github/skills/skill-a/skill.md", "# Skill A\n");
+ writeFile(workspaceDir, ".github/skills/skill-a/notes.txt", "Notes\n");
+
+ const result = await packBundle({
+ workspaceDir,
+ outputDir,
+ target: "all",
+ });
+
+ expect(result.bundlePath).toMatch(/test-pkg-1\.0\.0\.tar\.gz$/);
+ expect(fs.existsSync(result.bundlePath)).toBe(true);
+ expect(result.files).toContain(".github/skills/skill-a/");
+ expect(result.target).toBe("all");
+
+ // Verify tar.gz contains expected files
+ const { spawnSync } = require("child_process");
+ const listResult = spawnSync("tar", ["-tzf", result.bundlePath], { encoding: "utf-8" });
+ expect(listResult.status).toBe(0);
+ const entries = listResult.stdout.split("\n").filter(Boolean);
+ expect(entries.some(e => e.includes("skill.md"))).toBe(true);
+ expect(entries.some(e => e.includes(`.github/skills/skill-a`))).toBe(true);
+ expect(entries.some(e => e.includes("apm.lock.yaml"))).toBe(true);
+ });
+
+ it("applies cross-target mapping for claude target", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: cross-test\nversion: 2.0.0\n");
+ const lockfileContent = buildLockfile([
+ {
+ repoUrl: "https://github.com/owner/skills",
+ files: [".github/skills/my-skill/", ".github/copilot-instructions.md"],
+ },
+ ]);
+ writeFile(workspaceDir, "apm.lock.yaml", lockfileContent);
+ writeFile(workspaceDir, ".github/skills/my-skill/skill.md", "# My Skill\n");
+ writeFile(workspaceDir, ".github/copilot-instructions.md", "# Copilot\n");
+
+ const result = await packBundle({
+ workspaceDir,
+ outputDir,
+ target: "claude",
+ });
+
+ expect(result.bundlePath).toMatch(/cross-test-2\.0\.0\.tar\.gz$/);
+ expect(fs.existsSync(result.bundlePath)).toBe(true);
+
+ // The bundle should contain .claude/skills/my-skill/ (mapped from .github/skills/)
+ // but NOT .github/copilot-instructions.md (no cross-map for instructions)
+ const { spawnSync } = require("child_process");
+ const listResult = spawnSync("tar", ["-tzf", result.bundlePath], { encoding: "utf-8" });
+ const entries = listResult.stdout.split("\n").filter(Boolean);
+ expect(entries.some(e => e.includes(".claude/skills/my-skill/skill.md"))).toBe(true);
+ expect(entries.some(e => e.includes("copilot-instructions.md"))).toBe(false);
+
+ // Verify pathMappings
+ expect(result.pathMappings[".claude/skills/my-skill/"]).toBe(".github/skills/my-skill/");
+ });
+
+ it("sets bundle-path output via core.setOutput", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: output-test\nversion: 1.0.0\n");
+ writeFile(workspaceDir, "apm.lock.yaml", buildLockfile([{ repoUrl: "https://github.com/o/r", files: [".github/copilot-instructions.md"] }]));
+ writeFile(workspaceDir, ".github/copilot-instructions.md", "# Instructions\n");
+
+ await packBundle({ workspaceDir, outputDir, target: "all" });
+    // core.setOutput is invoked by main(), not by packBundle itself, so this test makes no assertion; the main() suite below verifies the "bundle-path" output
+ });
+
+ it("throws when apm.lock.yaml is missing", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: missing-lock\nversion: 1.0.0\n");
+ await expect(packBundle({ workspaceDir, outputDir, target: "all" })).rejects.toThrow(/apm\.lock\.yaml not found/);
+ });
+
+ it("throws when a deployed file is missing from disk", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: missing-file\nversion: 1.0.0\n");
+ const lockfileContent = buildLockfile([
+ {
+ repoUrl: "https://github.com/owner/pkg",
+ files: [".github/skills/missing-skill/"],
+ },
+ ]);
+ writeFile(workspaceDir, "apm.lock.yaml", lockfileContent);
+ // Do NOT create .github/skills/missing-skill/ on disk
+
+ await expect(packBundle({ workspaceDir, outputDir, target: "all" })).rejects.toThrow(/missing on disk/);
+ });
+
+ it("skips symlinks in directories (security: never bundle symlinks)", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: symlink-test\nversion: 1.0.0\n");
+ writeFile(workspaceDir, "apm.lock.yaml", buildLockfile([{ repoUrl: "https://github.com/o/r", files: [".github/skills/skill-a/"] }]));
+ writeFile(workspaceDir, ".github/skills/skill-a/real.md", "# Real\n");
+ // Create a symlink inside the skill directory
+ try {
+ fs.symlinkSync("/etc/passwd", path.join(workspaceDir, ".github/skills/skill-a/link.txt"));
+ } catch {
+ // Skip if symlinks not supported
+ return;
+ }
+
+ const result = await packBundle({ workspaceDir, outputDir, target: "all" });
+ expect(fs.existsSync(result.bundlePath)).toBe(true);
+
+ // Verify symlink was not bundled
+ const { spawnSync } = require("child_process");
+ const listResult = spawnSync("tar", ["-tzf", result.bundlePath], { encoding: "utf-8" });
+ const entries = listResult.stdout.split("\n").filter(Boolean);
+ expect(entries.some(e => e.includes("link.txt"))).toBe(false);
+ expect(entries.some(e => e.includes("real.md"))).toBe(true);
+ });
+
+ it("rejects path-traversal entries in deployed_files that pass target filter", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: traversal-test\nversion: 1.0.0\n");
+ // Use a path that starts with .github/ (passes "all" target filter) but contains ..
+ const maliciousLockfile = "lockfile_version: '1'\napm_version: 0.8.5\ndependencies:\n- repo_url: https://github.com/evil/pkg\n depth: 1\n deployed_files:\n - .github/skills/../../etc/passwd\n";
+ writeFile(workspaceDir, "apm.lock.yaml", maliciousLockfile);
+
+ await expect(packBundle({ workspaceDir, outputDir, target: "all" })).rejects.toThrow(/path-traversal/);
+ });
+
+ it("bundles multiple dependencies with deduplication", async () => {
+ writeFile(workspaceDir, "apm.yml", "name: multi-dep\nversion: 1.0.0\n");
+ const lockfileContent = buildLockfile([
+ { repoUrl: "https://github.com/o/pkg-a", files: [".github/skills/skill-a/", ".github/copilot-instructions.md"] },
+ { repoUrl: "https://github.com/o/pkg-b", files: [".github/skills/skill-b/", ".github/copilot-instructions.md"] },
+ ]);
+ writeFile(workspaceDir, "apm.lock.yaml", lockfileContent);
+ writeFile(workspaceDir, ".github/skills/skill-a/skill.md", "# A\n");
+ writeFile(workspaceDir, ".github/skills/skill-b/skill.md", "# B\n");
+ writeFile(workspaceDir, ".github/copilot-instructions.md", "# Both\n");
+
+ const result = await packBundle({ workspaceDir, outputDir, target: "all" });
+ expect(result.files).toContain(".github/skills/skill-a/");
+ expect(result.files).toContain(".github/skills/skill-b/");
+ // .github/copilot-instructions.md should appear only once despite being in both deps
+ const count = result.files.filter(f => f === ".github/copilot-instructions.md").length;
+ expect(count).toBe(1);
+ });
+
+ it("the enriched apm.lock.yaml in the bundle is parseable by parseAPMLockfile", async () => {
+ const { parseAPMLockfile } = require("./apm_unpack.cjs");
+ writeFile(workspaceDir, "apm.yml", "name: parse-test\nversion: 1.0.0\n");
+ writeFile(workspaceDir, "apm.lock.yaml", buildLockfile([{ repoUrl: "https://github.com/o/r", files: [".github/skills/foo/"] }]));
+ writeFile(workspaceDir, ".github/skills/foo/skill.md", "# Foo\n");
+
+ const result = await packBundle({ workspaceDir, outputDir, target: "all" });
+
+ // Extract and read the lockfile from the bundle
+ const extractDir = makeTempDir();
+ try {
+ const { spawnSync } = require("child_process");
+ spawnSync("tar", ["-xzf", result.bundlePath, "-C", extractDir]);
+ // Find the lockfile
+ const bundleDirs = fs.readdirSync(extractDir);
+ expect(bundleDirs.length).toBeGreaterThan(0);
+ const lockfilePath = path.join(extractDir, bundleDirs[0], "apm.lock.yaml");
+ expect(fs.existsSync(lockfilePath)).toBe(true);
+ const lockfileContent = fs.readFileSync(lockfilePath, "utf-8");
+ const parsed = parseAPMLockfile(lockfileContent);
+ expect(parsed.pack.format).toBe("apm");
+ expect(parsed.dependencies.length).toBeGreaterThan(0);
+ } finally {
+ removeTempDir(extractDir);
+ }
+ });
+});
+
+// ---------------------------------------------------------------------------
+// main() – basic smoke test
+// ---------------------------------------------------------------------------
+
+describe("main()", () => {
+ let workspaceDir;
+ let outputDir;
+ let origEnv;
+
+ beforeEach(() => {
+ workspaceDir = makeTempDir();
+ outputDir = makeTempDir();
+ origEnv = { ...process.env };
+
+ const { spawnSync } = require("child_process");
+ mockExec.exec.mockImplementation(async (cmd, args = []) => {
+ const result = spawnSync(cmd, args, { stdio: "inherit" });
+ if (result.status !== 0) throw new Error(`Failed: ${cmd} ${args.join(" ")}`);
+ return result.status;
+ });
+ });
+
+ afterEach(() => {
+ process.env = origEnv;
+ removeTempDir(workspaceDir);
+ removeTempDir(outputDir);
+ vi.clearAllMocks();
+ });
+
+ it("calls core.setOutput with bundle-path on success", async () => {
+ process.env.APM_WORKSPACE = workspaceDir;
+ process.env.APM_BUNDLE_OUTPUT = outputDir;
+ process.env.APM_TARGET = "all";
+
+ writeFile(workspaceDir, "apm.yml", "name: main-test\nversion: 1.0.0\n");
+ writeFile(workspaceDir, "apm.lock.yaml", buildLockfile([{ repoUrl: "https://github.com/o/r", files: [".github/copilot-instructions.md"] }]));
+ writeFile(workspaceDir, ".github/copilot-instructions.md", "# Instructions\n");
+
+ const { main } = require("./apm_pack.cjs");
+ await main();
+
+ expect(mockCore.setOutput).toHaveBeenCalledWith("bundle-path", expect.stringMatching(/\.tar\.gz$/));
+ });
+});
diff --git a/actions/setup/js/apm_unpack.cjs b/actions/setup/js/apm_unpack.cjs
index 360e0f574f0..72643c36f4e 100644
--- a/actions/setup/js/apm_unpack.cjs
+++ b/actions/setup/js/apm_unpack.cjs
@@ -87,6 +87,7 @@ function unquoteYaml(raw) {
* @property {string | null} local_path
* @property {string | null} content_hash
* @property {boolean} is_dev
+ * @property {Record<string, any>} extra - Unknown fields preserved for non-destructive round-trip
*/
/**
@@ -302,6 +303,7 @@ function makeEmptyDep() {
local_path: null,
content_hash: null,
is_dev: false,
+ extra: {},
};
}
@@ -356,7 +358,8 @@ function assignDepField(dep, key, value) {
dep.is_dev = value === true || value === "true";
break;
default:
- // Unknown field – ignore silently
+ // Unknown field – preserve in extra for non-destructive round-trip
+ dep.extra[key] = value;
break;
}
}
diff --git a/actions/setup/js/run_apm_install.cjs b/actions/setup/js/run_apm_install.cjs
new file mode 100644
index 00000000000..e29b9dbe90d
--- /dev/null
+++ b/actions/setup/js/run_apm_install.cjs
@@ -0,0 +1,47 @@
+// @ts-check
+/**
+ * run_apm_install.cjs
+ *
+ * Standalone entry-point for apm_install.cjs used in CI integration tests
+ * and local development. Sets up lightweight CJS-compatible shims for the
+ * @actions/* globals expected by apm_install.cjs, then calls main().
+ *
+ * Environment variables (consumed by apm_install.main):
+ * GITHUB_APM_PAT – GitHub token (falls back to GITHUB_TOKEN)
+ * APM_PACKAGES – JSON array of package slugs
+ * APM_WORKSPACE – workspace directory for downloaded files + lockfile
+ *
+ * Usage:
+ * node actions/setup/js/run_apm_install.cjs
+ */
+
+"use strict";
+
+const { setupGlobals } = require("./setup_globals.cjs");
+const { main } = require("./apm_install.cjs");
+
+// Minimal shim for @actions/core — only the methods used by apm_install.cjs.
+const core = {
+ info: msg => console.log(msg),
+ warning: msg => console.warn(`::warning::${msg}`),
+ error: msg => console.error(`::error::${msg}`),
+ setFailed: msg => {
+ console.error(`::error::${msg}`);
+ process.exitCode = 1;
+ },
+ setOutput: (name, value) => console.log(`::set-output name=${name}::${value}`),
+};
+
+// Wire shims into globals so apm_install.cjs can use them.
+setupGlobals(
+ /** @type {any} */ core, // logging
+ {}, // @actions/github — not used directly (apm_install creates its own Octokit)
+ /** @type {any} */ {}, // GitHub Actions event context — not used
+ /** @type {any} */ {}, // @actions/exec — not used
+ {} // @actions/io — not used
+);
+
+main().catch(err => {
+ console.error(`::error::${err.message}`);
+ process.exit(1);
+});
diff --git a/actions/setup/js/run_apm_pack.cjs b/actions/setup/js/run_apm_pack.cjs
new file mode 100644
index 00000000000..0a3069137c4
--- /dev/null
+++ b/actions/setup/js/run_apm_pack.cjs
@@ -0,0 +1,65 @@
+// @ts-check
+/**
+ * run_apm_pack.cjs
+ *
+ * Standalone entry-point for apm_pack.cjs used in CI integration tests and
+ * local development. Sets up lightweight CJS-compatible shims for the
+ * @actions/* globals expected by apm_pack.cjs (which imports apm_unpack.cjs),
+ * then calls main().
+ *
+ * The @actions/core v3+ package is ESM-only and cannot be loaded via require().
+ * The shims below reproduce the subset of the API used by apm_pack.cjs:
+ * core.info / core.warning / core.error / core.setFailed / core.setOutput
+ * exec.exec(cmd, args, options)
+ *
+ * Environment variables (consumed by apm_pack.main):
+ * APM_WORKSPACE – project root with apm.lock.yaml and installed files
+ * APM_BUNDLE_OUTPUT – directory where the bundle archive is written
+ * APM_TARGET – pack target (claude, copilot/vscode, cursor, opencode, all)
+ *
+ * Usage:
+ * node actions/setup/js/run_apm_pack.cjs
+ */
+
+"use strict";
+
+const { spawnSync } = require("child_process");
+const { setupGlobals } = require("./setup_globals.cjs");
+const { main } = require("./apm_pack.cjs");
+
+// Minimal shim for @actions/core — only the methods used by apm_pack.cjs.
+const core = {
+ info: msg => console.log(msg),
+ warning: msg => console.warn(`::warning::${msg}`),
+ error: msg => console.error(`::error::${msg}`),
+ setFailed: msg => {
+ console.error(`::error::${msg}`);
+ process.exitCode = 1;
+ },
+ setOutput: (name, value) => console.log(`::set-output name=${name}::${value}`),
+};
+
+// Minimal shim for @actions/exec — only exec() is used by apm_pack.cjs.
+const exec = {
+ exec: async (cmd, args = [], opts = {}) => {
+ const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
+ if (result.status !== 0) {
+ throw new Error(`Command failed: ${cmd} ${args.join(" ")} (exit ${result.status})`);
+ }
+ return result.status;
+ },
+};
+
+// Wire shims into globals so apm_pack.cjs (and the imported apm_unpack.cjs) can use them.
+setupGlobals(
+ /** @type {any} */ (core), // logging, outputs, inputs
+ {}, // @actions/github – not used by apm_pack
+ /** @type {any} */ ({}), // GitHub Actions event context – not used by apm_pack
+ /** @type {any} */ (exec), // runs `tar -czf`
+ {} // @actions/io – not used by apm_pack
+);
+
+main().catch(err => {
+ console.error(`::error::${err.message}`);
+ process.exit(1);
+});
diff --git a/actions/setup/js/tsconfig.json b/actions/setup/js/tsconfig.json
index 0f9de57f780..608be435914 100644
--- a/actions/setup/js/tsconfig.json
+++ b/actions/setup/js/tsconfig.json
@@ -32,5 +32,5 @@
"typeRoots": ["./node_modules/@types", "./types"]
},
"include": ["*.cjs", "types/*.d.ts"],
- "exclude": ["../../../node_modules", "../../../dist", "*.test.cjs", "run_apm_unpack.cjs"]
+ "exclude": ["../../../node_modules", "../../../dist", "*.test.cjs", "run_apm_unpack.cjs", "run_apm_install.cjs", "run_apm_pack.cjs"]
}
diff --git a/pkg/workflow/apm_dependencies.go b/pkg/workflow/apm_dependencies.go
index 400ce19d827..a06658a9551 100644
--- a/pkg/workflow/apm_dependencies.go
+++ b/pkg/workflow/apm_dependencies.go
@@ -1,10 +1,10 @@
package workflow
import (
+ "encoding/json"
"fmt"
"sort"
- "github.com/github/gh-aw/pkg/constants"
"github.com/github/gh-aw/pkg/logger"
)
@@ -114,9 +114,18 @@ func buildAPMAppTokenInvalidationStep() []string {
return steps
}
-// GenerateAPMPackStep generates the GitHub Actions step that installs APM packages and
-// packs them into a bundle in the activation job. The step always uses isolated:true because
-// the activation job has no repo context to preserve.
+// GenerateAPMPackStep generates the GitHub Actions step that installs APM packages
+// from GitHub and packs them into a bundle in a single github-script step.
+//
+// The step uses two JavaScript modules from the gh-aw setup actions:
+// 1. apm_install.cjs — downloads packages from GitHub using the REST API,
+// writes the installed files and apm.lock.yaml to APM_WORKSPACE.
+// This replaces the previous `pip install apm-cli && apm install` shell step.
+// 2. apm_pack.cjs — reads the installed workspace, filters files by target,
+// creates the .tar.gz bundle, and emits the bundle-path output.
+//
+// The step id is "apm_pack" so the upload-artifact step can reference
+// ${{ steps.apm_pack.outputs.bundle-path }}.
//
// Parameters:
// - apmDeps: APM dependency configuration extracted from frontmatter
@@ -132,43 +141,52 @@ func GenerateAPMPackStep(apmDeps *APMDependenciesInfo, target string, data *Work
apmDepsLog.Printf("Generating APM pack step: %d packages, target=%s", len(apmDeps.Packages), target)
- actionRef, err := GetActionPinWithData("microsoft/apm-action", string(constants.DefaultAPMActionVersion), data)
- if err != nil {
- apmDepsLog.Printf("Failed to resolve microsoft/apm-action@%s: %v", constants.DefaultAPMActionVersion, err)
- actionRef = GetActionPin("microsoft/apm-action")
+ // GITHUB_APM_PAT is the token used by apm_install.cjs for GitHub API access.
+ // When a GitHub App is configured, use the minted app token; otherwise use the
+ // cascading fallback token.
+ hasGitHubAppToken := apmDeps.GitHubApp != nil
+
+ var githubAPMPatExpr string
+ if hasGitHubAppToken {
+ githubAPMPatExpr = fmt.Sprintf("${{ steps.%s.outputs.token }}", apmAppTokenStepID)
+ } else {
+ githubAPMPatExpr = getEffectiveAPMGitHubToken(apmDeps.GitHubToken)
}
+ // Encode packages as a JSON array for the APM_PACKAGES env var.
+ // json.Marshal on a []string value never returns an error.
+ pkgsJSON, _ := json.Marshal(apmDeps.Packages)
+
+ githubScriptRef := GetActionPin("actions/github-script")
+
+ // Single github-script step: install packages from GitHub, then pack them.
lines := []string{
- " - name: Install and pack APM dependencies",
+ " - name: Install and pack APM bundle",
" id: apm_pack",
- " uses: " + actionRef,
+ " uses: " + githubScriptRef,
+ " env:",
+ " GITHUB_APM_PAT: " + githubAPMPatExpr,
+ // APM_PACKAGES is a JSON array; single-quoting the value prevents YAML
+ // from parsing it as an array literal (GitHub Actions env values must be strings).
+ " APM_PACKAGES: '" + string(pkgsJSON) + "'",
+ " APM_WORKSPACE: /tmp/gh-aw/apm-workspace",
+ " APM_BUNDLE_OUTPUT: /tmp/gh-aw/apm-bundle-output",
+ " APM_TARGET: " + target,
}
- // Build env block: always add GITHUB_TOKEN (app token takes priority over cascading fallback)
- // plus any user-provided env vars.
- // If github-app is configured, GITHUB_TOKEN is set from the minted app token, so any
- // user-supplied GITHUB_TOKEN key is skipped to avoid a duplicate / conflicting entry.
- hasGitHubAppToken := apmDeps.GitHubApp != nil
- hasUserEnv := len(apmDeps.Env) > 0
- lines = append(lines, " env:")
- if hasGitHubAppToken {
- lines = append(lines,
- fmt.Sprintf(" GITHUB_TOKEN: ${{ steps.%s.outputs.token }}", apmAppTokenStepID),
- )
- } else {
- // No github-app: use cascading token fallback (custom token or GH_AW_PLUGINS_TOKEN cascade)
- lines = append(lines,
- " GITHUB_TOKEN: "+getEffectiveAPMGitHubToken(apmDeps.GitHubToken),
- )
+ // Include any user-provided env vars (skip keys already set above)
+ reserved := map[string]bool{
+ "GITHUB_APM_PAT": true,
+ "APM_PACKAGES": true,
+ "APM_WORKSPACE": true,
+ "APM_TARGET": true, "APM_BUNDLE_OUTPUT": true,
}
- if hasUserEnv {
+ if len(apmDeps.Env) > 0 {
keys := make([]string, 0, len(apmDeps.Env))
for k := range apmDeps.Env {
- // Skip GITHUB_TOKEN when github-app provides it to avoid duplicate keys
- if hasGitHubAppToken && k == "GITHUB_TOKEN" {
- continue
+ if !reserved[k] {
+ keys = append(keys, k)
}
- keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
@@ -178,20 +196,13 @@ func GenerateAPMPackStep(apmDeps *APMDependenciesInfo, target string, data *Work
lines = append(lines,
" with:",
- " dependencies: |",
- )
-
- for _, dep := range apmDeps.Packages {
- lines = append(lines, " - "+dep)
- }
-
- lines = append(lines,
- " isolated: 'true'",
- " pack: 'true'",
- " archive: 'true'",
- " target: "+target,
- " working-directory: /tmp/gh-aw/apm-workspace",
- " apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ " script: |",
+ " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');",
+ " setupGlobals(core, github, context, exec, io);",
+ " const { main: apmInstall } = require('"+SetupActionDestination+"/apm_install.cjs');",
+ " await apmInstall();",
+ " const { main: apmPack } = require('"+SetupActionDestination+"/apm_pack.cjs');",
+ " await apmPack();",
)
return GitHubActionStep(lines)
diff --git a/pkg/workflow/apm_dependencies_test.go b/pkg/workflow/apm_dependencies_test.go
index 8bc7081a22e..28101f012be 100644
--- a/pkg/workflow/apm_dependencies_test.go
+++ b/pkg/workflow/apm_dependencies_test.go
@@ -350,17 +350,15 @@ func TestGenerateAPMPackStep(t *testing.T) {
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}},
target: "copilot",
expectedContains: []string{
- "Install and pack APM dependencies",
+ "Install and pack APM bundle",
"id: apm_pack",
- "microsoft/apm-action",
- "dependencies: |",
- "- microsoft/apm-sample-package",
- "isolated: 'true'",
- "pack: 'true'",
- "archive: 'true'",
- "target: copilot",
- "working-directory: /tmp/gh-aw/apm-workspace",
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "actions/github-script",
+ "apm_install.cjs",
+ "apm_pack.cjs",
+ `APM_PACKAGES: '["microsoft/apm-sample-package"]'`,
+ "APM_TARGET: copilot",
+ "APM_WORKSPACE: /tmp/gh-aw/apm-workspace",
+ "GITHUB_APM_PAT:",
},
},
{
@@ -368,13 +366,13 @@ func TestGenerateAPMPackStep(t *testing.T) {
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package", "github/skills/review"}},
target: "claude",
expectedContains: []string{
- "Install and pack APM dependencies",
+ "Install and pack APM bundle",
"id: apm_pack",
- "microsoft/apm-action",
- "- microsoft/apm-sample-package",
- "- github/skills/review",
- "target: claude",
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "apm_install.cjs",
+ "apm_pack.cjs",
+ `"microsoft/apm-sample-package"`,
+ `"github/skills/review"`,
+ "APM_TARGET: claude",
},
},
{
@@ -382,16 +380,18 @@ func TestGenerateAPMPackStep(t *testing.T) {
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}},
target: "all",
expectedContains: []string{
- "target: all",
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "APM_TARGET: all",
+ "apm_install.cjs",
+ "apm_pack.cjs",
},
},
{
- name: "Custom APM version still uses env var reference in step",
+ name: "No pip install or apm-cli in new JS-only implementation",
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}, Version: "v1.0.0"},
target: "copilot",
expectedContains: []string{
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "apm_install.cjs",
+ "apm_pack.cjs",
},
},
}
@@ -409,6 +409,11 @@ func TestGenerateAPMPackStep(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
+ // New JS-only implementation must not reference Python tooling or microsoft/apm-action
+ assert.NotContains(t, combined, "microsoft/apm-action", "Should not reference microsoft/apm-action")
+ assert.NotContains(t, combined, "pip install", "Should not use pip install")
+ assert.NotContains(t, combined, "apm install", "Should not call apm CLI install")
+
for _, expected := range tt.expectedContains {
assert.Contains(t, combined, expected, "Step should contain: %s", expected)
}
@@ -492,7 +497,7 @@ func TestGenerateAPMRestoreStep(t *testing.T) {
}
func TestGenerateAPMPackStepWithGitHubApp(t *testing.T) {
- t.Run("Pack step includes GITHUB_TOKEN env when github-app is configured", func(t *testing.T) {
+ t.Run("Pack step uses GITHUB_APM_PAT from app token when github-app is configured", func(t *testing.T) {
apmDeps := &APMDependenciesInfo{
Packages: []string{"acme-org/acme-skills/plugins/dev-tools"},
GitHubApp: &GitHubAppConfig{
@@ -506,12 +511,13 @@ func TestGenerateAPMPackStepWithGitHubApp(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN: ${{ steps.apm-app-token.outputs.token }}", "Should inject app token as GITHUB_TOKEN")
+ assert.Contains(t, combined, "GITHUB_APM_PAT: ${{ steps.apm-app-token.outputs.token }}", "Should inject app token as GITHUB_APM_PAT")
assert.Contains(t, combined, "env:", "Should have env section")
- assert.Contains(t, combined, "- acme-org/acme-skills/plugins/dev-tools", "Should list dependency")
+ assert.Contains(t, combined, "acme-org/acme-skills/plugins/dev-tools", "Should list dependency in apm.yml")
+ assert.NotContains(t, combined, "microsoft/apm-action", "Should not reference microsoft/apm-action")
})
- t.Run("Pack step uses cascading fallback GITHUB_TOKEN without github-app", func(t *testing.T) {
+ t.Run("Pack step uses cascading fallback GITHUB_APM_PAT without github-app", func(t *testing.T) {
apmDeps := &APMDependenciesInfo{
Packages: []string{"microsoft/apm-sample-package"},
}
@@ -521,7 +527,7 @@ func TestGenerateAPMPackStepWithGitHubApp(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN:", "Should have GITHUB_TOKEN with cascading fallback")
+ assert.Contains(t, combined, "GITHUB_APM_PAT:", "Should have GITHUB_APM_PAT with cascading fallback")
assert.Contains(t, combined, "GH_AW_PLUGINS_TOKEN", "Should reference cascading token")
assert.Contains(t, combined, "GH_AW_GITHUB_TOKEN", "Should reference cascading token")
assert.NotContains(t, combined, "apm-app-token", "Should not reference app token without github-app")
@@ -698,7 +704,7 @@ func TestExtractAPMDependenciesEnv(t *testing.T) {
}
func TestGenerateAPMPackStepWithEnv(t *testing.T) {
- t.Run("Pack step includes user env vars and cascading GITHUB_TOKEN", func(t *testing.T) {
+ t.Run("Pack step includes user env vars and cascading GITHUB_APM_PAT", func(t *testing.T) {
apmDeps := &APMDependenciesInfo{
Packages: []string{"microsoft/apm-sample-package"},
Env: map[string]string{
@@ -715,7 +721,7 @@ func TestGenerateAPMPackStepWithEnv(t *testing.T) {
assert.Contains(t, combined, "env:", "Should have env section")
assert.Contains(t, combined, "MY_TOKEN: ${{ secrets.MY_TOKEN }}", "Should include MY_TOKEN env var")
assert.Contains(t, combined, "REGISTRY: https://registry.example.com", "Should include REGISTRY env var")
- assert.Contains(t, combined, "GITHUB_TOKEN:", "Should have GITHUB_TOKEN with cascading fallback")
+ assert.Contains(t, combined, "GITHUB_APM_PAT:", "Should have GITHUB_APM_PAT with cascading fallback")
assert.Contains(t, combined, "GH_AW_PLUGINS_TOKEN", "Cascading fallback should include GH_AW_PLUGINS_TOKEN")
})
@@ -736,7 +742,7 @@ func TestGenerateAPMPackStepWithEnv(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN: ${{ steps.apm-app-token.outputs.token }}", "Should have GITHUB_TOKEN from app")
+ assert.Contains(t, combined, "GITHUB_APM_PAT: ${{ steps.apm-app-token.outputs.token }}", "Should have GITHUB_APM_PAT from app")
assert.Contains(t, combined, "EXTRA: value", "Should include user env var")
})
@@ -764,7 +770,7 @@ func TestGenerateAPMPackStepWithEnv(t *testing.T) {
assert.True(t, aPos < mPos && mPos < zPos, "Env vars should be sorted alphabetically")
})
- t.Run("GITHUB_TOKEN in user env is skipped when github-app is configured", func(t *testing.T) {
+ t.Run("GITHUB_APM_PAT in user env is skipped when github-app is configured", func(t *testing.T) {
apmDeps := &APMDependenciesInfo{
Packages: []string{"acme-org/acme-skills"},
GitHubApp: &GitHubAppConfig{
@@ -772,8 +778,8 @@ func TestGenerateAPMPackStepWithEnv(t *testing.T) {
PrivateKey: "${{ secrets.APP_PRIVATE_KEY }}",
},
Env: map[string]string{
- "GITHUB_TOKEN": "should-be-skipped",
- "OTHER_VAR": "kept",
+ "GITHUB_APM_PAT": "should-be-skipped",
+ "OTHER_VAR": "kept",
},
}
data := &WorkflowData{Name: "test-workflow"}
@@ -782,11 +788,11 @@ func TestGenerateAPMPackStepWithEnv(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN: ${{ steps.apm-app-token.outputs.token }}", "Should have GITHUB_TOKEN from app token, not user env")
- assert.NotContains(t, combined, "should-be-skipped", "User-supplied GITHUB_TOKEN value should be absent")
+ assert.Contains(t, combined, "GITHUB_APM_PAT: ${{ steps.apm-app-token.outputs.token }}", "Should have GITHUB_APM_PAT from app token, not user env")
+ assert.NotContains(t, combined, "should-be-skipped", "User-supplied GITHUB_APM_PAT value should be absent")
assert.Contains(t, combined, "OTHER_VAR: kept", "Other user env vars should be present")
- count := strings.Count(combined, "GITHUB_TOKEN:")
- assert.Equal(t, 1, count, "GITHUB_TOKEN should appear exactly once")
+ count := strings.Count(combined, "GITHUB_APM_PAT:")
+ assert.Equal(t, 1, count, "GITHUB_APM_PAT should appear exactly once")
})
}
@@ -842,7 +848,7 @@ func TestGetEffectiveAPMGitHubToken(t *testing.T) {
}
func TestGenerateAPMPackStepWithGitHubToken(t *testing.T) {
- t.Run("Pack step uses custom github-token when specified", func(t *testing.T) {
+ t.Run("Pack step uses custom github-token as GITHUB_APM_PAT when specified", func(t *testing.T) {
apmDeps := &APMDependenciesInfo{
Packages: []string{"microsoft/apm-sample-package"},
GitHubToken: "${{ secrets.MY_TOKEN }}",
@@ -853,7 +859,7 @@ func TestGenerateAPMPackStepWithGitHubToken(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN: ${{ secrets.MY_TOKEN }}", "Should use custom token directly")
+ assert.Contains(t, combined, "GITHUB_APM_PAT: ${{ secrets.MY_TOKEN }}", "Should use custom token directly as GITHUB_APM_PAT")
assert.NotContains(t, combined, "apm-app-token", "Should not reference app token")
})
@@ -867,7 +873,7 @@ func TestGenerateAPMPackStepWithGitHubToken(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN:", "Should have GITHUB_TOKEN")
+ assert.Contains(t, combined, "GITHUB_APM_PAT:", "Should have GITHUB_APM_PAT")
assert.Contains(t, combined, "GH_AW_PLUGINS_TOKEN", "Should include GH_AW_PLUGINS_TOKEN in cascade")
})
@@ -886,7 +892,7 @@ func TestGenerateAPMPackStepWithGitHubToken(t *testing.T) {
require.NotEmpty(t, step, "Step should not be empty")
combined := combineStepLines(step)
- assert.Contains(t, combined, "GITHUB_TOKEN: ${{ steps.apm-app-token.outputs.token }}", "github-app token should take priority")
+ assert.Contains(t, combined, "GITHUB_APM_PAT: ${{ steps.apm-app-token.outputs.token }}", "github-app token should take priority")
assert.NotContains(t, combined, "secrets.MY_TOKEN", "Custom github-token should not appear when github-app is configured")
})
}
diff --git a/pkg/workflow/compiler_apm_job.go b/pkg/workflow/compiler_apm_job.go
index 50f1c192224..c9ffc98aae3 100644
--- a/pkg/workflow/compiler_apm_job.go
+++ b/pkg/workflow/compiler_apm_job.go
@@ -13,8 +13,9 @@ var compilerAPMJobLog = logger.New("workflow:compiler_apm_job")
// dependencies into a bundle artifact. This job runs after the activation job and uploads
// the packed bundle so the agent job can download and restore it.
//
-// The APM job uses minimal permissions ({}) because all required tokens are passed
-// explicitly via env/secrets rather than relying on the workflow's GITHUB_TOKEN scope.
+// The APM job requires the gh-aw setup action to copy .cjs scripts to $RUNNER_TEMP/gh-aw/actions/
+// so that apm_install.cjs and apm_pack.cjs can be required by the github-script step.
+// In dev/script mode this means adding a checkout step (contents: read) before setup.
func (c *Compiler) buildAPMJob(data *WorkflowData) (*Job, error) {
compilerAPMJobLog.Printf("Building APM job: %d packages", len(data.APMDependencies.Packages))
@@ -25,6 +26,17 @@ func (c *Compiler) buildAPMJob(data *WorkflowData) (*Job, error) {
var steps []string
+ // Add setup action to copy JavaScript files (apm_install.cjs, apm_pack.cjs, etc.)
+ // to $RUNNER_TEMP/gh-aw/actions/ before the github-script step runs.
+ setupActionRef := c.resolveActionReference("./actions/setup", data)
+ if setupActionRef != "" || c.actionMode.IsScript() {
+ // For dev/script mode (local action path), checkout the actions folder first
+ steps = append(steps, c.generateCheckoutActionsFolder(data)...)
+ // APM install (apm_install.cjs) requires @actions/github to create an Octokit client.
+ // Pass enableCustomTokens=true so the setup action runs `npm install @actions/github`.
+ steps = append(steps, c.generateSetupStep(setupActionRef, SetupActionDestination, true)...)
+ }
+
// Mint a GitHub App token before the pack step if a github-app is configured for APM.
// The APM job depends on activation, so it can reference needs.activation.outputs.target_repo_name
// instead of the activation-job-local steps.resolve-host-repo.outputs.target_repo_name.
@@ -46,7 +58,7 @@ func (c *Compiler) buildAPMJob(data *WorkflowData) (*Job, error) {
}
// Upload the packed APM bundle as a separate artifact for the agent job to download.
- // The path comes from the apm_pack step output `bundle-path`, which microsoft/apm-action
+ // The path comes from the apm_pack step output `bundle-path`, which apm_pack.cjs
// sets to the location of the packed .tar.gz archive.
// The APM job depends on activation, so it uses artifactPrefixExprForDownstreamJob.
compilerAPMJobLog.Print("Adding APM bundle artifact upload step")
@@ -75,10 +87,15 @@ func (c *Compiler) buildAPMJob(data *WorkflowData) (*Job, error) {
"GH_AW_INFO_APM_VERSION": apmVersion,
}
- // Minimal permissions: the APM job does not need any GitHub token scopes because
- // all tokens (for apm-action, create-github-app-token, upload-artifact) are either
- // passed explicitly via secrets/env or handled by the runner's ACTIONS_RUNTIME_TOKEN.
- permissions := NewPermissionsEmpty().RenderToYAML()
+ // Permissions: start with empty (minimal privilege) and add contents: read when a
+ // checkout step is needed to copy the gh-aw actions folder (dev/script mode).
+ var permissions string
+ needsContentsRead := (c.actionMode.IsDev() || c.actionMode.IsScript()) && len(c.generateCheckoutActionsFolder(data)) > 0
+ if needsContentsRead {
+ permissions = NewPermissionsContentsRead().RenderToYAML()
+ } else {
+ permissions = NewPermissionsEmpty().RenderToYAML()
+ }
job := &Job{
Name: string(constants.APMJobName),