diff --git a/.github/workflows/ab-testing-advisor.lock.yml b/.github/workflows/ab-testing-advisor.lock.yml
index 549bce70b4..97983797a7 100644
--- a/.github/workflows/ab-testing-advisor.lock.yml
+++ b/.github/workflows/ab-testing-advisor.lock.yml
@@ -485,6 +485,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/ace-editor.lock.yml b/.github/workflows/ace-editor.lock.yml
index 982df133fc..27b7aace88 100644
--- a/.github/workflows/ace-editor.lock.yml
+++ b/.github/workflows/ace-editor.lock.yml
@@ -481,6 +481,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/agent-performance-analyzer.lock.yml b/.github/workflows/agent-performance-analyzer.lock.yml
index acdb3ac543..053c2ecd70 100644
--- a/.github/workflows/agent-performance-analyzer.lock.yml
+++ b/.github/workflows/agent-performance-analyzer.lock.yml
@@ -599,6 +599,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/architecture-guardian.lock.yml b/.github/workflows/architecture-guardian.lock.yml
index 6b41ed001f..677b399ca7 100644
--- a/.github/workflows/architecture-guardian.lock.yml
+++ b/.github/workflows/architecture-guardian.lock.yml
@@ -478,6 +478,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/aw-failure-investigator.lock.yml b/.github/workflows/aw-failure-investigator.lock.yml
index ea0b5acb4f..f986725637 100644
--- a/.github/workflows/aw-failure-investigator.lock.yml
+++ b/.github/workflows/aw-failure-investigator.lock.yml
@@ -531,6 +531,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/bot-detection.lock.yml b/.github/workflows/bot-detection.lock.yml
index 9b94d12341..d1423ce89c 100644
--- a/.github/workflows/bot-detection.lock.yml
+++ b/.github/workflows/bot-detection.lock.yml
@@ -480,6 +480,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/breaking-change-checker.lock.yml b/.github/workflows/breaking-change-checker.lock.yml
index bb644c1ff4..65086bffba 100644
--- a/.github/workflows/breaking-change-checker.lock.yml
+++ b/.github/workflows/breaking-change-checker.lock.yml
@@ -468,6 +468,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml
index ccb211680f..5da79760c8 100644
--- a/.github/workflows/ci-doctor.lock.yml
+++ b/.github/workflows/ci-doctor.lock.yml
@@ -609,6 +609,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml
index 6493704397..f8cc3f2423 100644
--- a/.github/workflows/cli-consistency-checker.lock.yml
+++ b/.github/workflows/cli-consistency-checker.lock.yml
@@ -454,6 +454,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/cli-version-checker.lock.yml b/.github/workflows/cli-version-checker.lock.yml
index 83b2b17e24..3be0a34701 100644
--- a/.github/workflows/cli-version-checker.lock.yml
+++ b/.github/workflows/cli-version-checker.lock.yml
@@ -492,6 +492,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/codex-github-remote-mcp-test.lock.yml b/.github/workflows/codex-github-remote-mcp-test.lock.yml
index 7d587df62e..b8b0b8a1c4 100644
--- a/.github/workflows/codex-github-remote-mcp-test.lock.yml
+++ b/.github/workflows/codex-github-remote-mcp-test.lock.yml
@@ -454,6 +454,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/contribution-check.lock.yml b/.github/workflows/contribution-check.lock.yml
index 3cfd842185..46b934ce15 100644
--- a/.github/workflows/contribution-check.lock.yml
+++ b/.github/workflows/contribution-check.lock.yml
@@ -576,6 +576,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/copilot-opt.lock.yml b/.github/workflows/copilot-opt.lock.yml
index f0a58b911e..ce65f1ad74 100644
--- a/.github/workflows/copilot-opt.lock.yml
+++ b/.github/workflows/copilot-opt.lock.yml
@@ -513,6 +513,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/copilot-token-audit.lock.yml b/.github/workflows/copilot-token-audit.lock.yml
index b542bbf141..64e554f35a 100644
--- a/.github/workflows/copilot-token-audit.lock.yml
+++ b/.github/workflows/copilot-token-audit.lock.yml
@@ -573,6 +573,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/copilot-token-optimizer.lock.yml b/.github/workflows/copilot-token-optimizer.lock.yml
index 738e47baf6..e5f13e57cc 100644
--- a/.github/workflows/copilot-token-optimizer.lock.yml
+++ b/.github/workflows/copilot-token-optimizer.lock.yml
@@ -506,6 +506,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-architecture-diagram.lock.yml b/.github/workflows/daily-architecture-diagram.lock.yml
index 89d800a955..fbd38f57da 100644
--- a/.github/workflows/daily-architecture-diagram.lock.yml
+++ b/.github/workflows/daily-architecture-diagram.lock.yml
@@ -528,6 +528,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-aw-cross-repo-compile-check.lock.yml b/.github/workflows/daily-aw-cross-repo-compile-check.lock.yml
index 7e2d6f7c67..1cdbbeaa0c 100644
--- a/.github/workflows/daily-aw-cross-repo-compile-check.lock.yml
+++ b/.github/workflows/daily-aw-cross-repo-compile-check.lock.yml
@@ -504,6 +504,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-cache-strategy-analyzer.lock.yml b/.github/workflows/daily-cache-strategy-analyzer.lock.yml
index 18db649626..ccbd61a1ac 100644
--- a/.github/workflows/daily-cache-strategy-analyzer.lock.yml
+++ b/.github/workflows/daily-cache-strategy-analyzer.lock.yml
@@ -608,6 +608,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
@@ -1429,18 +1432,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_de64d9d18b620726_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_064564c6343901d3_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_de64d9d18b620726_EOF
+ GH_AW_MCP_CONFIG_064564c6343901d3_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_b478abb36de9495d_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_29baf567a663cbb4_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1451,11 +1454,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_b478abb36de9495d_EOF
+ GH_AW_MCP_CONFIG_29baf567a663cbb4_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_73dbda43b4bb5649_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_44a94b5204e38d49_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1465,7 +1468,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_73dbda43b4bb5649_EOF
+ GH_AW_CODEX_SHELL_POLICY_44a94b5204e38d49_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
diff --git a/.github/workflows/daily-cli-performance.lock.yml b/.github/workflows/daily-cli-performance.lock.yml
index e694b94c84..ed064f3dec 100644
--- a/.github/workflows/daily-cli-performance.lock.yml
+++ b/.github/workflows/daily-cli-performance.lock.yml
@@ -586,6 +586,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-cli-tools-tester.lock.yml b/.github/workflows/daily-cli-tools-tester.lock.yml
index 2d6850ef46..add9ac377c 100644
--- a/.github/workflows/daily-cli-tools-tester.lock.yml
+++ b/.github/workflows/daily-cli-tools-tester.lock.yml
@@ -570,6 +570,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-community-attribution.lock.yml b/.github/workflows/daily-community-attribution.lock.yml
index cb534351a5..ff83904d3a 100644
--- a/.github/workflows/daily-community-attribution.lock.yml
+++ b/.github/workflows/daily-community-attribution.lock.yml
@@ -561,6 +561,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-doc-healer.lock.yml b/.github/workflows/daily-doc-healer.lock.yml
index 33158c6a38..4fecab5a4e 100644
--- a/.github/workflows/daily-doc-healer.lock.yml
+++ b/.github/workflows/daily-doc-healer.lock.yml
@@ -536,6 +536,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 859b3bc33d..d1c9023e49 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -522,6 +522,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-function-namer.lock.yml b/.github/workflows/daily-function-namer.lock.yml
index 60b21fa4e0..854f747b63 100644
--- a/.github/workflows/daily-function-namer.lock.yml
+++ b/.github/workflows/daily-function-namer.lock.yml
@@ -567,6 +567,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-grafana-otel-instrumentation-advisor.lock.yml b/.github/workflows/daily-grafana-otel-instrumentation-advisor.lock.yml
index 0c8b63a20d..6a5c807db0 100644
--- a/.github/workflows/daily-grafana-otel-instrumentation-advisor.lock.yml
+++ b/.github/workflows/daily-grafana-otel-instrumentation-advisor.lock.yml
@@ -515,6 +515,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
index fe5f7c86ac..62e416a0c7 100644
--- a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
+++ b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml
@@ -578,6 +578,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-model-inventory.lock.yml b/.github/workflows/daily-model-inventory.lock.yml
index da9de5a452..2a698a0d84 100644
--- a/.github/workflows/daily-model-inventory.lock.yml
+++ b/.github/workflows/daily-model-inventory.lock.yml
@@ -504,6 +504,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index b43226c911..80913af5cb 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -529,6 +529,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-otel-instrumentation-advisor.lock.yml b/.github/workflows/daily-otel-instrumentation-advisor.lock.yml
index a0a6564c6c..aaaf39b26b 100644
--- a/.github/workflows/daily-otel-instrumentation-advisor.lock.yml
+++ b/.github/workflows/daily-otel-instrumentation-advisor.lock.yml
@@ -513,6 +513,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-safe-output-optimizer.lock.yml b/.github/workflows/daily-safe-output-optimizer.lock.yml
index 0d783e05dc..e9b1e8c9ac 100644
--- a/.github/workflows/daily-safe-output-optimizer.lock.yml
+++ b/.github/workflows/daily-safe-output-optimizer.lock.yml
@@ -612,6 +612,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-safe-outputs-conformance.lock.yml b/.github/workflows/daily-safe-outputs-conformance.lock.yml
index 7bfca01c16..d600deba41 100644
--- a/.github/workflows/daily-safe-outputs-conformance.lock.yml
+++ b/.github/workflows/daily-safe-outputs-conformance.lock.yml
@@ -506,6 +506,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-security-red-team.lock.yml b/.github/workflows/daily-security-red-team.lock.yml
index 6ea3b669be..07743b2dc9 100644
--- a/.github/workflows/daily-security-red-team.lock.yml
+++ b/.github/workflows/daily-security-red-team.lock.yml
@@ -538,6 +538,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-skill-optimizer.lock.yml b/.github/workflows/daily-skill-optimizer.lock.yml
index e5e7bd2d65..d180c49732 100644
--- a/.github/workflows/daily-skill-optimizer.lock.yml
+++ b/.github/workflows/daily-skill-optimizer.lock.yml
@@ -486,6 +486,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-spdd-spec-planner.lock.yml b/.github/workflows/daily-spdd-spec-planner.lock.yml
index 9c4f338386..e4e0027b30 100644
--- a/.github/workflows/daily-spdd-spec-planner.lock.yml
+++ b/.github/workflows/daily-spdd-spec-planner.lock.yml
@@ -490,6 +490,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-subagent-optimizer.lock.yml b/.github/workflows/daily-subagent-optimizer.lock.yml
index 86fbad6e34..1a886d798a 100644
--- a/.github/workflows/daily-subagent-optimizer.lock.yml
+++ b/.github/workflows/daily-subagent-optimizer.lock.yml
@@ -559,6 +559,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-syntax-error-quality.lock.yml b/.github/workflows/daily-syntax-error-quality.lock.yml
index cc7bde4cbc..87581a262d 100644
--- a/.github/workflows/daily-syntax-error-quality.lock.yml
+++ b/.github/workflows/daily-syntax-error-quality.lock.yml
@@ -515,6 +515,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-team-status.lock.yml b/.github/workflows/daily-team-status.lock.yml
index 391f12928b..07e39fda6f 100644
--- a/.github/workflows/daily-team-status.lock.yml
+++ b/.github/workflows/daily-team-status.lock.yml
@@ -519,6 +519,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-testify-uber-super-expert.lock.yml b/.github/workflows/daily-testify-uber-super-expert.lock.yml
index 71c89c41e1..22fb466f48 100644
--- a/.github/workflows/daily-testify-uber-super-expert.lock.yml
+++ b/.github/workflows/daily-testify-uber-super-expert.lock.yml
@@ -546,6 +546,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/daily-token-consumption-report.lock.yml b/.github/workflows/daily-token-consumption-report.lock.yml
index d208bc1381..e02b4bc012 100644
--- a/.github/workflows/daily-token-consumption-report.lock.yml
+++ b/.github/workflows/daily-token-consumption-report.lock.yml
@@ -516,6 +516,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index e8a8defed2..7cd2c5cc35 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -673,6 +673,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/delight.lock.yml b/.github/workflows/delight.lock.yml
index 0820a767c8..9a54eeeef8 100644
--- a/.github/workflows/delight.lock.yml
+++ b/.github/workflows/delight.lock.yml
@@ -532,6 +532,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/dependabot-burner.lock.yml b/.github/workflows/dependabot-burner.lock.yml
index 613a129cf3..2abb8426ed 100644
--- a/.github/workflows/dependabot-burner.lock.yml
+++ b/.github/workflows/dependabot-burner.lock.yml
@@ -461,6 +461,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/dependabot-go-checker.lock.yml b/.github/workflows/dependabot-go-checker.lock.yml
index 6bb0decadc..d470a44e28 100644
--- a/.github/workflows/dependabot-go-checker.lock.yml
+++ b/.github/workflows/dependabot-go-checker.lock.yml
@@ -484,6 +484,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/deployment-incident-monitor.lock.yml b/.github/workflows/deployment-incident-monitor.lock.yml
index 0030799a3c..4515d46aae 100644
--- a/.github/workflows/deployment-incident-monitor.lock.yml
+++ b/.github/workflows/deployment-incident-monitor.lock.yml
@@ -468,6 +468,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index 52a75b891a..2edea8b452 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -517,6 +517,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/discussion-task-miner.lock.yml b/.github/workflows/discussion-task-miner.lock.yml
index 3851ec0720..83bc13a16c 100644
--- a/.github/workflows/discussion-task-miner.lock.yml
+++ b/.github/workflows/discussion-task-miner.lock.yml
@@ -510,6 +510,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/duplicate-code-detector.lock.yml b/.github/workflows/duplicate-code-detector.lock.yml
index 8137adf822..9c0c7f1e0d 100644
--- a/.github/workflows/duplicate-code-detector.lock.yml
+++ b/.github/workflows/duplicate-code-detector.lock.yml
@@ -507,6 +507,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
@@ -1317,18 +1320,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_c51711226436d0bd_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_537488539a145e30_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_c51711226436d0bd_EOF
+ GH_AW_MCP_CONFIG_537488539a145e30_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_4bfa2b3d3ee1ceab_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_0cfa5d99353d01ee_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1339,11 +1342,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_4bfa2b3d3ee1ceab_EOF
+ GH_AW_MCP_CONFIG_0cfa5d99353d01ee_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_ccbf946e47882948_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_09f9b523bfe32508_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1353,7 +1356,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_ccbf946e47882948_EOF
+ GH_AW_CODEX_SHELL_POLICY_09f9b523bfe32508_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
diff --git a/.github/workflows/example-permissions-warning.lock.yml b/.github/workflows/example-permissions-warning.lock.yml
index a241da1317..d8bbc6012f 100644
--- a/.github/workflows/example-permissions-warning.lock.yml
+++ b/.github/workflows/example-permissions-warning.lock.yml
@@ -453,6 +453,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml
index 5858ca7169..3dfd280c45 100644
--- a/.github/workflows/firewall.lock.yml
+++ b/.github/workflows/firewall.lock.yml
@@ -455,6 +455,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml
index ad4b949159..1dedc1b749 100644
--- a/.github/workflows/go-pattern-detector.lock.yml
+++ b/.github/workflows/go-pattern-detector.lock.yml
@@ -473,6 +473,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/gpclean.lock.yml b/.github/workflows/gpclean.lock.yml
index f3bb19fd36..1d488b3bf5 100644
--- a/.github/workflows/gpclean.lock.yml
+++ b/.github/workflows/gpclean.lock.yml
@@ -492,6 +492,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/hippo-embed.lock.yml b/.github/workflows/hippo-embed.lock.yml
index d48c3a635a..daab6ea0da 100644
--- a/.github/workflows/hippo-embed.lock.yml
+++ b/.github/workflows/hippo-embed.lock.yml
@@ -488,6 +488,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/issue-arborist.lock.yml b/.github/workflows/issue-arborist.lock.yml
index 8a6ba2e259..0dda81456c 100644
--- a/.github/workflows/issue-arborist.lock.yml
+++ b/.github/workflows/issue-arborist.lock.yml
@@ -602,6 +602,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
@@ -1386,18 +1389,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_674a878025566fc9_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_d9da00b71d42dc5e_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_674a878025566fc9_EOF
+ GH_AW_MCP_CONFIG_d9da00b71d42dc5e_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_9e5f38cce10c70fd_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_399e71eeb47db0ab_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1408,11 +1411,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_9e5f38cce10c70fd_EOF
+ GH_AW_MCP_CONFIG_399e71eeb47db0ab_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_085777e59741e8b5_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_4f484691bd2f358a_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1422,7 +1425,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_085777e59741e8b5_EOF
+ GH_AW_CODEX_SHELL_POLICY_4f484691bd2f358a_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
diff --git a/.github/workflows/metrics-collector.lock.yml b/.github/workflows/metrics-collector.lock.yml
index 78c08f98f1..c23ed0ffdd 100644
--- a/.github/workflows/metrics-collector.lock.yml
+++ b/.github/workflows/metrics-collector.lock.yml
@@ -557,6 +557,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/plan.lock.yml b/.github/workflows/plan.lock.yml
index f32f56743d..1ddfe425fb 100644
--- a/.github/workflows/plan.lock.yml
+++ b/.github/workflows/plan.lock.yml
@@ -543,6 +543,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index 55584907cb..930998360d 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -678,6 +678,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/pr-triage-agent.lock.yml b/.github/workflows/pr-triage-agent.lock.yml
index 1f3e7f0bf6..4bc883e872 100644
--- a/.github/workflows/pr-triage-agent.lock.yml
+++ b/.github/workflows/pr-triage-agent.lock.yml
@@ -558,6 +558,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/refactoring-cadence.lock.yml b/.github/workflows/refactoring-cadence.lock.yml
index 84205b732e..6790d4d6cf 100644
--- a/.github/workflows/refactoring-cadence.lock.yml
+++ b/.github/workflows/refactoring-cadence.lock.yml
@@ -491,6 +491,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/security-compliance.lock.yml b/.github/workflows/security-compliance.lock.yml
index 0fccf20b16..206ee038a9 100644
--- a/.github/workflows/security-compliance.lock.yml
+++ b/.github/workflows/security-compliance.lock.yml
@@ -500,6 +500,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/semantic-function-refactor.lock.yml b/.github/workflows/semantic-function-refactor.lock.yml
index 50a80d86d4..4f1e455486 100644
--- a/.github/workflows/semantic-function-refactor.lock.yml
+++ b/.github/workflows/semantic-function-refactor.lock.yml
@@ -520,6 +520,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/sergo.lock.yml b/.github/workflows/sergo.lock.yml
index a348dd6907..4144e74964 100644
--- a/.github/workflows/sergo.lock.yml
+++ b/.github/workflows/sergo.lock.yml
@@ -569,6 +569,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-ci.lock.yml b/.github/workflows/smoke-ci.lock.yml
index e88635d534..d632d41a6a 100644
--- a/.github/workflows/smoke-ci.lock.yml
+++ b/.github/workflows/smoke-ci.lock.yml
@@ -609,6 +609,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 1a747fdb06..8de9b4b2f0 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -1188,6 +1188,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index 0af2aa0904..bb36075cf5 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"5654399396a1259c6dd48c4a40a088432604138bc7c3df639ce30480fcb35914","agent_id":"codex"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1966a5593470eb9a658ba884daacb4988874da9f13de43dff1b94bc3d215f2e8","agent_id":"codex"}
# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions-ecosystem/action-add-labels","sha":"c96b68fec76a0987cd93957189e9abd0b9a72ff1","version":"v1.1.3"},{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9.0.0"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.41","digest":"sha256:cb2b565d070116d4b67e355775340528b5a2c3cb18b2c9049638bcc2df681770","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.41@sha256:cb2b565d070116d4b67e355775340528b5a2c3cb18b2c9049638bcc2df681770"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.41","digest":"sha256:fadd0de387209f69a9a7a1b8722bb5e7fdfb80ba9749a5c60f0e4cd7582a74d0","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.41@sha256:fadd0de387209f69a9a7a1b8722bb5e7fdfb80ba9749a5c60f0e4cd7582a74d0"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.41","digest":"sha256:1260445d25968dbf3ae70143964177a0e5914cf2ce07a6117f7d3caec6c3e3c4","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.41@sha256:1260445d25968dbf3ae70143964177a0e5914cf2ce07a6117f7d3caec6c3e3c4"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.6","diges
t":"sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
@@ -260,25 +260,25 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_86f34cc6d485a7e9_EOF'
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_86f34cc6d485a7e9_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/playwright_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_86f34cc6d485a7e9_EOF'
- Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop, add_smoked_label
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), set_issue_field, missing_tool, missing_data, noop, add_smoked_label
+ GH_AW_PROMPT_86f34cc6d485a7e9_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_comment_memory.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_86f34cc6d485a7e9_EOF'
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_86f34cc6d485a7e9_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_86f34cc6d485a7e9_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -310,9 +310,9 @@ jobs:
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_86f34cc6d485a7e9_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_86f34cc6d485a7e9_EOF'
## Serena Code Analysis
@@ -353,7 +353,7 @@ jobs:
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/smoke-codex.md}}
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_86f34cc6d485a7e9_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
@@ -609,9 +609,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_990c1eb48269b369_EOF'
- {"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"add_smoked_label":true,"comment_memory":{"max":1,"memory_id":"default"},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"report_incomplete":{},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_990c1eb48269b369_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_80cde1e004b80053_EOF'
+ {"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"add_smoked_label":true,"comment_memory":{"max":1,"memory_id":"default"},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"report_incomplete":{},"set_issue_field":{"allowed_fields":["*"],"max":1},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_80cde1e004b80053_EOF
- name: Generate Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -620,7 +620,8 @@ jobs:
"add_comment": " CONSTRAINTS: Maximum 2 comment(s) can be added. Supports reply_to_id for discussion threading.",
"add_labels": " CONSTRAINTS: Only these labels are allowed: [\"smoke-codex\"].",
"create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Labels [\"automation\" \"testing\"] will be automatically added.",
- "remove_labels": " CONSTRAINTS: Only these labels can be removed: [smoke]."
+ "remove_labels": " CONSTRAINTS: Only these labels can be removed: [smoke].",
+ "set_issue_field": " CONSTRAINTS: Maximum 1 issue field update(s) can be made. Any issue field is allowed."
},
"repo_params": {},
"dynamic_tools": [
@@ -724,6 +725,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
@@ -865,6 +869,34 @@ jobs:
}
}
},
+ "set_issue_field": {
+ "defaultMax": 5,
+ "fields": {
+ "field_name": {
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 128
+ },
+ "field_node_id": {
+ "type": "string",
+ "maxLength": 256
+ },
+ "issue_number": {
+ "issueOrPRNumber": true
+ },
+ "repo": {
+ "type": "string",
+ "maxLength": 256
+ },
+ "value": {
+ "required": true,
+ "type": "string",
+ "sanitize": true,
+ "maxLength": 256
+ }
+ },
+ "customValidation": "requiresOneOf:field_name,field_node_id"
+ },
"unassign_from_user": {
"defaultMax": 1,
"fields": {
@@ -939,7 +971,7 @@ jobs:
- name: Write MCP Scripts Config
run: |
mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-scripts/logs"
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_ea62428d6b567c2d_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_3324323e1546af0f_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -969,8 +1001,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_ea62428d6b567c2d_EOF
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_9e9be9620ff39e25_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_3324323e1546af0f_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_553d1c1cd9e241b0_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -984,12 +1016,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_9e9be9620ff39e25_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_553d1c1cd9e241b0_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs"
- name: Write MCP Scripts Tool Files
run: |
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_e06f151e3fec9952_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_40837874df1ce5fa_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1001,7 +1033,7 @@ jobs:
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_e06f151e3fec9952_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_40837874df1ce5fa_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh"
- name: Generate MCP Scripts Server Config
@@ -1074,7 +1106,7 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e CODEX_HOME -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_732c588478440710_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_d98dadbd88963449_EOF
[history]
persistence = "none"
@@ -1126,11 +1158,11 @@ jobs:
[mcp_servers.serena."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_732c588478440710_EOF
+ GH_AW_MCP_CONFIG_d98dadbd88963449_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_732c588478440710_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_d98dadbd88963449_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -1216,11 +1248,11 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_732c588478440710_EOF
+ GH_AW_MCP_CONFIG_d98dadbd88963449_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_db31288df1eb158b_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_01e7883d1058b4a5_EOF
model_provider = "openai-proxy"
@@ -1232,7 +1264,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "GH_AW_ASSETS_ALLOWED_EXTS", "GH_AW_ASSETS_BRANCH", "GH_AW_ASSETS_MAX_SIZE_KB", "GH_AW_SAFE_OUTPUTS", "GITHUB_PERSONAL_ACCESS_TOKEN", "GITHUB_REPOSITORY", "GITHUB_SERVER_URL", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_db31288df1eb158b_EOF
+ GH_AW_CODEX_SHELL_POLICY_01e7883d1058b4a5_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -1825,18 +1857,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_61d3fa0129c5a16a_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_15e70be1e1ac16d4_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_61d3fa0129c5a16a_EOF
+ GH_AW_MCP_CONFIG_15e70be1e1ac16d4_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_db4017edd1dcde18_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_4770171e6c750118_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1847,11 +1879,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_db4017edd1dcde18_EOF
+ GH_AW_MCP_CONFIG_4770171e6c750118_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_4d97817116a0a7b0_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_02ebfa55b244cbed_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1861,7 +1893,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_4d97817116a0a7b0_EOF
+ GH_AW_CODEX_SHELL_POLICY_02ebfa55b244cbed_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -2068,7 +2100,7 @@ jobs:
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
GH_AW_SAFE_OUTPUT_ACTIONS: "{\"add_smoked_label\":\"add_smoked_label\"}"
- GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-codex\"]},\"comment_memory\":{\"max\":1,\"memory_id\":\"default\"},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-codex\",\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"hide_comment\":{\"max\":5},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"report_incomplete\":{},\"unassign_from_user\":{\"allowed\":[\"githubactionagent\"],\"max\":1}}"
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-codex\"]},\"comment_memory\":{\"max\":1,\"memory_id\":\"default\"},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-codex\",\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"hide_comment\":{\"max\":5},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"report_incomplete\":{},\"set_issue_field\":{\"allowed_fields\":[\"*\"],\"max\":1},\"unassign_from_user\":{\"allowed\":[\"githubactionagent\"],\"max\":1}}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md
index 57da2faf94..d4b2938989 100644
--- a/.github/workflows/smoke-codex.md
+++ b/.github/workflows/smoke-codex.md
@@ -53,6 +53,9 @@ safe-outputs:
close-older-issues: true
close-older-key: "smoke-codex"
labels: [automation, testing]
+ set-issue-field:
+ max: 1
+ allowed-fields: ["*"]
add-labels:
allowed: [smoke-codex]
remove-labels:
@@ -99,16 +102,24 @@ checkout:
7. **Build gh-aw**: Run `GOCACHE=/tmp/go-cache GOMODCACHE=/tmp/go-mod make build` to verify the agent can successfully build the gh-aw project (both caches must be set to /tmp because the default cache locations are not writable). If the command fails, mark this test as ❌ and report the failure.
8. **Comment Memory Testing**: Append an original 3-line haiku to the comment-memory markdown file(s) in `/tmp/gh-aw/comment-memory/*.md` without removing existing content.
9. **Cache Memory Testing**:
- - Check if `/tmp/gh-aw/cache-memory/smoke-codex-history.json` exists; if it does, read it and note the previous run's results (run ID, timestamp, status)
- - Write current run results to `/tmp/gh-aw/cache-memory/smoke-codex-history.json` with content: `{"run_id": "${{ github.run_id }}", "timestamp": "", "status": "PASS or FAIL", "tests_passed": , "tests_failed": }` (create the parent directory if it doesn't exist)
- - Use bash to verify the file was written successfully (use `cat` to read it back)
+ - Check if `/tmp/gh-aw/cache-memory/smoke-codex-history.json` exists; if it does, read it and note the previous run's results (run ID, timestamp, status)
+ - Write current run results to `/tmp/gh-aw/cache-memory/smoke-codex-history.json` with content: `{"run_id": "${{ github.run_id }}", "timestamp": "", "status": "PASS or FAIL", "tests_passed": , "tests_failed": }` (create the parent directory if it doesn't exist)
+ - Use bash to verify the file was written successfully (use `cat` to read it back)
+10. **Set Issue Field Testing**:
+ - After creating the smoke-test issue, use `set_issue_field` exactly once on that new issue
+ - Discover available issue fields and choose one compatible field/value pair:
+ - text field → short text value
+ - number field → numeric value
+ - date field → `YYYY-MM-DD`
+ - single-select field → an existing option name
+ - If no editable issue fields are available, report this test as skipped with reason
## Output
**ALWAYS create an issue** with a summary of the smoke test run:
- Title: "Smoke Test: Codex - ${{ github.run_id }}"
- Body should include:
- - Test results (✅ or ❌ for each test, including test #9 Cache Memory)
+ - Test results (✅ or ❌ for each test, including test #9 Cache Memory and test #10 Set Issue Field)
- Overall status: PASS or FAIL
- Run URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
- Timestamp
diff --git a/.github/workflows/smoke-copilot-arm.lock.yml b/.github/workflows/smoke-copilot-arm.lock.yml
index aac516be88..21d935e0c4 100644
--- a/.github/workflows/smoke-copilot-arm.lock.yml
+++ b/.github/workflows/smoke-copilot-arm.lock.yml
@@ -792,6 +792,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-create-cross-repo-pr.lock.yml b/.github/workflows/smoke-create-cross-repo-pr.lock.yml
index 87cdc4ca02..12d169ff3d 100644
--- a/.github/workflows/smoke-create-cross-repo-pr.lock.yml
+++ b/.github/workflows/smoke-create-cross-repo-pr.lock.yml
@@ -551,6 +551,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-crush.lock.yml b/.github/workflows/smoke-crush.lock.yml
index 8f3977fb14..2a8195aba7 100644
--- a/.github/workflows/smoke-crush.lock.yml
+++ b/.github/workflows/smoke-crush.lock.yml
@@ -561,6 +561,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-gemini.lock.yml b/.github/workflows/smoke-gemini.lock.yml
index 28c652be3e..39edf38165 100644
--- a/.github/workflows/smoke-gemini.lock.yml
+++ b/.github/workflows/smoke-gemini.lock.yml
@@ -602,6 +602,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-opencode.lock.yml b/.github/workflows/smoke-opencode.lock.yml
index b89361209c..87b2c2427a 100644
--- a/.github/workflows/smoke-opencode.lock.yml
+++ b/.github/workflows/smoke-opencode.lock.yml
@@ -580,6 +580,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-pi.lock.yml b/.github/workflows/smoke-pi.lock.yml
index ca93745f54..b0d9b1c792 100644
--- a/.github/workflows/smoke-pi.lock.yml
+++ b/.github/workflows/smoke-pi.lock.yml
@@ -605,6 +605,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-project.lock.yml b/.github/workflows/smoke-project.lock.yml
index 408d814ca5..a2a3507414 100644
--- a/.github/workflows/smoke-project.lock.yml
+++ b/.github/workflows/smoke-project.lock.yml
@@ -581,6 +581,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-temporary-id.lock.yml b/.github/workflows/smoke-temporary-id.lock.yml
index b77be79cd2..b6fea46993 100644
--- a/.github/workflows/smoke-temporary-id.lock.yml
+++ b/.github/workflows/smoke-temporary-id.lock.yml
@@ -550,6 +550,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-update-cross-repo-pr.lock.yml b/.github/workflows/smoke-update-cross-repo-pr.lock.yml
index 2874fc4323..7523e2f5bc 100644
--- a/.github/workflows/smoke-update-cross-repo-pr.lock.yml
+++ b/.github/workflows/smoke-update-cross-repo-pr.lock.yml
@@ -581,6 +581,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/smoke-workflow-call-with-inputs.lock.yml b/.github/workflows/smoke-workflow-call-with-inputs.lock.yml
index 1f7ea62eca..d58b785490 100644
--- a/.github/workflows/smoke-workflow-call-with-inputs.lock.yml
+++ b/.github/workflows/smoke-workflow-call-with-inputs.lock.yml
@@ -531,6 +531,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/spec-librarian.lock.yml b/.github/workflows/spec-librarian.lock.yml
index 100d3afb2a..189e010ea6 100644
--- a/.github/workflows/spec-librarian.lock.yml
+++ b/.github/workflows/spec-librarian.lock.yml
@@ -507,6 +507,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index cc95315297..65d12f5eb0 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -681,6 +681,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index ad0d856045..f6507ea0ba 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -571,6 +571,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/step-name-alignment.lock.yml b/.github/workflows/step-name-alignment.lock.yml
index 0bad3ad6a5..367efabe4f 100644
--- a/.github/workflows/step-name-alignment.lock.yml
+++ b/.github/workflows/step-name-alignment.lock.yml
@@ -481,6 +481,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index f3eadb6220..3f388c514a 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -503,6 +503,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/test-workflow.lock.yml b/.github/workflows/test-workflow.lock.yml
index 2dc64136ee..e1f97a9fd9 100644
--- a/.github/workflows/test-workflow.lock.yml
+++ b/.github/workflows/test-workflow.lock.yml
@@ -454,6 +454,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/video-analyzer.lock.yml b/.github/workflows/video-analyzer.lock.yml
index 87810deefd..f7fb9c034f 100644
--- a/.github/workflows/video-analyzer.lock.yml
+++ b/.github/workflows/video-analyzer.lock.yml
@@ -475,6 +475,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/workflow-health-manager.lock.yml b/.github/workflows/workflow-health-manager.lock.yml
index 55751b4581..f4b885e9b5 100644
--- a/.github/workflows/workflow-health-manager.lock.yml
+++ b/.github/workflows/workflow-health-manager.lock.yml
@@ -520,6 +520,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/workflow-normalizer.lock.yml b/.github/workflows/workflow-normalizer.lock.yml
index e64381fa14..f806d71814 100644
--- a/.github/workflows/workflow-normalizer.lock.yml
+++ b/.github/workflows/workflow-normalizer.lock.yml
@@ -526,6 +526,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/.github/workflows/workflow-skill-extractor.lock.yml b/.github/workflows/workflow-skill-extractor.lock.yml
index b9d4dafb76..8b6f01b514 100644
--- a/.github/workflows/workflow-skill-extractor.lock.yml
+++ b/.github/workflows/workflow-skill-extractor.lock.yml
@@ -495,6 +495,9 @@ jobs:
"sanitize": true,
"maxLength": 65000
},
+ "fields": {
+ "type": "array"
+ },
"labels": {
"type": "array",
"itemType": "string",
diff --git a/actions/setup/js/allowed_issue_fields.cjs b/actions/setup/js/allowed_issue_fields.cjs
new file mode 100644
index 0000000000..1b1770f3df
--- /dev/null
+++ b/actions/setup/js/allowed_issue_fields.cjs
@@ -0,0 +1,64 @@
+// @ts-check
+///
+
+const { ERR_VALIDATION } = require("./error_codes.cjs");
+
+/**
+ * Parse allowed issue field names from config.
+ * @param {string[]|string|undefined} value
+ * @returns {string[]}
+ */
+function parseAllowedIssueFields(value) {
+ if (value == null || value === "") {
+ return [];
+ }
+ const raw = Array.isArray(value) ? value : String(value).split(",");
+ const uniqueFields = new Set();
+ for (const item of raw) {
+ const normalized = String(item).trim();
+ if (normalized) {
+ uniqueFields.add(normalized);
+ }
+ }
+ return [...uniqueFields];
+}
+
+/**
+ * Validate one issue field name against configured allowed-fields.
+ * @param {string} fieldName
+ * @param {string[]} allowedFields
+ * @returns {void}
+ */
+function validateAllowedIssueFieldName(fieldName, allowedFields) {
+ if (!fieldName) {
+ return;
+ }
+ if (!Array.isArray(allowedFields) || allowedFields.length === 0 || allowedFields.includes("*")) {
+ return;
+ }
+ const allowedFieldSet = new Set(allowedFields.map(field => field.toLowerCase()));
+ if (!allowedFieldSet.has(fieldName.toLowerCase())) {
+ throw new Error(`${ERR_VALIDATION}: issue field "${fieldName}" is not in the allowed-fields list: ${allowedFields.join(", ")}`);
+ }
+}
+
+/**
+ * Validate requested issue fields against configured allowed-fields.
+ * @param {Array<{name: string, value: string|number}>} issueFields
+ * @param {string[]} allowedFields
+ * @returns {void}
+ */
+function validateAllowedIssueFields(issueFields, allowedFields) {
+ if (!Array.isArray(issueFields) || issueFields.length === 0) {
+ return;
+ }
+ for (const field of issueFields) {
+ validateAllowedIssueFieldName(field.name, allowedFields);
+ }
+}
+
+module.exports = {
+ parseAllowedIssueFields,
+ validateAllowedIssueFieldName,
+ validateAllowedIssueFields,
+};
diff --git a/actions/setup/js/create_issue.cjs b/actions/setup/js/create_issue.cjs
index 92265e094b..7dc674307c 100644
--- a/actions/setup/js/create_issue.cjs
+++ b/actions/setup/js/create_issue.cjs
@@ -24,6 +24,7 @@ const { parseBoolTemplatable } = require("./templatable.cjs");
const { tryEnforceArrayLimit } = require("./limit_enforcement_helpers.cjs");
const { logStagedPreviewInfo } = require("./staged_preview.cjs");
const { isStagedMode } = require("./safe_output_helpers.cjs");
+const { parseAllowedIssueFields, validateAllowedIssueFields } = require("./allowed_issue_fields.cjs");
const { buildWorkflowRunUrl } = require("./workflow_metadata_helpers.cjs");
const { MAX_LABELS, MAX_ASSIGNEES } = require("./constants.cjs");
const { findAgent, getIssueDetails, assignAgentToIssue } = require("./assign_agent_helpers.cjs");
@@ -234,51 +235,6 @@ function normalizeIssueFields(fields) {
});
}
-/**
- * Parse allowed issue field names from config.
- * @param {string[]|string|undefined} value
- * @returns {string[]}
- */
-function parseAllowedIssueFields(value) {
- if (value == null || value === "") {
- return [];
- }
- const raw = Array.isArray(value) ? value : String(value).split(",");
- const uniqueFields = new Set();
- for (const item of raw) {
- const normalized = String(item).trim();
- if (normalized) {
- uniqueFields.add(normalized);
- }
- }
- return [...uniqueFields];
-}
-
-/**
- * Validate requested issue fields against configured allowed-fields.
- * @param {Array<{name: string, value: string|number}>} issueFields
- * @param {string[]} allowedFields
- * @returns {void}
- */
-function validateAllowedIssueFields(issueFields, allowedFields) {
- if (!Array.isArray(issueFields) || issueFields.length === 0) {
- return;
- }
- if (!Array.isArray(allowedFields) || allowedFields.length === 0 || allowedFields.includes("*")) {
- return;
- }
-
- // We intentionally normalize to lowercase for comparisons because issue field names
- // come from user-provided config/output and repository metadata, and should match
- // even when case differs (e.g., "priority" vs "Priority").
- const allowedFieldSet = new Set(allowedFields.map(field => field.toLowerCase()));
- for (const field of issueFields) {
- if (!allowedFieldSet.has(field.name.toLowerCase())) {
- throw new Error(`${ERR_VALIDATION}: issue field "${field.name}" is not in the allowed-fields list: ${allowedFields.join(", ")}`);
- }
- }
-}
-
/**
* Resolve issue node ID from issue number.
* Queries GraphQL for the issue node ID required by field mutations.
diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs
index e9fd0baae2..6009bd68f6 100644
--- a/actions/setup/js/safe_output_handler_manager.cjs
+++ b/actions/setup/js/safe_output_handler_manager.cjs
@@ -57,6 +57,7 @@ const HANDLER_MAP = {
mark_pull_request_as_ready_for_review: "./mark_pull_request_as_ready_for_review.cjs",
hide_comment: "./hide_comment.cjs",
set_issue_type: "./set_issue_type.cjs",
+ set_issue_field: "./set_issue_field.cjs",
add_reviewer: "./add_reviewer.cjs",
assign_milestone: "./assign_milestone.cjs",
assign_to_user: "./assign_to_user.cjs",
diff --git a/actions/setup/js/safe_outputs_tools.json b/actions/setup/js/safe_outputs_tools.json
index 033c55fd48..d2b12e12c3 100644
--- a/actions/setup/js/safe_outputs_tools.json
+++ b/actions/setup/js/safe_outputs_tools.json
@@ -1141,6 +1141,41 @@
"additionalProperties": false
}
},
+ {
+ "name": "set_issue_field",
+ "description": "Set a single GitHub issue field by name and value. Use field_name for discovery by field label (for example, \"Priority\"), or provide field_node_id to skip discovery. Supports text, number, date (YYYY-MM-DD), and single-select fields (value must match an option name).",
+ "inputSchema": {
+ "type": "object",
+ "required": ["value"],
+ "properties": {
+ "issue_number": {
+ "type": ["number", "string"],
+ "description": "Issue number to set the field on. If omitted, targets the issue that triggered this workflow."
+ },
+ "field_name": {
+ "type": "string",
+ "description": "Issue field name to set (e.g., \"Priority\", \"Severity\", \"Customer Impact\")."
+ },
+ "field_node_id": {
+ "type": "string",
+ "description": "Optional GraphQL node ID of the issue field. Provide this to skip field-name discovery and set a field directly."
+ },
+ "value": {
+ "type": "string",
+ "description": "Field value to set. For single-select fields, this must match an existing option name. For date fields, use YYYY-MM-DD."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "update_project",
"description": "Manage GitHub Projects: add issues/pull requests/draft issues, update item fields (status, priority, effort, dates), manage custom fields, and create project views. Use this to organize work by adding items to projects, updating field values, creating custom fields up-front, and setting up project views (table, board, roadmap).\n\nThree modes: (1) Add or update project items with custom field values; (2) Create project fields; (3) Create project views. This is the primary tool for ProjectOps automation - add items to projects, set custom fields for tracking, and organize project boards.",
diff --git a/actions/setup/js/set_issue_field.cjs b/actions/setup/js/set_issue_field.cjs
new file mode 100644
index 0000000000..398234a2f4
--- /dev/null
+++ b/actions/setup/js/set_issue_field.cjs
@@ -0,0 +1,408 @@
+// @ts-check
+///
+
+/**
+ * @typedef {import('./types/handler-factory').HandlerFactoryFunction} HandlerFactoryFunction
+ */
+
+const { getErrorMessage } = require("./error_helpers.cjs");
+const { resolveTargetRepoConfig, resolveAndValidateRepo } = require("./repo_helpers.cjs");
+const { logStagedPreviewInfo } = require("./staged_preview.cjs");
+const { isStagedMode } = require("./safe_output_helpers.cjs");
+const { createAuthenticatedGitHubClient } = require("./handler_auth.cjs");
+const { parseAllowedIssueFields, validateAllowedIssueFieldName } = require("./allowed_issue_fields.cjs");
+const { loadTemporaryIdMapFromResolved, resolveRepoIssueTarget } = require("./temporary_id.cjs");
+
+/** @type {string} Safe output type handled by this module */
+const HANDLER_TYPE = "set_issue_field";
+
+/**
+ * Fetches the node ID of an issue for use in GraphQL mutations.
+ * @param {Object} githubClient - Authenticated GitHub client
+ * @param {string} owner - Repository owner
+ * @param {string} repo - Repository name
+ * @param {number} issueNumber - Issue number
+ * @returns {Promise<string>} Issue node ID
+ */
+async function getIssueNodeId(githubClient, owner, repo, issueNumber) {
+ const { data } = await githubClient.rest.issues.get({
+ owner,
+ repo,
+ issue_number: issueNumber,
+ });
+ return data.node_id;
+}
+
+/**
+ * Fetches available issue fields for the repository/owner.
+ * @param {Object} githubClient - Authenticated GitHub client
+ * @param {string} owner - Repository owner
+ * @param {string} repo - Repository name
+ * @returns {Promise<Array<{__typename?: string, id: string, name: string, options?: Array<{id: string, name: string}>}>>}
+ */
+async function fetchIssueFields(githubClient, owner, repo) {
+ try {
+ const result = await githubClient.graphql(
+ `query($owner: String!, $repo: String!) {
+ repository(owner: $owner, name: $repo) {
+ issueFields(first: 100) {
+ nodes {
+ __typename
+ id
+ name
+ ... on IssueFieldSingleSelect {
+ options {
+ id
+ name
+ }
+ }
+ }
+ }
+ owner {
+ __typename
+ ... on Organization {
+ issueFields(first: 100) {
+ nodes {
+ __typename
+ id
+ name
+ ... on IssueFieldSingleSelect {
+ options {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ ... on User {
+ issueFields(first: 100) {
+ nodes {
+ __typename
+ id
+ name
+ ... on IssueFieldSingleSelect {
+ options {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }`,
+ { owner, repo }
+ );
+
+ const repoFields = result?.repository?.issueFields?.nodes ?? [];
+ if (repoFields.length > 0) {
+ return repoFields;
+ }
+
+ const ownerFields = result?.repository?.owner?.issueFields?.nodes ?? [];
+ return ownerFields;
+ } catch (error) {
+ if (typeof core !== "undefined") {
+ core.debug(`Could not fetch issue fields (may not be enabled): ${error instanceof Error ? error.message : String(error)}`);
+ }
+ return [];
+ }
+}
+
+/**
+ * Builds a field update payload based on field type and value.
+ * @param {{__typename?: string, name?: string, options?: Array<{id: string, name: string}>}|null} field
+ * @param {string} rawValue
+ * @returns {{success: true, update: Record<string, string|number>} | {success: false, error: string}}
+ */
+function buildFieldUpdatePayload(field, rawValue) {
+ const fieldType = field?.__typename || "IssueFieldText";
+
+ if (fieldType === "IssueFieldSingleSelect") {
+ const options = field?.options ?? [];
+ const selected = options.find(option => option.name.toLowerCase() === rawValue.toLowerCase());
+ if (!selected) {
+ const availableOptions = options.map(option => option.name).join(", ");
+ return {
+ success: false,
+ error: `Invalid value ${JSON.stringify(rawValue)} for issue field ${JSON.stringify(field?.name || "(unknown)")}. Available options: ${availableOptions}. Use the exact option name or pass field_node_id to bypass name discovery.`,
+ };
+ }
+ return {
+ success: true,
+ update: {
+ singleSelectOptionId: selected.id,
+ },
+ };
+ }
+
+ if (fieldType === "IssueFieldNumber") {
+ const parsed = Number(rawValue);
+ if (!Number.isFinite(parsed)) {
+ return {
+ success: false,
+ error: `Invalid value ${JSON.stringify(rawValue)} for numeric issue field ${JSON.stringify(field?.name || "(unknown)")}. Provide a numeric value (example: "3.14").`,
+ };
+ }
+ return {
+ success: true,
+ update: {
+ numberValue: parsed,
+ },
+ };
+ }
+
+ if (fieldType === "IssueFieldDate") {
+ if (!/^\d{4}-\d{2}-\d{2}$/.test(rawValue)) {
+ return {
+ success: false,
+ error: `Invalid value ${JSON.stringify(rawValue)} for date issue field ${JSON.stringify(field?.name || "(unknown)")}. Use YYYY-MM-DD format.`,
+ };
+ }
+ return {
+ success: true,
+ update: {
+ dateValue: rawValue,
+ },
+ };
+ }
+
+ return {
+ success: true,
+ update: {
+ textValue: rawValue,
+ },
+ };
+}
+
+/**
+ * Sets one issue field via GraphQL mutation.
+ * @param {Object} githubClient - Authenticated GitHub client
+ * @param {string} issueNodeId - GraphQL node ID of the issue
+ * @param {{fieldId: string, singleSelectOptionId?: string, numberValue?: number, dateValue?: string, textValue?: string}} fieldUpdate
+ * @returns {Promise<void>}
+ */
+async function setIssueFieldValue(githubClient, issueNodeId, fieldUpdate) {
+ await githubClient.graphql(
+ `mutation($issueId: ID!, $issueFields: [IssueFieldCreateOrUpdateInput!]!) {
+ setIssueFieldValue(input: { issueId: $issueId, issueFields: $issueFields }) {
+ issue {
+ id
+ }
+ }
+ }`,
+ {
+ issueId: issueNodeId,
+ issueFields: [fieldUpdate],
+ }
+ );
+}
+
+/**
+ * Main handler factory for set_issue_field.
+ * @type {HandlerFactoryFunction}
+ */
+async function main(config = {}) {
+ const maxCount = config.max || 5;
+ const allowedIssueFields = parseAllowedIssueFields(config.allowed_fields);
+ const { defaultTargetRepo, allowedRepos } = resolveTargetRepoConfig(config);
+ const githubClient = await createAuthenticatedGitHubClient(config);
+ const isStaged = isStagedMode(config);
+
+ core.info(`Set issue field configuration: max=${maxCount}`);
+ core.info(`Default target repo: ${defaultTargetRepo}`);
+ if (allowedRepos.size > 0) {
+ core.info(`Allowed repos: ${Array.from(allowedRepos).join(", ")}`);
+ }
+ if (allowedIssueFields.length > 0 && !allowedIssueFields.includes("*")) {
+ core.info(`Allowed issue fields: ${allowedIssueFields.join(", ")}`);
+ }
+
+ let processedCount = 0;
+
+ return async function handleSetIssueField(message, resolvedTemporaryIds) {
+ if (processedCount >= maxCount) {
+ core.warning(`Skipping set_issue_field: max count of ${maxCount} reached`);
+ return {
+ success: false,
+ error: `Max count of ${maxCount} reached`,
+ };
+ }
+
+ processedCount++;
+
+ const item = message;
+ const temporaryIdMap = loadTemporaryIdMapFromResolved(resolvedTemporaryIds);
+
+ const repoResult = resolveAndValidateRepo(item, defaultTargetRepo, allowedRepos, "issue");
+ if (!repoResult.success) {
+ core.warning(`Skipping set_issue_field: ${repoResult.error}`);
+ return {
+ success: false,
+ error: repoResult.error,
+ };
+ }
+ const { repo: itemRepo, repoParts } = repoResult;
+ core.info(`Target repository: ${itemRepo}`);
+
+ let issueNumber;
+ if (item.issue_number !== undefined && item.issue_number !== null) {
+ const resolvedTarget = resolveRepoIssueTarget(item.issue_number, temporaryIdMap, repoParts.owner, repoParts.repo);
+
+ if (resolvedTarget.wasTemporaryId && !resolvedTarget.resolved) {
+ core.info(`Deferring set_issue_field: unresolved temporary ID (${item.issue_number})`);
+ return {
+ success: false,
+ deferred: true,
+ error: resolvedTarget.errorMessage || `Unresolved temporary ID: ${item.issue_number}`,
+ };
+ }
+
+ if (resolvedTarget.errorMessage || !resolvedTarget.resolved) {
+ core.warning(`Invalid issue_number: ${item.issue_number}`);
+ return {
+ success: false,
+ error: `Invalid issue_number: ${item.issue_number}`,
+ };
+ }
+
+ issueNumber = resolvedTarget.resolved.number;
+ core.info(`Resolved issue number: #${issueNumber}`);
+ } else {
+ const contextIssueNumber = context.payload?.issue?.number;
+ if (!contextIssueNumber) {
+ core.warning("No issue_number provided and not in issue context");
+ return {
+ success: false,
+ error: "No issue number available",
+ };
+ }
+ issueNumber = contextIssueNumber;
+ }
+
+ if (item.value === undefined || item.value === null) {
+ return {
+ success: false,
+ error: "Missing required value. Provide the issue field value as a string.",
+ };
+ }
+
+ const fieldName = typeof item.field_name === "string" ? item.field_name.trim() : "";
+ let fieldNodeId = typeof item.field_node_id === "string" ? item.field_node_id.trim() : "";
+ const value = String(item.value);
+
+ if (!fieldName && !fieldNodeId) {
+ return {
+ success: false,
+ error: "Missing field identifier. Provide field_name or field_node_id.",
+ };
+ }
+
+ if (isStaged) {
+ const description = `Would set issue field ${JSON.stringify(fieldName || fieldNodeId)} to ${JSON.stringify(value)} on issue #${issueNumber} in ${itemRepo}`;
+ logStagedPreviewInfo(description);
+ return {
+ success: true,
+ staged: true,
+ previewInfo: {
+ issue_number: issueNumber,
+ field_name: fieldName,
+ field_node_id: fieldNodeId,
+ value,
+ repo: itemRepo,
+ },
+ };
+ }
+
+ try {
+ const { owner, repo } = repoParts;
+ const issueNodeId = await getIssueNodeId(githubClient, owner, repo, issueNumber);
+
+ /** @type {{id: string, name: string, __typename?: string, options?: Array<{id: string, name: string}>}|null} */
+ let resolvedField = null;
+
+ const availableFields = await fetchIssueFields(githubClient, owner, repo);
+
+ if (availableFields.length === 0) {
+ const error = "No issue fields were discovered for this repository. Verify issue fields are enabled and visible to this token.";
+ core.error(error);
+ return { success: false, error };
+ }
+
+ let resolvedFieldByName = null;
+ if (fieldName) {
+ resolvedFieldByName = availableFields.find(field => field.name.toLowerCase() === fieldName.toLowerCase()) || null;
+ if (!resolvedFieldByName) {
+ const availableNames = availableFields.map(field => field.name).join(", ");
+ const error = `Issue field ${JSON.stringify(fieldName)} not found. Available fields: ${availableNames}. Use a listed field_name or provide field_node_id to bypass discovery.`;
+ core.error(error);
+ return { success: false, error };
+ }
+ }
+
+ if (fieldNodeId) {
+ resolvedField = availableFields.find(field => field.id === fieldNodeId) || null;
+ }
+
+ if (!fieldNodeId && resolvedFieldByName) {
+ fieldNodeId = resolvedFieldByName.id;
+ resolvedField = resolvedFieldByName;
+ }
+
+ if (fieldNodeId && !resolvedField) {
+ const availableFieldsSummary = availableFields.map(field => `${field.name} (${field.id})`).join(", ");
+ const error = `Issue field ID ${JSON.stringify(fieldNodeId)} not found. Available fields: ${availableFieldsSummary}. Use a valid field_node_id or provide field_name.`;
+ core.error(error);
+ return { success: false, error };
+ }
+
+ const hasConflictingFieldIdentifiers = Boolean(fieldNodeId && fieldName && resolvedFieldByName && resolvedField && resolvedFieldByName.id !== resolvedField.id);
+ if (hasConflictingFieldIdentifiers) {
+ const error = `field_name ${JSON.stringify(fieldName)} resolves to ${JSON.stringify(resolvedFieldByName.id)}, but field_node_id was ${JSON.stringify(fieldNodeId)}. Provide only one identifier or make them match.`;
+ core.error(error);
+ return { success: false, error };
+ }
+
+ if (!fieldNodeId) {
+ const error = "Could not resolve field_node_id. Provide a valid field_name or explicit field_node_id.";
+ core.error(error);
+ return { success: false, error };
+ }
+
+ const resolvedFieldName = resolvedField?.name || fieldName;
+ validateAllowedIssueFieldName(resolvedFieldName, allowedIssueFields);
+
+ const fieldUpdateResult = buildFieldUpdatePayload(resolvedField, value);
+ if (!fieldUpdateResult.success) {
+ core.error(fieldUpdateResult.error);
+ return { success: false, error: fieldUpdateResult.error };
+ }
+
+ const fieldUpdate = {
+ fieldId: fieldNodeId,
+ ...fieldUpdateResult.update,
+ };
+
+ await setIssueFieldValue(githubClient, issueNodeId, fieldUpdate);
+
+ core.info(`Successfully set issue field ${JSON.stringify(fieldName || fieldNodeId)} to ${JSON.stringify(value)} on issue #${issueNumber}`);
+
+ return {
+ success: true,
+ issue_number: issueNumber,
+ field_name: fieldName,
+ field_node_id: fieldNodeId,
+ value,
+ repo: itemRepo,
+ };
+ } catch (error) {
+ const errorMessage = getErrorMessage(error);
+ core.error(`Failed to set issue field on issue #${issueNumber}: ${errorMessage}`);
+ return { success: false, error: errorMessage };
+ }
+ };
+}
+
+module.exports = { main };
diff --git a/actions/setup/js/set_issue_field.test.cjs b/actions/setup/js/set_issue_field.test.cjs
new file mode 100644
index 0000000000..eb3f32ccce
--- /dev/null
+++ b/actions/setup/js/set_issue_field.test.cjs
@@ -0,0 +1,243 @@
+import { describe, it, expect, beforeEach, vi } from "vitest";
+
+const mockCore = {
+ debug: vi.fn(),
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ setFailed: vi.fn(),
+ setOutput: vi.fn(),
+ summary: {
+ addRaw: vi.fn().mockReturnThis(),
+ write: vi.fn().mockResolvedValue(),
+ },
+};
+
+const mockContext = {
+ repo: {
+ owner: "test-owner",
+ repo: "test-repo",
+ },
+ eventName: "issues",
+ payload: {
+ issue: {
+ number: 123,
+ },
+ },
+};
+
+const mockGraphql = vi.fn();
+
+const mockGithub = {
+ rest: {
+ issues: {
+ get: vi.fn(),
+ },
+ },
+ graphql: mockGraphql,
+};
+
+global.core = mockCore;
+global.context = mockContext;
+global.github = mockGithub;
+
+describe("set_issue_field (Handler Factory Architecture)", () => {
+ let handler;
+
+ const issueNodeId = "I_kwDOABCD123456";
+ const textFieldId = "IF_kwDO_text";
+ const statusFieldId = "IF_kwDO_status";
+ const effortFieldId = "IF_kwDO_direct";
+
+ const mockIssueFieldsQuery = {
+ repository: {
+ issueFields: {
+ nodes: [
+ { id: textFieldId, name: "Customer Impact", __typename: "IssueFieldText" },
+ {
+ id: statusFieldId,
+ name: "Status",
+ __typename: "IssueFieldSingleSelect",
+ options: [
+ { id: "IFOPT_open", name: "Open" },
+ { id: "IFOPT_closed", name: "Closed" },
+ ],
+ },
+ { id: effortFieldId, name: "Effort", __typename: "IssueFieldNumber" },
+ ],
+ },
+ owner: {
+ __typename: "Organization",
+ issueFields: {
+ nodes: [],
+ },
+ },
+ },
+ };
+
+ beforeEach(async () => {
+ vi.clearAllMocks();
+
+ mockGithub.rest.issues.get.mockResolvedValue({ data: { node_id: issueNodeId } });
+ mockGraphql.mockImplementation(query => {
+ if (query.includes("issueFields")) {
+ return Promise.resolve(mockIssueFieldsQuery);
+ }
+ if (query.includes("setIssueFieldValue")) {
+ return Promise.resolve({ setIssueFieldValue: { issue: { id: issueNodeId } } });
+ }
+ return Promise.resolve({});
+ });
+
+ const { main } = require("./set_issue_field.cjs");
+ handler = await main({ max: 5 });
+ });
+
+ it("should return a function from main()", async () => {
+ const { main } = require("./set_issue_field.cjs");
+ const result = await main({});
+ expect(typeof result).toBe("function");
+ });
+
+ it("should set issue text field successfully", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Customer Impact",
+ value: "High",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(true);
+ expect(result.issue_number).toBe(42);
+ expect(result.field_name).toBe("Customer Impact");
+ expect(result.field_node_id).toBe(textFieldId);
+ expect(mockGraphql).toHaveBeenCalledWith(
+ expect.stringContaining("setIssueFieldValue"),
+ expect.objectContaining({
+ issueId: issueNodeId,
+ issueFields: [expect.objectContaining({ fieldId: textFieldId, textValue: "High" })],
+ })
+ );
+ });
+
+ it("should set single-select field by option name", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Status",
+ value: "Closed",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(true);
+ expect(mockGraphql).toHaveBeenCalledWith(
+ expect.stringContaining("setIssueFieldValue"),
+ expect.objectContaining({
+ issueFields: [expect.objectContaining({ fieldId: statusFieldId, singleSelectOptionId: "IFOPT_closed" })],
+ })
+ );
+ });
+
+ it("should error with actionable message for unknown field name", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Unknown Field",
+ value: "foo",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(false);
+ expect(result.error).toContain("not found");
+ expect(result.error).toContain("Available fields");
+ expect(result.error).toContain("field_node_id");
+ });
+
+ it("should error with actionable message for invalid single-select value", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Status",
+ value: "Invalid",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(false);
+ expect(result.error).toContain("Invalid value");
+ expect(result.error).toContain("Available options");
+ });
+
+ it("should resolve field type when field_node_id is provided", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_node_id: effortFieldId,
+ value: "3.5",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(true);
+ expect(result.field_node_id).toBe(effortFieldId);
+ expect(mockGraphql).toHaveBeenCalledWith(expect.stringContaining("repository(owner"), expect.anything());
+ expect(mockGraphql).toHaveBeenCalledWith(
+ expect.stringContaining("setIssueFieldValue"),
+ expect.objectContaining({
+ issueFields: [expect.objectContaining({ fieldId: effortFieldId, numberValue: 3.5 })],
+ })
+ );
+ });
+
+ it("should error when provided field_node_id is unknown", async () => {
+ const message = {
+ type: "set_issue_field",
+ issue_number: 42,
+ field_node_id: "IF_kwDO_missing",
+ value: "3.5",
+ };
+
+ const result = await handler(message, {});
+
+ expect(result.success).toBe(false);
+ expect(result.error).toContain("not found");
+ expect(result.error).toContain("Available fields");
+ });
+
+ it("should enforce configured allowed-fields list", async () => {
+ const { main } = require("./set_issue_field.cjs");
+ const restrictedHandler = await main({
+ allowed_fields: ["Status"],
+ });
+
+ const result = await restrictedHandler({
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Customer Impact",
+ value: "High",
+ });
+
+ expect(result.success).toBe(false);
+ expect(result.error).toContain('issue field "Customer Impact" is not in the allowed-fields list: Status');
+ });
+
+ it("should allow any field when allowed-fields includes wildcard", async () => {
+ const { main } = require("./set_issue_field.cjs");
+ const unrestrictedHandler = await main({
+ allowed_fields: ["*"],
+ });
+
+ const result = await unrestrictedHandler({
+ type: "set_issue_field",
+ issue_number: 42,
+ field_name: "Customer Impact",
+ value: "High",
+ });
+
+ expect(result.success).toBe(true);
+ });
+});
diff --git a/actions/setup/js/types/safe-outputs.d.ts b/actions/setup/js/types/safe-outputs.d.ts
index 4e102871dd..4666139bef 100644
--- a/actions/setup/js/types/safe-outputs.d.ts
+++ b/actions/setup/js/types/safe-outputs.d.ts
@@ -324,6 +324,21 @@ interface SetIssueTypeItem extends BaseSafeOutputItem {
issue_number?: number | string;
}
+/**
+ * JSONL item for setting a custom issue field value
+ */
+interface SetIssueFieldItem extends BaseSafeOutputItem {
+ type: "set_issue_field";
+ /** Issue field name to set (e.g., "Priority", "Severity"). */
+ field_name?: string;
+ /** Optional issue field GraphQL node ID to skip name-based discovery. */
+ field_node_id?: string;
+ /** Field value to set. For single-select fields, provide the option name. */
+ value: string;
+ /** Issue number (optional - uses triggering issue if not provided) */
+ issue_number?: number | string;
+}
+
/**
* JSONL item for assigning a GitHub Copilot coding agent to an issue or project item
*/
@@ -455,6 +470,7 @@ type SafeOutputItem =
| UploadAssetItem
| AssignMilestoneItem
| SetIssueTypeItem
+ | SetIssueFieldItem
| AssignToAgentItem
| UpdateReleaseItem
| NoOpItem
@@ -498,6 +514,7 @@ export {
UploadAssetItem,
AssignMilestoneItem,
SetIssueTypeItem,
+ SetIssueFieldItem,
AssignToAgentItem,
UpdateReleaseItem,
NoOpItem,
diff --git a/docs/src/content/docs/reference/glossary.md b/docs/src/content/docs/reference/glossary.md
index 5c4fadde79..027abd51bc 100644
--- a/docs/src/content/docs/reference/glossary.md
+++ b/docs/src/content/docs/reference/glossary.md
@@ -269,6 +269,10 @@ A mandatory safe output signal that agents emit when a task cannot be completed
A safe output capability for setting or clearing the GitHub issue type on existing issues. The agent calls `set_issue_type` to assign a named type (e.g., `Bug`, `Feature`) to an issue. An `allowed` list restricts which types the agent may set; omitting it permits any type. Passing an empty string clears the current type. Supports cross-repository targeting via `target-repo` and `allowed-repos`. Configured via `set-issue-type:` in `safe-outputs`.
+### Set Issue Field (`set-issue-field:`)
+
+A safe output capability for setting one issue field value on existing issues. The agent calls `set_issue_field` with `value` and either `field_name` (for discovery by field label) or `field_node_id` (to skip discovery). Unknown field names return actionable errors listing available fields and suggesting explicit IDs. Supports optional `allowed-fields` restrictions (including `["*"]` wildcard) and cross-repository targeting via `target-repo` and `allowed-repos`. Configured via `set-issue-field:` in `safe-outputs`.
+
### Parameterized Safe-Output Fields
A pattern for `workflow_call` reuse where safe-output policy and list fields accept GitHub Actions expression strings (e.g., `${{ inputs.protected-files-policy }}`) in addition to literal values. At compile time the compiler detects the `${{...}}` form and passes it through unchanged; GitHub Actions evaluates the expression at runtime before the handler executes. Enum-valued policy fields such as `protected-files` and `patch-format` validate literal values at compile time but defer expression-based values to runtime (failing closed on unrecognized input). List-valued fields such as `labels`, `allowed-repos`, and `allowed-base-branches` accept either a YAML array or a single expression string. This enables a single reusable workflow to serve callers with different constraint configurations without duplicating files. See [Safe Outputs (Pull Requests)](/gh-aw/reference/safe-outputs-pull-requests/#parameterizing-policy-fields-in-reusable-workflows).
diff --git a/docs/src/content/docs/reference/safe-outputs.md b/docs/src/content/docs/reference/safe-outputs.md
index 2bfd9afa72..f47dc7a41a 100644
--- a/docs/src/content/docs/reference/safe-outputs.md
+++ b/docs/src/content/docs/reference/safe-outputs.md
@@ -55,6 +55,7 @@ The agent requests issue creation; a separate job with `issues: write` creates i
- [**Assign to User**](#assign-to-user-assign-to-user) (`assign-to-user`) - Assign users to issues (max: 1)
- [**Unassign from User**](#unassign-from-user-unassign-from-user) (`unassign-from-user`) - Remove user assignments from issues or PRs (max: 1)
- [**Set Issue Type**](#set-issue-type-set-issue-type) (`set-issue-type`) - Set or clear the type of GitHub issues (max: 5)
+- [**Set Issue Field**](#set-issue-field-set-issue-field) (`set-issue-field`) - Set one issue field value by name/value (max: 5)
### Projects, Releases & Assets
@@ -436,6 +437,23 @@ safe-outputs:
Agent calls `set_issue_type` with `issue_type` (the type name) and optionally `issue_number`. Omitting `issue_number` targets the triggering issue.
+### Set Issue Field (`set-issue-field:`)
+
+Sets a single field on an issue to a given value, identified by field name or node ID, without needing the broader `update-issue` tool path.
+
+```yaml wrap
+safe-outputs:
+ set-issue-field: # null enables with defaults
+ max: 5 # max operations (default: 5)
+ target: "triggering" # "triggering" (default), "*", or issue number
+ allowed-fields: [Priority, Iteration] # restrict issue fields this workflow may set
+ target-repo: "owner/repo" # cross-repository
+ allowed-repos: ["owner/repo1"] # additional allowed repositories
+ github-token: ${{ secrets.SOME_CUSTOM_TOKEN }}
+```
+
+Agent calls `set_issue_field` with `value` and either `field_name` (preferred) or `field_node_id`. It can also pass `issue_number`; if omitted, the triggering issue is targeted.
+
### Project Creation (`create-project:`)
Creates new GitHub Projects V2 boards. Requires a write-capable PAT or GitHub App token ([project token authentication](/gh-aw/patterns/project-ops/#project-token-authentication)); default `GITHUB_TOKEN` lacks Projects v2 access. Supports optional view configuration to create custom project views at creation time.
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index e21733dd1e..19a0279e51 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4761,7 +4761,7 @@
},
"safe-outputs": {
"type": "object",
- "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
+ "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-field, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
"description": "Safe output processing configuration that automatically creates GitHub issues, comments, and pull requests from AI workflow output without requiring write permissions in the main job",
"examples": [
{
@@ -7747,6 +7747,67 @@
],
"description": "Enable AI agents to set or clear the type of GitHub issues. Use an empty string to clear the current type."
},
+ "set-issue-field": {
+ "oneOf": [
+ {
+ "type": "null",
+ "description": "Null configuration enables set-issue-field with defaults."
+ },
+ {
+ "type": "object",
+ "description": "Configuration for setting a single issue field, identified by field name or node ID, to a given value.",
+ "properties": {
+ "max": {
+ "description": "Optional maximum number of set-issue-field operations (default: 5). Supports integer or GitHub Actions expression (e.g. '${{ inputs.max }}').",
+ "oneOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^\\$\\{\\{.*\\}\\}$",
+ "description": "GitHub Actions expression that resolves to an integer at runtime"
+ }
+ ]
+ },
+ "target": {
+ "type": "string",
+ "description": "Target for issue field updates: 'triggering' (default), '*' (any issue), or explicit issue number"
+ },
+ "allowed-fields": {
+ "type": "array",
+ "description": "Optional list of issue field names that can be modified by set-issue-field. If omitted or empty, any issue field may be set. Use ['*'] to explicitly allow all.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "target-repo": {
+ "type": "string",
+ "description": "Target repository in format 'owner/repo' for cross-repository issue field updates. Takes precedence over trial target repo settings."
+ },
+ "allowed-repos": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "List of additional repositories in format 'owner/repo' where issue fields can be updated. When specified, the agent can use a 'repo' field in the output to specify which repository to target. The target repository (current or target-repo) is always implicitly allowed."
+ },
+ "github-token": {
+ "$ref": "#/$defs/github_token",
+ "description": "GitHub token to use for this specific output type. Overrides global github-token if specified."
+ },
+ "staged": {
+ "type": "boolean",
+ "description": "If true, emit step summary messages instead of making GitHub API calls for this specific output type (preview mode)",
+ "examples": [true, false]
+ }
+ },
+ "additionalProperties": false
+ }
+ ],
+ "description": "Enable AI agents to set a single issue field, identified by field name or node ID, to a given value."
+ },
"dispatch-workflow": {
"oneOf": [
{
diff --git a/pkg/workflow/compiler_safe_outputs_config_test.go b/pkg/workflow/compiler_safe_outputs_config_test.go
index 453c96f03c..bad798f3db 100644
--- a/pkg/workflow/compiler_safe_outputs_config_test.go
+++ b/pkg/workflow/compiler_safe_outputs_config_test.go
@@ -667,6 +667,21 @@ func TestAddHandlerManagerConfigEnvVar(t *testing.T) {
checkJSON: true,
expectedKeys: []string{"set_issue_type"},
},
+ {
+ name: "set_issue_field config",
+ safeOutputs: &SafeOutputsConfig{
+ SetIssueField: &SetIssueFieldConfig{
+ BaseSafeOutputConfig: BaseSafeOutputConfig{
+ Max: strPtr("1"),
+ },
+ },
+ },
+ checkContains: []string{
+ "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG",
+ },
+ checkJSON: true,
+ expectedKeys: []string{"set_issue_field"},
+ },
{
name: "noop config",
safeOutputs: &SafeOutputsConfig{
diff --git a/pkg/workflow/compiler_safe_outputs_handlers.go b/pkg/workflow/compiler_safe_outputs_handlers.go
index a62b0a7e7b..92dbb2ca74 100644
--- a/pkg/workflow/compiler_safe_outputs_handlers.go
+++ b/pkg/workflow/compiler_safe_outputs_handlers.go
@@ -839,4 +839,23 @@ var handlerRegistry = map[string]handlerBuilder{
}
return config
},
+ "set_issue_field": func(cfg *SafeOutputsConfig) map[string]any {
+ if cfg.SetIssueField == nil {
+ return nil
+ }
+ c := cfg.SetIssueField
+ config := newHandlerConfigBuilder().
+ AddTemplatableInt("max", c.Max).
+ AddStringSlice("allowed_fields", c.AllowedFields).
+ AddIfNotEmpty("target", c.Target).
+ AddIfNotEmpty("target-repo", c.TargetRepoSlug).
+ AddStringSlice("allowed_repos", c.AllowedRepos).
+ AddIfNotEmpty("github-token", c.GitHubToken).
+ AddIfTrue("staged", c.Staged).
+ Build()
+ if len(config) == 0 {
+ return make(map[string]any)
+ }
+ return config
+ },
}
diff --git a/pkg/workflow/compiler_safe_outputs_job.go b/pkg/workflow/compiler_safe_outputs_job.go
index ea60e4ad31..c5cbaac233 100644
--- a/pkg/workflow/compiler_safe_outputs_job.go
+++ b/pkg/workflow/compiler_safe_outputs_job.go
@@ -196,6 +196,7 @@ func (c *Compiler) buildSafeOutputsHandlerOutputsAndActionSteps(data *WorkflowDa
data.SafeOutputs.MarkPullRequestAsReadyForReview != nil ||
data.SafeOutputs.HideComment != nil ||
data.SafeOutputs.SetIssueType != nil ||
+ data.SafeOutputs.SetIssueField != nil ||
data.SafeOutputs.DispatchWorkflow != nil ||
data.SafeOutputs.CallWorkflow != nil ||
data.SafeOutputs.CreateCodeScanningAlerts != nil ||
diff --git a/pkg/workflow/compiler_safe_outputs_job_test.go b/pkg/workflow/compiler_safe_outputs_job_test.go
index be6d95ca57..9db27dbc1f 100644
--- a/pkg/workflow/compiler_safe_outputs_job_test.go
+++ b/pkg/workflow/compiler_safe_outputs_job_test.go
@@ -59,6 +59,15 @@ func TestBuildConsolidatedSafeOutputsJob(t *testing.T) {
checkPermissions: true,
expectedPerms: []string{"contents: read", "issues: write", "discussions: write"},
},
+ {
+ name: "set issue field only",
+ safeOutputs: &SafeOutputsConfig{
+ SetIssueField: &SetIssueFieldConfig{},
+ },
+ expectedJobName: "safe_outputs",
+ checkPermissions: true,
+ expectedPerms: []string{"contents: read", "issues: write"},
+ },
{
name: "create pull requests with patch",
safeOutputs: &SafeOutputsConfig{
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index d792d9760e..74c58ddee5 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -602,6 +602,7 @@ type SafeOutputsConfig struct {
LinkSubIssue *LinkSubIssueConfig `yaml:"link-sub-issue,omitempty"` // Link issues as sub-issues
HideComment *HideCommentConfig `yaml:"hide-comment,omitempty"` // Hide comments
SetIssueType *SetIssueTypeConfig `yaml:"set-issue-type,omitempty"` // Set the type of an issue (empty string clears the type)
+ SetIssueField *SetIssueFieldConfig `yaml:"set-issue-field,omitempty"` // Set a single issue field value by name/value
DispatchWorkflow *DispatchWorkflowConfig `yaml:"dispatch-workflow,omitempty"` // Dispatch workflow_dispatch events to other workflows
DispatchRepository *DispatchRepositoryConfig `yaml:"dispatch_repository,omitempty"` // Dispatch repository_dispatch events to external repositories
CallWorkflow *CallWorkflowConfig `yaml:"call-workflow,omitempty"` // Call reusable workflows via workflow_call fan-out
diff --git a/pkg/workflow/imports.go b/pkg/workflow/imports.go
index 8042aa8da2..8a99311ac3 100644
--- a/pkg/workflow/imports.go
+++ b/pkg/workflow/imports.go
@@ -398,6 +398,8 @@ func hasSafeOutputType(config *SafeOutputsConfig, key string) bool {
return config.HideComment != nil
case "set-issue-type":
return config.SetIssueType != nil
+ case "set-issue-field":
+ return config.SetIssueField != nil
case "dispatch-workflow":
return config.DispatchWorkflow != nil
case "call-workflow":
@@ -552,6 +554,9 @@ func mergeSafeOutputConfig(result *SafeOutputsConfig, config map[string]any, c *
if result.SetIssueType == nil && importedConfig.SetIssueType != nil {
result.SetIssueType = importedConfig.SetIssueType
}
+ if result.SetIssueField == nil && importedConfig.SetIssueField != nil {
+ result.SetIssueField = importedConfig.SetIssueField
+ }
if result.DispatchWorkflow == nil && importedConfig.DispatchWorkflow != nil {
result.DispatchWorkflow = importedConfig.DispatchWorkflow
}
diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json
index 9836f6f5be..ea91de1d0a 100644
--- a/pkg/workflow/js/safe_outputs_tools.json
+++ b/pkg/workflow/js/safe_outputs_tools.json
@@ -1285,6 +1285,46 @@
"additionalProperties": false
}
},
+ {
+ "name": "set_issue_field",
+ "description": "Set a single GitHub issue field by name and value. Use field_name for discovery by field label (for example, \"Priority\"), or provide field_node_id to skip discovery. Supports text, number, date (YYYY-MM-DD), and single-select fields (value must match an option name).",
+ "inputSchema": {
+ "type": "object",
+ "required": [
+ "value"
+ ],
+ "properties": {
+ "issue_number": {
+ "type": [
+ "number",
+ "string"
+ ],
+ "description": "Issue number to set the field on. If omitted, targets the issue that triggered this workflow."
+ },
+ "field_name": {
+ "type": "string",
+ "description": "Issue field name to set (e.g., \"Priority\", \"Severity\", \"Customer Impact\")."
+ },
+ "field_node_id": {
+ "type": "string",
+ "description": "Optional GraphQL node ID of the issue field. Provide this to skip field-name discovery and set a field directly."
+ },
+ "value": {
+ "type": "string",
+ "description": "Field value to set. For single-select fields, this must match an existing option name. For date fields, use YYYY-MM-DD."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "update_project",
"description": "Manage GitHub Projects: add issues/pull requests/draft issues, update item fields (status, priority, effort, dates), manage custom fields, and create project views. Use this to organize work by adding items to projects, updating field values, creating custom fields up-front, and setting up project views (table, board, roadmap).\n\nThree modes: (1) Add or update project items with custom field values; (2) Create project fields; (3) Create project views. This is the primary tool for ProjectOps automation - add items to projects, set custom fields for tracking, and organize project boards.",
diff --git a/pkg/workflow/safe_output_validation_config_test.go b/pkg/workflow/safe_output_validation_config_test.go
index 847f579902..21bec258c6 100644
--- a/pkg/workflow/safe_output_validation_config_test.go
+++ b/pkg/workflow/safe_output_validation_config_test.go
@@ -211,6 +211,7 @@ func TestValidationConfigConsistency(t *testing.T) {
"requiresOneOf:title,body,update_branch": true,
"requiresOneOf:title,body,labels": true,
"requiresOneOf:issue_number,pull_number": true,
+ "requiresOneOf:field_name,field_node_id": true,
"requiresOneOf:reviewers,team_reviewers": true,
"startLineLessOrEqualLine": true,
"parentAndSubDifferent": true,
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index b825bc86aa..7486e50ef3 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -307,6 +307,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut
config.SetIssueType = setIssueTypeConfig
}
+ // Handle set-issue-field
+ setIssueFieldConfig := c.parseSetIssueFieldConfig(outputMap)
+ if setIssueFieldConfig != nil {
+ config.SetIssueField = setIssueFieldConfig
+ }
+
// Handle dispatch-workflow
dispatchWorkflowConfig := c.parseDispatchWorkflowConfig(outputMap)
if dispatchWorkflowConfig != nil {
diff --git a/pkg/workflow/safe_outputs_max_validation.go b/pkg/workflow/safe_outputs_max_validation.go
index 3a0d705e35..4185e3e3fc 100644
--- a/pkg/workflow/safe_outputs_max_validation.go
+++ b/pkg/workflow/safe_outputs_max_validation.go
@@ -224,6 +224,11 @@ func validateSafeOutputsMax(config *SafeOutputsConfig) error {
return err
}
}
+ if config.SetIssueField != nil {
+ if err := checkMaxField("set_issue_field", config.SetIssueField.Max); err != nil {
+ return err
+ }
+ }
if config.SubmitPullRequestReview != nil {
if err := checkMaxField("submit_pull_request_review", config.SubmitPullRequestReview.Max); err != nil {
return err
diff --git a/pkg/workflow/safe_outputs_permissions.go b/pkg/workflow/safe_outputs_permissions.go
index 9aee847dc4..20435d569c 100644
--- a/pkg/workflow/safe_outputs_permissions.go
+++ b/pkg/workflow/safe_outputs_permissions.go
@@ -264,6 +264,10 @@ func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissio
safeOutputsPermissionsLog.Print("Adding permissions for set-issue-type")
permissions.Merge(NewPermissionsContentsReadIssuesWrite())
}
+ if safeOutputs.SetIssueField != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.SetIssueField.Staged) {
+ safeOutputsPermissionsLog.Print("Adding permissions for set-issue-field")
+ permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+ }
if safeOutputs.AddReviewer != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.AddReviewer.Staged) {
safeOutputsPermissionsLog.Print("Adding permissions for add-reviewer")
permissions.Merge(NewPermissionsContentsReadPRWrite())
diff --git a/pkg/workflow/safe_outputs_state.go b/pkg/workflow/safe_outputs_state.go
index 0f9d827502..456e65b385 100644
--- a/pkg/workflow/safe_outputs_state.go
+++ b/pkg/workflow/safe_outputs_state.go
@@ -65,6 +65,7 @@ var safeOutputFieldMapping = map[string]string{
"MissingTool": "missing_tool",
"MissingData": "missing_data",
"SetIssueType": "set_issue_type",
+ "SetIssueField": "set_issue_field",
"NoOp": "noop",
"MarkPullRequestAsReadyForReview": "mark_pull_request_as_ready_for_review",
}
@@ -129,6 +130,7 @@ func hasAnySafeOutputEnabled(safeOutputs *SafeOutputsConfig) bool {
safeOutputs.MissingTool != nil ||
safeOutputs.MissingData != nil ||
safeOutputs.SetIssueType != nil ||
+ safeOutputs.SetIssueField != nil ||
safeOutputs.NoOp != nil // 43rd field
}
@@ -190,7 +192,8 @@ func hasNonBuiltinSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool {
safeOutputs.DispatchWorkflow != nil ||
safeOutputs.DispatchRepository != nil ||
safeOutputs.CallWorkflow != nil ||
- safeOutputs.SetIssueType != nil // 40th non-builtin field
+ safeOutputs.SetIssueType != nil ||
+ safeOutputs.SetIssueField != nil // non-builtin safe output field
}
// HasSafeOutputsEnabled checks if any safe-outputs are enabled
diff --git a/pkg/workflow/safe_outputs_tools_computation.go b/pkg/workflow/safe_outputs_tools_computation.go
index 2b477f0792..fbe66c9d50 100644
--- a/pkg/workflow/safe_outputs_tools_computation.go
+++ b/pkg/workflow/safe_outputs_tools_computation.go
@@ -119,6 +119,9 @@ func computeEnabledToolNames(data *WorkflowData) map[string]bool {
if data.SafeOutputs.SetIssueType != nil {
enabledTools["set_issue_type"] = true
}
+ if data.SafeOutputs.SetIssueField != nil {
+ enabledTools["set_issue_field"] = true
+ }
if data.SafeOutputs.UpdateProjects != nil {
enabledTools["update_project"] = true
}
diff --git a/pkg/workflow/safe_outputs_tools_repo_params.go b/pkg/workflow/safe_outputs_tools_repo_params.go
index 787b213895..712a3fe68b 100644
--- a/pkg/workflow/safe_outputs_tools_repo_params.go
+++ b/pkg/workflow/safe_outputs_tools_repo_params.go
@@ -76,7 +76,7 @@ func addRepoParameterIfNeeded(tool map[string]any, toolName string, safeOutputs
}
case "add_labels", "remove_labels", "hide_comment", "link_sub_issue", "mark_pull_request_as_ready_for_review",
"add_reviewer", "assign_milestone", "assign_to_agent", "assign_to_user", "unassign_from_user",
- "set_issue_type":
+ "set_issue_type", "set_issue_field":
// These use SafeOutputTargetConfig - check the appropriate config
switch toolName {
case "add_labels":
@@ -134,6 +134,11 @@ func addRepoParameterIfNeeded(tool map[string]any, toolName string, safeOutputs
hasAllowedRepos = len(config.AllowedRepos) > 0
targetRepoSlug = config.TargetRepoSlug
}
+ case "set_issue_field":
+ if config := safeOutputs.SetIssueField; config != nil {
+ hasAllowedRepos = len(config.AllowedRepos) > 0
+ targetRepoSlug = config.TargetRepoSlug
+ }
}
}
diff --git a/pkg/workflow/safe_outputs_validation_config.go b/pkg/workflow/safe_outputs_validation_config.go
index 8dad93d78a..3de340069a 100644
--- a/pkg/workflow/safe_outputs_validation_config.go
+++ b/pkg/workflow/safe_outputs_validation_config.go
@@ -127,6 +127,17 @@ var ValidationConfig = map[string]TypeValidationConfig{
"repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo"
},
},
+ "set_issue_field": {
+ DefaultMax: 5,
+ CustomValidation: "requiresOneOf:field_name,field_node_id",
+ Fields: map[string]FieldValidation{
+ "issue_number": {IssueOrPRNumber: true},
+ "field_name": {Type: "string", Sanitize: true, MaxLength: 128},
+ "field_node_id": {Type: "string", MaxLength: 256},
+ "value": {Required: true, Type: "string", Sanitize: true, MaxLength: 256},
+ "repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo"
+ },
+ },
"assign_to_agent": {
DefaultMax: 1,
CustomValidation: "requiresOneOf:issue_number,pull_number",
diff --git a/pkg/workflow/set_issue_field.go b/pkg/workflow/set_issue_field.go
new file mode 100644
index 0000000000..9ca4bbb565
--- /dev/null
+++ b/pkg/workflow/set_issue_field.go
@@ -0,0 +1,24 @@
+package workflow
+
+import "github.com/github/gh-aw/pkg/logger"
+
+var setIssueFieldLog = logger.New("workflow:set_issue_field")
+
+// SetIssueFieldConfig holds configuration for setting a single issue field from agent output.
+type SetIssueFieldConfig struct {
+ BaseSafeOutputConfig `yaml:",inline"`
+ SafeOutputTargetConfig `yaml:",inline"`
+ AllowedFields []string `yaml:"allowed-fields,omitempty"` // Optional list of allowed issue field names. If omitted or empty, any field is allowed. Use ["*"] to explicitly allow all.
+}
+
+// parseSetIssueFieldConfig handles set-issue-field configuration.
+func (c *Compiler) parseSetIssueFieldConfig(outputMap map[string]any) *SetIssueFieldConfig {
+ config := parseConfigScaffold(outputMap, "set-issue-field", setIssueFieldLog, func(err error) *SetIssueFieldConfig {
+ setIssueFieldLog.Printf("Failed to unmarshal set-issue-field config, disabling handler: %v", err)
+ return nil
+ })
+ if config != nil {
+ setIssueFieldLog.Printf("Parsed configuration: target=%s", config.Target)
+ }
+ return config
+}
diff --git a/pkg/workflow/set_issue_field_handler_config_test.go b/pkg/workflow/set_issue_field_handler_config_test.go
new file mode 100644
index 0000000000..3297a37d95
--- /dev/null
+++ b/pkg/workflow/set_issue_field_handler_config_test.go
@@ -0,0 +1,84 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "encoding/json"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/testutil"
+)
+
+func TestSetIssueFieldHandlerConfigIncludesAllowedFields(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "set-issue-field-handler-config-test")
+
+ testContent := `---
+name: Test Set Issue Field Handler Config
+on: workflow_dispatch
+permissions:
+ contents: read
+engine: copilot
+safe-outputs:
+ set-issue-field:
+ max: 2
+ allowed-fields: [Priority, Iteration]
+---
+
+Set issue field values.
+`
+
+ testFile := filepath.Join(tmpDir, "test-set-issue-field-handler-config.md")
+ if err := os.WriteFile(testFile, []byte(testContent), 0644); err != nil {
+ t.Fatal(err)
+ }
+
+ compiler := NewCompiler()
+ if err := compiler.CompileWorkflow(testFile); err != nil {
+ t.Fatalf("failed to compile workflow: %v", err)
+ }
+
+ outputFile := filepath.Join(tmpDir, "test-set-issue-field-handler-config.lock.yml")
+ compiledContent, err := os.ReadFile(outputFile)
+ if err != nil {
+ t.Fatalf("failed to read compiled output: %v", err)
+ }
+
+ lines := strings.Split(string(compiledContent), "\n")
+ var configJSON string
+ for _, line := range lines {
+ if strings.Contains(line, "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG:") {
+ parts := strings.SplitN(line, "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG:", 2)
+ if len(parts) == 2 {
+ configJSON = strings.TrimSpace(parts[1])
+ configJSON = strings.Trim(configJSON, "\"")
+ configJSON = strings.ReplaceAll(configJSON, "\\\"", "\"")
+ break
+ }
+ }
+ }
+
+ if configJSON == "" {
+ t.Fatal("could not extract handler config JSON")
+ }
+
+ var config map[string]any
+ if err := json.Unmarshal([]byte(configJSON), &config); err != nil {
+ t.Fatalf("failed to parse handler config JSON: %v\njson: %s", err, configJSON)
+ }
+
+ setIssueFieldConfig, ok := config["set_issue_field"].(map[string]any)
+ if !ok {
+ t.Fatal("expected set_issue_field in handler config")
+ }
+
+ allowedFields, ok := setIssueFieldConfig["allowed_fields"].([]any)
+ if !ok {
+ t.Fatal("expected allowed_fields array in set_issue_field config")
+ }
+ if len(allowedFields) != 2 || allowedFields[0] != "Priority" || allowedFields[1] != "Iteration" {
+ t.Fatalf("expected allowed_fields=[Priority, Iteration], got: %v", allowedFields)
+ }
+}
diff --git a/pkg/workflow/tool_description_enhancer.go b/pkg/workflow/tool_description_enhancer.go
index dfae02423c..01902c4436 100644
--- a/pkg/workflow/tool_description_enhancer.go
+++ b/pkg/workflow/tool_description_enhancer.go
@@ -23,6 +23,17 @@ func formatStringList(items []string) string {
return "[" + strings.Join(quoted, " ") + "]"
}
+func appendAllowedIssueFieldsConstraint(constraints *[]string, allowedFields []string) {
+ if len(allowedFields) == 0 {
+ return
+ }
+ if slices.Contains(allowedFields, "*") {
+ *constraints = append(*constraints, "Any issue field is allowed.")
+ return
+ }
+ *constraints = append(*constraints, fmt.Sprintf("Only these issue fields are allowed: %s.", formatStringList(allowedFields)))
+}
+
// enhanceToolDescription adds configuration-specific constraints to tool descriptions
// This provides agents with context about limits and restrictions configured in the workflow
func enhanceToolDescription(toolName, baseDescription string, safeOutputs *SafeOutputsConfig) string {
@@ -50,13 +61,7 @@ func enhanceToolDescription(toolName, baseDescription string, safeOutputs *SafeO
if len(config.AllowedLabels) > 0 {
constraints = append(constraints, fmt.Sprintf("Only these labels are allowed: %s.", formatStringList(config.AllowedLabels)))
}
- if len(config.AllowedFields) > 0 {
- if slices.Contains(config.AllowedFields, "*") {
- constraints = append(constraints, "Any issue field is allowed.")
- } else {
- constraints = append(constraints, fmt.Sprintf("Only these issue fields are allowed: %s.", formatStringList(config.AllowedFields)))
- }
- }
+ appendAllowedIssueFieldsConstraint(&constraints, config.AllowedFields)
if len(config.Assignees) > 0 {
constraints = append(constraints, fmt.Sprintf("Assignees %s will be automatically assigned.", formatStringList(config.Assignees)))
}
@@ -65,6 +70,17 @@ func enhanceToolDescription(toolName, baseDescription string, safeOutputs *SafeO
}
}
+ case "set_issue_field":
+ if config := safeOutputs.SetIssueField; config != nil {
+ if templatableIntValue(config.Max) > 0 {
+ constraints = append(constraints, fmt.Sprintf("Maximum %d issue field update(s) can be made.", templatableIntValue(config.Max)))
+ }
+ appendAllowedIssueFieldsConstraint(&constraints, config.AllowedFields)
+ if config.TargetRepoSlug != "" {
+ constraints = append(constraints, fmt.Sprintf("Issue fields will be updated in repository %q.", config.TargetRepoSlug))
+ }
+ }
+
case "create_agent_session":
if config := safeOutputs.CreateAgentSessions; config != nil {
if templatableIntValue(config.Max) > 0 {
diff --git a/pkg/workflow/tool_description_enhancer_test.go b/pkg/workflow/tool_description_enhancer_test.go
index 3e9f2cb23e..53736a38a8 100644
--- a/pkg/workflow/tool_description_enhancer_test.go
+++ b/pkg/workflow/tool_description_enhancer_test.go
@@ -33,3 +33,30 @@ func TestEnhanceToolDescriptionCreateIssueAllowedFieldsList(t *testing.T) {
t.Fatalf("expected restrictive fields message in description, got: %s", description)
}
}
+
+func TestEnhanceToolDescriptionSetIssueFieldAllowedFieldsWildcard(t *testing.T) {
+ description := enhanceToolDescription("set_issue_field", "Set one issue field.", &SafeOutputsConfig{
+ SetIssueField: &SetIssueFieldConfig{
+ AllowedFields: []string{"*"},
+ },
+ })
+
+ if !strings.Contains(description, "Any issue field is allowed.") {
+ t.Fatalf("expected wildcard message in description, got: %s", description)
+ }
+ if strings.Contains(description, "Only these issue fields are allowed") {
+ t.Fatalf("did not expect restrictive fields message for wildcard, got: %s", description)
+ }
+}
+
+func TestEnhanceToolDescriptionSetIssueFieldAllowedFieldsList(t *testing.T) {
+ description := enhanceToolDescription("set_issue_field", "Set one issue field.", &SafeOutputsConfig{
+ SetIssueField: &SetIssueFieldConfig{
+ AllowedFields: []string{"Priority", "Iteration"},
+ },
+ })
+
+ if !strings.Contains(description, "Only these issue fields are allowed: [\"Priority\" \"Iteration\"].") {
+ t.Fatalf("expected restrictive fields message in description, got: %s", description)
+ }
+}
diff --git a/pkg/workflow/unified_prompt_step.go b/pkg/workflow/unified_prompt_step.go
index 55a916c2bd..57ac39c7cd 100644
--- a/pkg/workflow/unified_prompt_step.go
+++ b/pkg/workflow/unified_prompt_step.go
@@ -649,6 +649,9 @@ func buildSafeOutputsSections(safeOutputs *SafeOutputsConfig) []PromptSection {
if safeOutputs.SetIssueType != nil {
tools = append(tools, toolWithMaxBudget("set_issue_type", safeOutputs.SetIssueType.Max))
}
+ if safeOutputs.SetIssueField != nil {
+ tools = append(tools, toolWithMaxBudget("set_issue_field", safeOutputs.SetIssueField.Max))
+ }
if safeOutputs.DispatchWorkflow != nil {
tools = append(tools, toolWithMaxBudget("dispatch_workflow", safeOutputs.DispatchWorkflow.Max))
}