From 7160ff82c4ac8a2d39b675f84da2752f73e0ff5b Mon Sep 17 00:00:00 2001 From: Geoff Hutchison Date: Wed, 18 Mar 2026 14:39:17 -0400 Subject: [PATCH 1/6] Add a script to parse the repositories and suggest update PRs Signed-off-by: Geoff Hutchison --- .github/scripts/parse_plugins.py | 297 +++++++++++++++++++++ .github/workflows/check-plugin-updates.yml | 196 ++++++++++++++ 2 files changed, 493 insertions(+) create mode 100644 .github/scripts/parse_plugins.py create mode 100644 .github/workflows/check-plugin-updates.yml diff --git a/.github/scripts/parse_plugins.py b/.github/scripts/parse_plugins.py new file mode 100644 index 0000000..f67593d --- /dev/null +++ b/.github/scripts/parse_plugins.py @@ -0,0 +1,297 @@ +#!/usr/bin/env python3 +"""Parse repositories.toml and provide plugin information for GitHub Actions.""" + +import argparse +import json +import subprocess +import sys + +# Python 3.11+ has tomllib in stdlib; fall back to tomli +try: + import tomllib +except ImportError: + try: + import tomli as tomllib + except ImportError: + print("ERROR: Need Python 3.11+ or 'pip install tomli'", file=sys.stderr) + sys.exit(1) + + +TOML_PATH = "repositories.toml" + +# Keys that are file-level guidance comments, not plugin entries +NON_PLUGIN_KEYS: set[str] = set() + + +def load_toml(path: str) -> dict: + with open(path, "rb") as f: + return tomllib.load(f) + + +def extract_plugins(data: dict) -> dict: + """Return only the plugin tables from the parsed TOML.""" + plugins = {} + for key, value in data.items(): + if isinstance(value, dict) and key not in NON_PLUGIN_KEYS: + plugins[key] = value + return plugins + + +def cmd_list(): + """Print all plugins as a JSON array.""" + data = load_toml(TOML_PATH) + plugins = extract_plugins(data) + result = [] + for name, info in plugins.items(): + entry = {"name": name} + git = info.get("git", {}) + if git: + entry["repo"] = git.get("repo", "") + entry["commit"] = git.get("commit", "") + src = info.get("src", {}) + if src: + 
entry["src_url"] = src.get("url", "") + entry["src_sha256"] = src.get("sha256", "") + entry["path"] = info.get("path", ".") + entry["release_tag"] = info.get("release-tag", "") + entry["plugin_type"] = info.get("plugin-type", "pypkg") + entry["metadata"] = info.get("metadata", "pyproject.toml") + result.append(entry) + print(json.dumps(result, indent=2)) + + +def cmd_check_updates(): + """ + For each git-based plugin, check if the upstream default branch + has moved past the pinned commit. Output JSON with update info. + """ + data = load_toml(TOML_PATH) + plugins = extract_plugins(data) + updates = [] + + for name, info in plugins.items(): + git = info.get("git", {}) + repo_url = git.get("repo", "") + pinned_commit = git.get("commit", "") + + if not repo_url or not pinned_commit: + continue + + # Use git ls-remote to get the current HEAD of the default branch + try: + result = subprocess.run( + ["git", "ls-remote", repo_url, "HEAD"], + capture_output=True, + text=True, + timeout=30, + ) + if result.returncode != 0: + print( + f"WARNING: Could not reach {repo_url}: {result.stderr.strip()}", + file=sys.stderr, + ) + continue + + lines = result.stdout.strip().split("\n") + if not lines or not lines[0]: + continue + + remote_head = lines[0].split()[0] + + if remote_head != pinned_commit: + updates.append( + { + "name": name, + "repo": repo_url, + "pinned_commit": pinned_commit, + "latest_commit": remote_head, + "release_tag": info.get("release-tag", ""), + } + ) + except subprocess.TimeoutExpired: + print(f"WARNING: Timeout reaching {repo_url}", file=sys.stderr) + except Exception as e: + print(f"WARNING: Error checking {repo_url}: {e}", file=sys.stderr) + + print(json.dumps(updates, indent=2)) + + +def cmd_diff(base_path: str, head_path: str): + """ + Compare two versions of repositories.toml. + Output JSON describing added, removed, and modified plugins. 
+ """ + base_data = extract_plugins(load_toml(base_path)) + head_data = extract_plugins(load_toml(head_path)) + + base_names = set(base_data.keys()) + head_names = set(head_data.keys()) + + added = [] + removed = [] + modified = [] + + for name in head_names - base_names: + info = head_data[name] + git = info.get("git", {}) + added.append( + { + "name": name, + "repo": git.get("repo", ""), + "commit": git.get("commit", ""), + "path": info.get("path", "."), + "plugin_type": info.get("plugin-type", "pypkg"), + } + ) + + for name in base_names - head_names: + removed.append({"name": name}) + + for name in base_names & head_names: + if base_data[name] != head_data[name]: + old_git = base_data[name].get("git", {}) + new_git = head_data[name].get("git", {}) + modified.append( + { + "name": name, + "repo": new_git.get("repo", old_git.get("repo", "")), + "old_commit": old_git.get("commit", ""), + "new_commit": new_git.get("commit", ""), + "path": head_data[name].get("path", "."), + "plugin_type": head_data[name].get("plugin-type", "pypkg"), + "changed_fields": [ + k + for k in set( + list(base_data[name].keys()) + list(head_data[name].keys()) + ) + if base_data[name].get(k) != head_data[name].get(k) + ], + } + ) + + result = { + "added": added, + "removed": removed, + "modified": modified, + "total_changes": len(added) + len(removed) + len(modified), + } + print(json.dumps(result, indent=2)) + + +def cmd_validate(path: str): + """Validate the structure of a repositories.toml file.""" + errors = [] + warnings = [] + + try: + data = load_toml(path) + except Exception as e: + errors.append(f"Failed to parse TOML: {e}") + print(json.dumps({"valid": False, "errors": errors, "warnings": warnings})) + return + + plugins = extract_plugins(data) + + for name, info in plugins.items(): + prefix = f"[{name}]" + git = info.get("git", {}) + src = info.get("src", {}) + + # Must have exactly one of git or src + has_git = bool(git) + has_src = bool(src) + if not has_git and not has_src: + 
errors.append(f"{prefix}: Must have either 'git' or 'src' section") + elif has_git and has_src: + errors.append(f"{prefix}: Cannot have both 'git' and 'src' sections") + + if has_git: + if not git.get("repo"): + errors.append(f"{prefix}: Missing git.repo") + elif not git["repo"].endswith(".git"): + warnings.append(f"{prefix}: git.repo should end with '.git'") + + commit = git.get("commit", "") + if not commit: + errors.append(f"{prefix}: Missing git.commit") + elif len(commit) != 40: + errors.append( + f"{prefix}: git.commit should be a full 40-char SHA, " + f"got {len(commit)} chars" + ) + + if has_src: + if not src.get("url"): + errors.append(f"{prefix}: Missing src.url") + if not src.get("sha256"): + errors.append(f"{prefix}: Missing src.sha256") + + # Validate optional fields + plugin_type = info.get("plugin-type", "pypkg") + if plugin_type not in ("pypkg", "pyscript"): + errors.append( + f"{prefix}: plugin-type must be 'pypkg' or 'pyscript', " + f"got '{plugin_type}'" + ) + + metadata = info.get("metadata", "pyproject.toml") + if metadata not in ("pyproject.toml", "avogadro.toml"): + errors.append( + f"{prefix}: metadata must be 'pyproject.toml' or 'avogadro.toml', " + f"got '{metadata}'" + ) + + path_val = info.get("path", ".") + if "\\" in path_val: + errors.append(f"{prefix}: path should use '/' separators, not '\\'") + + result = { + "valid": len(errors) == 0, + "errors": errors, + "warnings": warnings, + "plugin_count": len(plugins), + } + print(json.dumps(result, indent=2)) + + +def main(): + parser = argparse.ArgumentParser( + description="Parse repositories.toml and provide plugin information " + "for GitHub Actions." 
+ ) + subparsers = parser.add_subparsers(dest="command", required=True) + + subparsers.add_parser("list", help="List all plugins as JSON") + + subparsers.add_parser( + "check-updates", + help="Check for upstream updates (new commits on default branch)", + ) + + diff_parser = subparsers.add_parser( + "diff", help="Diff two versions of repositories.toml to find changes" + ) + diff_parser.add_argument("base_file", help="Base repositories.toml file") + diff_parser.add_argument("head_file", help="Head repositories.toml file") + + validate_parser = subparsers.add_parser( + "validate", help="Validate the TOML structure" + ) + validate_parser.add_argument( + "file", nargs="?", default=TOML_PATH, help="Path to TOML file to validate" + ) + + args = parser.parse_args() + + if args.command == "list": + cmd_list() + elif args.command == "check-updates": + cmd_check_updates() + elif args.command == "diff": + cmd_diff(args.base_file, args.head_file) + elif args.command == "validate": + cmd_validate(args.file) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/check-plugin-updates.yml b/.github/workflows/check-plugin-updates.yml new file mode 100644 index 0000000..e350d2d --- /dev/null +++ b/.github/workflows/check-plugin-updates.yml @@ -0,0 +1,196 @@ +# .github/workflows/check-plugin-updates.yml +# +# Runs daily (and on-demand). For each plugin in repositories.toml, +# checks whether the upstream repo has new commits past the pinned SHA. +# If updates are found, opens one PR per plugin with the new commit SHA. 
# Runs daily (and on-demand). For each plugin in repositories.toml,
# checks whether the upstream repo has new commits past the pinned SHA.
# If updates are found, opens one PR per plugin with the new commit SHA.
#
# Security note: user input and upstream-controlled text (commit messages,
# tag names) are passed to scripts via `env:` rather than interpolated with
# `${{ }}` inside `run:` bodies, to avoid shell-injection.

name: Check Plugin Updates

on:
  schedule:
    # 06:00 UTC daily
    - cron: "0 6 * * *"
  workflow_dispatch:
    inputs:
      plugin_name:
        description: "Check a specific plugin (leave blank for all)"
        required: false
        type: string

permissions:
  contents: write
  pull-requests: write

jobs:
  check-updates:
    runs-on: ubuntu-latest
    outputs:
      updates: ${{ steps.detect.outputs.updates }}
      count: ${{ steps.detect.outputs.count }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: "3.12"

      - name: Detect upstream updates
        id: detect
        env:
          # User input goes through the environment, never template-expanded
          # into the script body (prevents shell injection).
          PLUGIN_NAME: ${{ inputs.plugin_name }}
        run: |
          updates=$(python .github/scripts/parse_plugins.py check-updates)
          echo "Raw updates: $updates"

          # If a specific plugin was requested, filter to just that one
          if [ -n "$PLUGIN_NAME" ]; then
            updates=$(echo "$updates" | python3 -c "
          import json, os, sys
          data = json.load(sys.stdin)
          wanted = os.environ['PLUGIN_NAME']
          print(json.dumps([p for p in data if p['name'] == wanted]))
          ")
          fi

          count=$(echo "$updates" | python3 -c "import json,sys; print(len(json.load(sys.stdin)))")
          echo "updates=$(echo "$updates" | jq -c .)" >> "$GITHUB_OUTPUT"
          echo "count=$count" >> "$GITHUB_OUTPUT"
          echo "### Plugin Update Check" >> "$GITHUB_STEP_SUMMARY"
          echo "Found **$count** plugin(s) with upstream updates." >> "$GITHUB_STEP_SUMMARY"

  open-pr:
    needs: check-updates
    if: needs.check-updates.outputs.count != '0'
    runs-on: ubuntu-latest
    strategy:
      # Process one plugin at a time to avoid branch conflicts
      max-parallel: 1
      matrix:
        plugin: ${{ fromJson(needs.check-updates.outputs.updates) }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: "3.12"

      - name: Check if PR already exists
        id: check-pr
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          branch="update/${{ matrix.plugin.name }}"
          existing=$(gh pr list --head "$branch" --state open --json number -q '.[0].number // empty')
          if [ -n "$existing" ]; then
            echo "skip=true" >> "$GITHUB_OUTPUT"
            echo "PR #$existing already open for ${{ matrix.plugin.name }}, skipping."
          else
            echo "skip=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Gather commit info
        if: steps.check-pr.outputs.skip == 'false'
        id: commit-info
        run: |
          repo_url="${{ matrix.plugin.repo }}"
          latest="${{ matrix.plugin.latest_commit }}"

          # Try to get the commit message for the PR body.
          # Convert git URL to GitHub API URL.
          api_url=$(echo "$repo_url" | sed 's|\.git$||' | sed 's|github.com|api.github.com/repos|')
          commit_msg=$(curl -sf "${api_url}/commits/${latest}" \
            | python3 -c "import json,sys; d=json.load(sys.stdin); print(d['commit']['message'][:500])" \
            2>/dev/null || echo "(could not fetch commit message)")

          # Check whether the new commit corresponds to a release tag
          latest_tag=$(curl -sf "${api_url}/tags?per_page=5" \
            | python3 -c "
          import json, sys
          tags = json.load(sys.stdin)
          for t in tags:
              if t['commit']['sha'] == '${latest}':
                  print(t['name'])
                  break
          " 2>/dev/null || true)

          # Multiline output needs the <<DELIMITER syntax
          echo "commit_msg<<EOF" >> "$GITHUB_OUTPUT"
          echo "$commit_msg" >> "$GITHUB_OUTPUT"
          echo "EOF" >> "$GITHUB_OUTPUT"
          echo "latest_tag=$latest_tag" >> "$GITHUB_OUTPUT"

      - name: Update repositories.toml
        if: steps.check-pr.outputs.skip == 'false'
        run: |
          plugin="${{ matrix.plugin.name }}"
          old_commit="${{ matrix.plugin.pinned_commit }}"
          new_commit="${{ matrix.plugin.latest_commit }}"
          latest_tag="${{ steps.commit-info.outputs.latest_tag }}"

          # Replace the commit SHA
          sed -i "s|$old_commit|$new_commit|" repositories.toml

          # If we found a release tag, update or add release-tag
          if [ -n "$latest_tag" ]; then
            # Check if release-tag already exists for this plugin
            if grep -A5 "^\[${plugin}\]" repositories.toml | grep -q "release-tag"; then
              # Update existing release-tag (within the plugin's section)
              python3 -c "
          import re
          with open('repositories.toml', 'r') as f:
              content = f.read()
          # Find the plugin section and update release-tag within it
          pattern = r'(\[${plugin}\].*?release-tag\s*=\s*)\"[^\"]*\"'
          content = re.sub(pattern, r'\1\"${latest_tag}\"', content, flags=re.DOTALL)
          with open('repositories.toml', 'w') as f:
              f.write(content)
          "
            else
              # Add release-tag after the commit line
              sed -i "/^\[${plugin}\]/,/^$\|^\[/ {
                /git\.commit/a release-tag = \"${latest_tag}\"
              }" repositories.toml
            fi
          fi

      - name: Create Pull Request
        if: steps.check-pr.outputs.skip == 'false'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Upstream-controlled text must not be template-expanded into the
          # shell script — a crafted commit message could inject commands.
          COMMIT_MSG: ${{ steps.commit-info.outputs.commit_msg }}
          LATEST_TAG: ${{ steps.commit-info.outputs.latest_tag }}
        run: |
          plugin="${{ matrix.plugin.name }}"
          new_commit="${{ matrix.plugin.latest_commit }}"
          short_sha="${new_commit:0:7}"
          branch="update/${plugin}"
          tag_note=""
          if [ -n "$LATEST_TAG" ]; then
            tag_note=" (release: ${LATEST_TAG})"
          fi

          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git checkout -b "$branch"
          git add repositories.toml
          git commit -m "Update ${plugin} to ${short_sha}${tag_note}"
          git push -u origin "$branch" --force

          body="## Update \`${plugin}\` to \`${short_sha}\`${tag_note}

          **Repository:** ${{ matrix.plugin.repo }}
          **Previous commit:** \`${{ matrix.plugin.pinned_commit }}\`
          **New commit:** \`${new_commit}\`

          ### Latest commit message
          \`\`\`
          ${COMMIT_MSG}
          \`\`\`

          ---
          *This PR was automatically created by the plugin update checker.
          The security scan workflow will run automatically on this PR.
          Please review the scan results before merging.*"

          gh pr create \
            --title "Update ${plugin} to ${short_sha}${tag_note}" \
            --body "$body" \
            --label "automated,plugin-update" \
            --base master \
            --head "$branch"
# Runs on every PR that touches repositories.toml.
# Ensures:
#   1. The TOML is valid and well-structured
#   2. Only one plugin is added or modified per PR
#   3. Commits are full 40-char SHAs
#   4. No unexpected fields or structural issues

name: Validate Plugin PR

on:
  pull_request:
    paths:
      - "repositories.toml"

permissions:
  contents: read
  pull-requests: write

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout PR head
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          path: head

      - name: Checkout base branch
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          ref: ${{ github.event.pull_request.base.sha }}
          path: base

      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: "3.12"

      - name: Copy helper script
        run: cp head/.github/scripts/parse_plugins.py .

      # ── Step 1: Validate TOML structure ──
      - name: Validate TOML structure
        id: validate
        run: |
          echo "### TOML Validation" >> "$GITHUB_STEP_SUMMARY"
          result=$(python parse_plugins.py validate head/repositories.toml)
          valid=$(echo "$result" | python3 -c "import json,sys; print(json.load(sys.stdin)['valid'])")
          errors=$(echo "$result" | python3 -c "
          import json, sys
          d = json.load(sys.stdin)
          for e in d['errors']:
              print(f'- ❌ {e}')
          ")
          warnings=$(echo "$result" | python3 -c "
          import json, sys
          d = json.load(sys.stdin)
          for w in d['warnings']:
              print(f'- ⚠️ {w}')
          ")

          if [ "$valid" = "False" ]; then
            echo "❌ **TOML validation failed:**" >> "$GITHUB_STEP_SUMMARY"
            echo "$errors" >> "$GITHUB_STEP_SUMMARY"
            echo "toml_valid=false" >> "$GITHUB_OUTPUT"
          else
            echo "✅ TOML structure is valid." >> "$GITHUB_STEP_SUMMARY"
            echo "toml_valid=true" >> "$GITHUB_OUTPUT"
          fi

          if [ -n "$warnings" ]; then
            echo "" >> "$GITHUB_STEP_SUMMARY"
            echo "**Warnings:**" >> "$GITHUB_STEP_SUMMARY"
            echo "$warnings" >> "$GITHUB_STEP_SUMMARY"
          fi

      # ── Step 2: Diff to find what changed ──
      - name: Detect plugin changes
        id: diff
        run: |
          echo "### Plugin Changes" >> "$GITHUB_STEP_SUMMARY"
          diff_result=$(python parse_plugins.py diff base/repositories.toml head/repositories.toml)
          echo "$diff_result" | python3 -c "
          import json, sys
          d = json.load(sys.stdin)
          print(f\"Added: {len(d['added'])}\")
          print(f\"Modified: {len(d['modified'])}\")
          print(f\"Removed: {len(d['removed'])}\")
          print(f\"Total changes: {d['total_changes']}\")
          " >> "$GITHUB_STEP_SUMMARY"

          total=$(echo "$diff_result" | python3 -c "import json,sys; print(json.load(sys.stdin)['total_changes'])")
          added=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['added']))")
          modified=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['modified']))")
          removed=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['removed']))")

          echo "total=$total" >> "$GITHUB_OUTPUT"
          echo "added=$added" >> "$GITHUB_OUTPUT"
          echo "modified=$modified" >> "$GITHUB_OUTPUT"
          echo "removed=$removed" >> "$GITHUB_OUTPUT"
          echo "diff_json=$(echo "$diff_result" | jq -c .)" >> "$GITHUB_OUTPUT"

      # ── Step 3: Enforce single-plugin-per-PR rule ──
      - name: Check single plugin rule
        id: single-check
        run: |
          total="${{ steps.diff.outputs.total }}"

          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "### Single Plugin Rule" >> "$GITHUB_STEP_SUMMARY"

          if [ "$total" -eq 0 ]; then
            echo "⚠️ No plugin changes detected in repositories.toml." >> "$GITHUB_STEP_SUMMARY"
            echo "pass=true" >> "$GITHUB_OUTPUT"
          elif [ "$total" -eq 1 ]; then
            echo "✅ Exactly one plugin changed — rule satisfied." >> "$GITHUB_STEP_SUMMARY"
            echo "pass=true" >> "$GITHUB_OUTPUT"
          else
            echo "❌ **$total plugins changed.** Please submit one plugin change per PR." >> "$GITHUB_STEP_SUMMARY"
            echo "" >> "$GITHUB_STEP_SUMMARY"
            echo "This makes security review manageable and keeps the git history clean." >> "$GITHUB_STEP_SUMMARY"
            echo "pass=false" >> "$GITHUB_OUTPUT"
          fi

      # ── Step 4: Validate that changed repos are reachable ──
      - name: Verify repository accessibility
        id: repo-check
        env:
          # The diff JSON may contain quotes; pass it via the environment
          # instead of interpolating it into the script (avoids breakage
          # and shell injection).
          DIFF_JSON: ${{ steps.diff.outputs.diff_json }}
        run: |
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "### Repository Accessibility" >> "$GITHUB_STEP_SUMMARY"

          all_ok=true

          # while read -r keeps each JSON object intact even if a field
          # contains spaces (a $(...) for-loop would word-split it).
          echo "$DIFF_JSON" | jq -c '.added[], .modified[]' | while read -r plugin_json; do
            name=$(echo "$plugin_json" | jq -r '.name')
            repo=$(echo "$plugin_json" | jq -r '.repo // empty')
            commit=$(echo "$plugin_json" | jq -r '.commit // .new_commit // empty')

            if [ -z "$repo" ]; then
              continue
            fi

            echo -n "Checking \`$name\` at \`${commit:0:7}\`... " >> "$GITHUB_STEP_SUMMARY"

            # Verify the specific commit exists
            if git ls-remote "$repo" | grep -q "^${commit}"; then
              echo "✅ reachable" >> "$GITHUB_STEP_SUMMARY"
            else
              # ls-remote won't show arbitrary commits, try a shallow fetch
              tmpdir=$(mktemp -d)
              if git clone --bare --depth=1 "$repo" "$tmpdir" 2>/dev/null && \
                 git -C "$tmpdir" cat-file -t "$commit" 2>/dev/null; then
                echo "✅ reachable" >> "$GITHUB_STEP_SUMMARY"
              else
                echo "⚠️ commit not immediately verifiable (may need full clone)" >> "$GITHUB_STEP_SUMMARY"
              fi
              rm -rf "$tmpdir"
            fi
          done

          echo "repo_ok=$all_ok" >> "$GITHUB_OUTPUT"

      # ── Step 5: Post a summary comment on the PR ──
      - name: Post PR comment
        if: always()
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          toml_ok="${{ steps.validate.outputs.toml_valid }}"
          single_ok="${{ steps.single-check.outputs.pass }}"
          total="${{ steps.diff.outputs.total }}"

          if [ "$toml_ok" = "true" ] && [ "$single_ok" = "true" ]; then
            status="✅ All validation checks passed"
          else
            status="❌ Some validation checks failed"
          fi

          comment="## Plugin PR Validation

          ${status}

          | Check | Result |
          |-------|--------|
          | TOML structure | $([ "$toml_ok" = "true" ] && echo "✅ Valid" || echo "❌ Invalid") |
          | Single plugin rule | $([ "$single_ok" = "true" ] && echo "✅ Pass ($total change)" || echo "❌ Fail ($total changes)") |

          See the [workflow summary](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details."

          gh pr comment ${{ github.event.pull_request.number }} --body "$comment"

      # ── Final: fail the job if any check failed ──
      - name: Fail on validation errors
        if: steps.validate.outputs.toml_valid == 'false' || steps.single-check.outputs.pass == 'false'
        run: |
          echo "::error::Validation failed. See job summary for details."
          exit 1
+ exit 1 From de5f7e78912ac954e8bb356345efaefb27694037 Mon Sep 17 00:00:00 2001 From: Geoff Hutchison Date: Sat, 21 Mar 2026 16:33:46 -0400 Subject: [PATCH 3/6] Move some parsing to shared script - Check diff of repositories.toml first - Use GitHub token for authentication - Validate with --strict for any changed plugins Signed-off-by: Geoff Hutchison --- .github/workflows/validate-pr.yml | 68 +++---- generate_index.py | 2 +- {.github/scripts => scripts}/parse_plugins.py | 123 +---------- scripts/plugin_validation.py | 191 ++++++++++++++++++ 4 files changed, 230 insertions(+), 154 deletions(-) rename {.github/scripts => scripts}/parse_plugins.py (60%) create mode 100644 scripts/plugin_validation.py diff --git a/.github/workflows/validate-pr.yml b/.github/workflows/validate-pr.yml index c0b5119..2bee35e 100644 --- a/.github/workflows/validate-pr.yml +++ b/.github/workflows/validate-pr.yml @@ -41,42 +41,7 @@ jobs: - name: Copy helper script run: cp head/.github/scripts/parse_plugins.py . - # ── Step 1: Validate TOML structure ── - - name: Validate TOML structure - id: validate - run: | - echo "### TOML Validation" >> "$GITHUB_STEP_SUMMARY" - result=$(python parse_plugins.py validate head/repositories.toml) - valid=$(echo "$result" | python3 -c "import json,sys; print(json.load(sys.stdin)['valid'])") - errors=$(echo "$result" | python3 -c " - import json, sys - d = json.load(sys.stdin) - for e in d['errors']: - print(f'- ❌ {e}') - ") - warnings=$(echo "$result" | python3 -c " - import json, sys - d = json.load(sys.stdin) - for w in d['warnings']: - print(f'- ⚠️ {w}') - ") - - if [ "$valid" = "False" ]; then - echo "❌ **TOML validation failed:**" >> "$GITHUB_STEP_SUMMARY" - echo "$errors" >> "$GITHUB_STEP_SUMMARY" - echo "toml_valid=false" >> "$GITHUB_OUTPUT" - else - echo "✅ TOML structure is valid." 
>> "$GITHUB_STEP_SUMMARY" - echo "toml_valid=true" >> "$GITHUB_OUTPUT" - fi - - if [ -n "$warnings" ]; then - echo "" >> "$GITHUB_STEP_SUMMARY" - echo "**Warnings:**" >> "$GITHUB_STEP_SUMMARY" - echo "$warnings" >> "$GITHUB_STEP_SUMMARY" - fi - - # ── Step 2: Diff to find what changed ── + # ── Step 1: Diff to find what changed ── - name: Detect plugin changes id: diff run: | @@ -96,11 +61,42 @@ jobs: modified=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['modified']))") removed=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['removed']))") + # Extract names of added/modified plugins for validation + changed=$(echo "$diff_result" | python3 -c " + import json, sys + d = json.load(sys.stdin) + names = [p['name'] for p in d['added'] + d['modified']] + print(' '.join(names)) + ") + echo "total=$total" >> "$GITHUB_OUTPUT" echo "added=$added" >> "$GITHUB_OUTPUT" echo "modified=$modified" >> "$GITHUB_OUTPUT" echo "removed=$removed" >> "$GITHUB_OUTPUT" echo "diff_json=$(echo "$diff_result" | jq -c .)" >> "$GITHUB_OUTPUT" + echo "changed=$changed" >> "$GITHUB_OUTPUT" + + # ── Step 2: Validate changed plugin metadata ── + - name: Install dependencies + run: pip install PyGitHub + + - name: Validate plugin metadata + id: validate + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + echo "### Plugin Validation" >> "$GITHUB_STEP_SUMMARY" + changed="${{ steps.diff.outputs.changed }}" + if [ -z "$changed" ]; then + echo "✅ No added/modified plugins to validate." >> "$GITHUB_STEP_SUMMARY" + echo "toml_valid=true" >> "$GITHUB_OUTPUT" + elif python head/generate_index.py -t "$GH_TOKEN" --strict -p $changed; then + echo "✅ All changed plugins validated successfully." 
>> "$GITHUB_STEP_SUMMARY" + echo "toml_valid=true" >> "$GITHUB_OUTPUT" + else + echo "❌ **Plugin validation failed.**" >> "$GITHUB_STEP_SUMMARY" + echo "toml_valid=false" >> "$GITHUB_OUTPUT" + fi # ── Step 3: Enforce single-plugin-per-PR rule ── - name: Check single plugin rule diff --git a/generate_index.py b/generate_index.py index 297b6b2..027353d 100644 --- a/generate_index.py +++ b/generate_index.py @@ -396,7 +396,7 @@ def get_metadata_all(repos: dict[str, dict], gh: Github, strict: bool) -> list[d if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate plugin index") - parser.add_argument("--token", "-t", help="GitHub personal access token") + parser.add_argument("--token", "-t", help="GitHub access token") parser.add_argument( "--pretty", action="store_true", help="Pretty-print JSON output" ) diff --git a/.github/scripts/parse_plugins.py b/scripts/parse_plugins.py similarity index 60% rename from .github/scripts/parse_plugins.py rename to scripts/parse_plugins.py index f67593d..89df45c 100644 --- a/.github/scripts/parse_plugins.py +++ b/scripts/parse_plugins.py @@ -6,36 +6,15 @@ import subprocess import sys -# Python 3.11+ has tomllib in stdlib; fall back to tomli -try: - import tomllib -except ImportError: - try: - import tomli as tomllib - except ImportError: - print("ERROR: Need Python 3.11+ or 'pip install tomli'", file=sys.stderr) - sys.exit(1) +from plugin_validation import ( + extract_plugins, + load_toml, + validate_all_plugins, +) TOML_PATH = "repositories.toml" -# Keys that are file-level guidance comments, not plugin entries -NON_PLUGIN_KEYS: set[str] = set() - - -def load_toml(path: str) -> dict: - with open(path, "rb") as f: - return tomllib.load(f) - - -def extract_plugins(data: dict) -> dict: - """Return only the plugin tables from the parsed TOML.""" - plugins = {} - for key, value in data.items(): - if isinstance(value, dict) and key not in NON_PLUGIN_KEYS: - plugins[key] = value - return plugins - def cmd_list(): 
"""Print all plugins as a JSON array.""" @@ -177,83 +156,6 @@ def cmd_diff(base_path: str, head_path: str): } print(json.dumps(result, indent=2)) - -def cmd_validate(path: str): - """Validate the structure of a repositories.toml file.""" - errors = [] - warnings = [] - - try: - data = load_toml(path) - except Exception as e: - errors.append(f"Failed to parse TOML: {e}") - print(json.dumps({"valid": False, "errors": errors, "warnings": warnings})) - return - - plugins = extract_plugins(data) - - for name, info in plugins.items(): - prefix = f"[{name}]" - git = info.get("git", {}) - src = info.get("src", {}) - - # Must have exactly one of git or src - has_git = bool(git) - has_src = bool(src) - if not has_git and not has_src: - errors.append(f"{prefix}: Must have either 'git' or 'src' section") - elif has_git and has_src: - errors.append(f"{prefix}: Cannot have both 'git' and 'src' sections") - - if has_git: - if not git.get("repo"): - errors.append(f"{prefix}: Missing git.repo") - elif not git["repo"].endswith(".git"): - warnings.append(f"{prefix}: git.repo should end with '.git'") - - commit = git.get("commit", "") - if not commit: - errors.append(f"{prefix}: Missing git.commit") - elif len(commit) != 40: - errors.append( - f"{prefix}: git.commit should be a full 40-char SHA, " - f"got {len(commit)} chars" - ) - - if has_src: - if not src.get("url"): - errors.append(f"{prefix}: Missing src.url") - if not src.get("sha256"): - errors.append(f"{prefix}: Missing src.sha256") - - # Validate optional fields - plugin_type = info.get("plugin-type", "pypkg") - if plugin_type not in ("pypkg", "pyscript"): - errors.append( - f"{prefix}: plugin-type must be 'pypkg' or 'pyscript', " - f"got '{plugin_type}'" - ) - - metadata = info.get("metadata", "pyproject.toml") - if metadata not in ("pyproject.toml", "avogadro.toml"): - errors.append( - f"{prefix}: metadata must be 'pyproject.toml' or 'avogadro.toml', " - f"got '{metadata}'" - ) - - path_val = info.get("path", ".") - if "\\" 
in path_val: - errors.append(f"{prefix}: path should use '/' separators, not '\\'") - - result = { - "valid": len(errors) == 0, - "errors": errors, - "warnings": warnings, - "plugin_count": len(plugins), - } - print(json.dumps(result, indent=2)) - - def main(): parser = argparse.ArgumentParser( description="Parse repositories.toml and provide plugin information " @@ -261,8 +163,6 @@ def main(): ) subparsers = parser.add_subparsers(dest="command", required=True) - subparsers.add_parser("list", help="List all plugins as JSON") - subparsers.add_parser( "check-updates", help="Check for upstream updates (new commits on default branch)", @@ -274,23 +174,12 @@ def main(): diff_parser.add_argument("base_file", help="Base repositories.toml file") diff_parser.add_argument("head_file", help="Head repositories.toml file") - validate_parser = subparsers.add_parser( - "validate", help="Validate the TOML structure" - ) - validate_parser.add_argument( - "file", nargs="?", default=TOML_PATH, help="Path to TOML file to validate" - ) - args = parser.parse_args() - if args.command == "list": - cmd_list() - elif args.command == "check-updates": + if args.command == "check-updates": cmd_check_updates() elif args.command == "diff": cmd_diff(args.base_file, args.head_file) - elif args.command == "validate": - cmd_validate(args.file) if __name__ == "__main__": diff --git a/scripts/plugin_validation.py b/scripts/plugin_validation.py new file mode 100644 index 0000000..dd52ade --- /dev/null +++ b/scripts/plugin_validation.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 +"""Shared validation logic and utilities for plugin repository management. + +Used by both generate_index.py and parse_plugins.py. 
+""" + +import sys + +# Python 3.11+ has tomllib in stdlib; fall back to tomli +try: + import tomllib +except ImportError: + try: + import tomli as tomllib + except ImportError: + print("ERROR: Need Python 3.11+ or 'pip install tomli'", file=sys.stderr) + sys.exit(1) + + +# Valid plugin types +PLUGIN_TYPES = [ + "pyscript", + "pypkg", +] + +# Valid plugin feature types +FEATURE_TYPES = [ + "electrostatic-models", + "energy-models", + "file-formats", + "input-generators", + "menu-commands", +] + +# Valid metadata file names +METADATA_FILES = ["pyproject.toml", "avogadro.toml"] + +# Default values for optional keys in repositories.toml +REPO_DEFAULTS = { + "metadata": "pyproject.toml", + "plugin-type": "pypkg", +} + +# Keys that are file-level guidance comments, not plugin entries +NON_PLUGIN_KEYS: set[str] = set() + + +def load_toml(path: str) -> dict: + """Load and parse a TOML file.""" + with open(path, "rb") as f: + return tomllib.load(f) + + +def extract_plugins(data: dict) -> dict: + """Return only the plugin tables from parsed TOML data.""" + plugins = {} + for key, value in data.items(): + if isinstance(value, dict) and key not in NON_PLUGIN_KEYS: + plugins[key] = value + return plugins + + +def set_defaults(repo_info: dict): + """Set default values for optional keys in a repo_info dict. + + Modifies the dict in place. + """ + for k, v in REPO_DEFAULTS.items(): + repo_info.setdefault(k, v) + + +class ValidationResult: + """Collects errors and warnings from validation.""" + + def __init__(self): + self.errors: list[str] = [] + self.warnings: list[str] = [] + + @property + def valid(self) -> bool: + return len(self.errors) == 0 + + def error(self, msg: str): + self.errors.append(msg) + + def warn(self, msg: str): + self.warnings.append(msg) + + def raise_on_errors(self): + """Raise an AssertionError if there are any errors. + + Useful for callers that want exception-based validation. 
+ """ + if not self.valid: + raise AssertionError( + "Validation failed:\n" + "\n".join(f" - {e}" for e in self.errors) + ) + + +def validate_repo_info(name: str, info: dict) -> ValidationResult: + """Validate a single plugin entry from repositories.toml. + + Args: + name: The plugin table name (key in TOML). + info: The plugin's dict of values. + + Returns: + A ValidationResult with any errors and warnings. + """ + result = ValidationResult() + prefix = f"[{name}]" + + git = info.get("git", {}) + src = info.get("src", {}) + + # Must have exactly one of git or src + has_git = bool(git) + has_src = bool(src) + if not has_git and not has_src: + result.error(f"{prefix}: Must have either 'git' or 'src' section") + elif has_git and has_src: + result.error(f"{prefix}: Cannot have both 'git' and 'src' sections") + + if has_git: + if not git.get("repo"): + result.error(f"{prefix}: Missing git.repo") + elif not git["repo"].endswith(".git"): + result.warn(f"{prefix}: git.repo should end with '.git'") + + commit = git.get("commit", "") + if not commit: + result.error(f"{prefix}: Missing git.commit") + elif len(commit) != 40: + result.error( + f"{prefix}: git.commit should be a full 40-char SHA, " + f"got {len(commit)} chars" + ) + + if has_src: + if not src.get("url"): + result.error(f"{prefix}: Missing src.url") + if not src.get("sha256"): + result.error(f"{prefix}: Missing src.sha256") + + # Validate plugin-type + plugin_type = info.get("plugin-type", REPO_DEFAULTS["plugin-type"]) + if plugin_type not in PLUGIN_TYPES: + result.error( + f"{prefix}: plugin-type must be one of {PLUGIN_TYPES}, " + f"got '{plugin_type}'" + ) + + # Validate metadata file + metadata = info.get("metadata", REPO_DEFAULTS["metadata"]) + if metadata not in METADATA_FILES: + result.error( + f"{prefix}: metadata must be one of {METADATA_FILES}, " + f"got '{metadata}'" + ) + + # Validate path + path_val = info.get("path", ".") + if "\\" in path_val: + result.error(f"{prefix}: path should use '/' 
separators, not '\\'") + if path_val != "." and path_val.endswith("/"): + result.error(f"{prefix}: path should not end with '/'") + final_component = path_val.split("/") + if "." in final_component and path_val != ".": + result.error(f"{prefix}: path components should not be '.'") + + return result + + +def validate_all_plugins(data: dict) -> ValidationResult: + """Validate all plugin entries in parsed TOML data. + + Args: + data: The full parsed TOML dict. + + Returns: + A combined ValidationResult for all plugins. + """ + combined = ValidationResult() + plugins = extract_plugins(data) + + for name, info in plugins.items(): + plugin_result = validate_repo_info(name, info) + combined.errors.extend(plugin_result.errors) + combined.warnings.extend(plugin_result.warnings) + + return combined From 7816b33ae237f2990557727c8dce8b271d43d397 Mon Sep 17 00:00:00 2001 From: Geoff Hutchison Date: Sat, 21 Mar 2026 16:45:27 -0400 Subject: [PATCH 4/6] More cleanups Signed-off-by: Geoff Hutchison --- .github/workflows/validate-pr.yml | 93 ++++++++----------------------- scripts/parse_plugins.py | 3 +- scripts/plugin_validation.py | 20 +------ 3 files changed, 27 insertions(+), 89 deletions(-) diff --git a/.github/workflows/validate-pr.yml b/.github/workflows/validate-pr.yml index 2bee35e..e1b7132 100644 --- a/.github/workflows/validate-pr.yml +++ b/.github/workflows/validate-pr.yml @@ -38,43 +38,38 @@ jobs: with: python-version: "3.12" - - name: Copy helper script - run: cp head/.github/scripts/parse_plugins.py . + - name: Copy helper scripts + run: cp head/scripts/parse_plugins.py head/scripts/plugin_validation.py . 
# ── Step 1: Diff to find what changed ── - name: Detect plugin changes id: diff run: | - echo "### Plugin Changes" >> "$GITHUB_STEP_SUMMARY" diff_result=$(python parse_plugins.py diff base/repositories.toml head/repositories.toml) echo "$diff_result" | python3 -c " - import json, sys + import json, sys, os d = json.load(sys.stdin) - print(f\"Added: {len(d['added'])}\") - print(f\"Modified: {len(d['modified'])}\") - print(f\"Removed: {len(d['removed'])}\") - print(f\"Total changes: {d['total_changes']}\") - " >> "$GITHUB_STEP_SUMMARY" - - total=$(echo "$diff_result" | python3 -c "import json,sys; print(json.load(sys.stdin)['total_changes'])") - added=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['added']))") - modified=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['modified']))") - removed=$(echo "$diff_result" | python3 -c "import json,sys; print(len(json.load(sys.stdin)['removed']))") - - # Extract names of added/modified plugins for validation - changed=$(echo "$diff_result" | python3 -c " - import json, sys - d = json.load(sys.stdin) - names = [p['name'] for p in d['added'] + d['modified']] - print(' '.join(names)) - ") - - echo "total=$total" >> "$GITHUB_OUTPUT" - echo "added=$added" >> "$GITHUB_OUTPUT" - echo "modified=$modified" >> "$GITHUB_OUTPUT" - echo "removed=$removed" >> "$GITHUB_OUTPUT" + added = len(d['added']) + modified = len(d['modified']) + removed = len(d['removed']) + total = d['total_changes'] + changed = ' '.join(p['name'] for p in d['added'] + d['modified']) + + with open(os.environ['GITHUB_STEP_SUMMARY'], 'a') as summary: + summary.write('### Plugin Changes\n') + summary.write(f'Added: {added}\n') + summary.write(f'Modified: {modified}\n') + summary.write(f'Removed: {removed}\n') + summary.write(f'Total changes: {total}\n') + + with open(os.environ['GITHUB_OUTPUT'], 'a') as out: + out.write(f'total={total}\n') + out.write(f'added={added}\n') + 
out.write(f'modified={modified}\n') + out.write(f'removed={removed}\n') + out.write(f'changed={changed}\n') + " echo "diff_json=$(echo "$diff_result" | jq -c .)" >> "$GITHUB_OUTPUT" - echo "changed=$changed" >> "$GITHUB_OUTPUT" # ── Step 2: Validate changed plugin metadata ── - name: Install dependencies @@ -120,47 +115,7 @@ jobs: echo "pass=false" >> "$GITHUB_OUTPUT" fi - # ── Step 4: Validate that changed repos are reachable ── - - name: Verify repository accessibility - id: repo-check - run: | - echo "" >> "$GITHUB_STEP_SUMMARY" - echo "### Repository Accessibility" >> "$GITHUB_STEP_SUMMARY" - - diff_json='${{ steps.diff.outputs.diff_json }}' - all_ok=true - - # Check added and modified plugins - for plugin_json in $(echo "$diff_json" | jq -c '.added[], .modified[]'); do - name=$(echo "$plugin_json" | jq -r '.name') - repo=$(echo "$plugin_json" | jq -r '.repo // empty') - commit=$(echo "$plugin_json" | jq -r '.commit // .new_commit // empty') - - if [ -z "$repo" ]; then - continue - fi - - echo -n "Checking \`$name\` at \`${commit:0:7}\`... 
" >> "$GITHUB_STEP_SUMMARY" - - # Verify the specific commit exists - if git ls-remote "$repo" | grep -q "^${commit}"; then - echo "✅ reachable" >> "$GITHUB_STEP_SUMMARY" - else - # ls-remote won't show arbitrary commits, try a shallow fetch - tmpdir=$(mktemp -d) - if git clone --bare --depth=1 "$repo" "$tmpdir" 2>/dev/null && \ - git -C "$tmpdir" cat-file -t "$commit" 2>/dev/null; then - echo "✅ reachable" >> "$GITHUB_STEP_SUMMARY" - else - echo "⚠️ commit not immediately verifiable (may need full clone)" >> "$GITHUB_STEP_SUMMARY" - fi - rm -rf "$tmpdir" - fi - done - - echo "repo_ok=$all_ok" >> "$GITHUB_OUTPUT" - - # ── Step 5: Post a summary comment on the PR ── + # ── Step 4: Post a summary comment on the PR ── - name: Post PR comment if: always() env: diff --git a/scripts/parse_plugins.py b/scripts/parse_plugins.py index 89df45c..32089ed 100644 --- a/scripts/parse_plugins.py +++ b/scripts/parse_plugins.py @@ -8,8 +8,7 @@ from plugin_validation import ( extract_plugins, - load_toml, - validate_all_plugins, + load_toml ) diff --git a/scripts/plugin_validation.py b/scripts/plugin_validation.py index dd52ade..a59f1b9 100644 --- a/scripts/plugin_validation.py +++ b/scripts/plugin_validation.py @@ -6,15 +6,7 @@ import sys -# Python 3.11+ has tomllib in stdlib; fall back to tomli -try: - import tomllib -except ImportError: - try: - import tomli as tomllib - except ImportError: - print("ERROR: Need Python 3.11+ or 'pip install tomli'", file=sys.stderr) - sys.exit(1) +import tomllib # Valid plugin types @@ -41,10 +33,6 @@ "plugin-type": "pypkg", } -# Keys that are file-level guidance comments, not plugin entries -NON_PLUGIN_KEYS: set[str] = set() - - def load_toml(path: str) -> dict: """Load and parse a TOML file.""" with open(path, "rb") as f: @@ -53,11 +41,7 @@ def load_toml(path: str) -> dict: def extract_plugins(data: dict) -> dict: """Return only the plugin tables from parsed TOML data.""" - plugins = {} - for key, value in data.items(): - if isinstance(value, 
dict) and key not in NON_PLUGIN_KEYS: - plugins[key] = value - return plugins + return {key: value for key, value in data.items() if isinstance(value, dict)} def set_defaults(repo_info: dict): From 66de9729d834a443b45a8073f7633f925fa3fea1 Mon Sep 17 00:00:00 2001 From: Geoff Hutchison Date: Wed, 25 Mar 2026 20:18:45 -0400 Subject: [PATCH 5/6] Move generate_index.py as requested Signed-off-by: Geoff Hutchison --- .github/workflows/validate-pr.yml | 2 +- generate_index.py => scripts/generate_index.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename generate_index.py => scripts/generate_index.py (100%) diff --git a/.github/workflows/validate-pr.yml b/.github/workflows/validate-pr.yml index e1b7132..cb842c4 100644 --- a/.github/workflows/validate-pr.yml +++ b/.github/workflows/validate-pr.yml @@ -85,7 +85,7 @@ jobs: if [ -z "$changed" ]; then echo "✅ No added/modified plugins to validate." >> "$GITHUB_STEP_SUMMARY" echo "toml_valid=true" >> "$GITHUB_OUTPUT" - elif python head/generate_index.py -t "$GH_TOKEN" --strict -p $changed; then + elif python head/scripts/generate_index.py -t "$GH_TOKEN" --strict -p $changed; then echo "✅ All changed plugins validated successfully." 
>> "$GITHUB_STEP_SUMMARY" echo "toml_valid=true" >> "$GITHUB_OUTPUT" else diff --git a/generate_index.py b/scripts/generate_index.py similarity index 100% rename from generate_index.py rename to scripts/generate_index.py From b897a361fabc8d92464c808db28996c37a22f93c Mon Sep 17 00:00:00 2001 From: Geoff Hutchison Date: Wed, 25 Mar 2026 20:19:01 -0400 Subject: [PATCH 6/6] Fix check-plugin-updates after move of scripts Signed-off-by: Geoff Hutchison --- .github/workflows/check-plugin-updates.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-plugin-updates.yml b/.github/workflows/check-plugin-updates.yml index e350d2d..453b4e6 100644 --- a/.github/workflows/check-plugin-updates.yml +++ b/.github/workflows/check-plugin-updates.yml @@ -37,7 +37,7 @@ jobs: - name: Detect upstream updates id: detect run: | - updates=$(python .github/scripts/parse_plugins.py check-updates) + updates=$(python scripts/parse_plugins.py check-updates) echo "Raw updates: $updates" # If a specific plugin was requested, filter to just that one