diff --git a/.github/actions/cached-ci-job/action.yaml b/.github/actions/cached-ci-job/action.yaml new file mode 100644 index 0000000..e0396b4 --- /dev/null +++ b/.github/actions/cached-ci-job/action.yaml @@ -0,0 +1,157 @@ +name: Cached CI Job +description: Execute job only if not already successful for this commit SHA + +inputs: + check-name: + description: Full check run name (defaults to github.job, include matrix values for matrix jobs) + required: false + default: ${{ github.job }} + path-filters: + description: Regex pattern for relevant file paths (empty means always relevant) + required: false + default: '' + force-run: + description: Force execution even if already successful + required: false + default: 'false' + +outputs: + should-run: + description: Whether job should execute (true/false) + value: ${{ steps.decide.outputs.should-run }} + previously-succeeded: + description: Whether this job previously succeeded for this commit + value: ${{ steps.check-history.outputs.previously-succeeded }} + relevant-changes: + description: Whether relevant file changes were detected + value: ${{ steps.check-paths.outputs.relevant-changes }} + +runs: + using: composite + steps: + - name: Query GitHub Checks API for execution history + id: check-history + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # ratchet:actions/github-script@v7 + env: + CHECK_NAME: ${{ inputs.check-name }} + with: + script: | + const checkName = process.env.CHECK_NAME; + const commit = context.sha; + + core.info(`Querying execution history for: "${checkName}" @ ${commit}`); + + try { + const { data: checks } = await github.rest.checks.listForRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: commit, + check_name: checkName, + }); + + core.info(`Found ${checks.check_runs.length} check run(s) for this commit`); + + // Find any completed successful run + const successfulRun = checks.check_runs.find(run => + run.conclusion === 'success' && + run.status === 
'completed' + ); + + if (successfulRun) { + core.info(`✓ Job already succeeded: ${successfulRun.html_url}`); + core.setOutput('previously-succeeded', 'true'); + } else { + const failedRuns = checks.check_runs.filter(run => + run.conclusion === 'failure' && + run.status === 'completed' + ); + if (failedRuns.length > 0) { + core.info(`✗ Found ${failedRuns.length} previous failed run(s)`); + } else { + core.info(`✗ No previous runs found`); + } + core.setOutput('previously-succeeded', 'false'); + } + } catch (error) { + core.warning(`API query failed: ${error.message}`); + core.setOutput('previously-succeeded', 'false'); + } + + - name: Check for relevant file changes + id: check-paths + if: | + steps.check-history.outputs.previously-succeeded != 'true' && + inputs.path-filters != '' + shell: bash + env: + PATH_FILTERS: ${{ inputs.path-filters }} + run: | + # Determine base ref for comparison + if [ "${{ github.event_name }}" = "pull_request" ]; then + BASE_REF="${{ github.event.pull_request.base.sha }}" + else + BASE_REF="HEAD^" + fi + + echo "Checking for changes matching: $PATH_FILTERS" + echo "Comparing $BASE_REF...HEAD" + + # Workflow changes always trigger all jobs + if git diff --name-only "$BASE_REF" HEAD | grep -qE '\.github/workflows/'; then + echo "relevant-changes=true" >> $GITHUB_OUTPUT + echo "✓ Workflow changes detected - job is relevant" + exit 0 + fi + + # Check if any relevant files changed + if git diff --name-only "$BASE_REF" HEAD | grep -qE "$PATH_FILTERS"; then + echo "relevant-changes=true" >> $GITHUB_OUTPUT + echo "✓ Relevant file changes detected" + else + echo "relevant-changes=false" >> $GITHUB_OUTPUT + echo "✗ No relevant file changes" + fi + + - name: Make execution decision + id: decide + shell: bash + env: + FORCE: ${{ inputs.force-run }} + PREV_SUCCESS: ${{ steps.check-history.outputs.previously-succeeded }} + HAS_CHANGES: ${{ steps.check-paths.outputs.relevant-changes }} + HAS_FILTERS: ${{ inputs.path-filters != '' }} + run: | + echo 
"=== Execution Decision ===" + echo "Force run: $FORCE" + echo "Previously succeeded: $PREV_SUCCESS" + echo "Has path filters: $HAS_FILTERS" + echo "Relevant changes: $HAS_CHANGES" + echo "" + + # Force run overrides everything + if [ "$FORCE" = "true" ]; then + echo "should-run=true" >> $GITHUB_OUTPUT + echo "Decision: RUN (forced by input)" + exit 0 + fi + + # If already succeeded for this commit, skip + if [ "$PREV_SUCCESS" = "true" ]; then + echo "should-run=false" >> $GITHUB_OUTPUT + echo "Decision: SKIP (already succeeded for this commit)" + exit 0 + fi + + # If we have path filters, honor them + if [ "$HAS_FILTERS" = "true" ]; then + echo "should-run=${HAS_CHANGES}" >> $GITHUB_OUTPUT + if [ "$HAS_CHANGES" = "true" ]; then + echo "Decision: RUN (relevant file changes detected)" + else + echo "Decision: SKIP (no relevant file changes)" + fi + else + # No filters means always run if not already succeeded + echo "should-run=true" >> $GITHUB_OUTPUT + echo "Decision: RUN (no path filters specified)" + fi diff --git a/.github/actions/setup-nix/action.yml b/.github/actions/setup-nix/action.yml index adee1ed..edf1529 100644 --- a/.github/actions/setup-nix/action.yml +++ b/.github/actions/setup-nix/action.yml @@ -1,61 +1,86 @@ name: setup-nix -description: Setup Nix with optional disk space optimization and cachix binary cache configuration +description: setup nix using nothing-but-nix pattern with space reclamation and github actions cache inputs: installer: description: | - Nix installation strategy: - - 'full' (default): Aggressive disk cleanup + DeterminateSystems installer - - 'quick': Lightweight install with nixbuild/nix-quick-install-action + nix installation strategy: + - 'full' (default): space reclamation + cache + cachix for builds + - 'quick': minimal install for simple tasks (no space reclamation, no caching overhead) type: string required: false default: full system: - description: Nix system to configure (e.g., x86_64-linux, aarch64-darwin) + 
description: nix system to configure (e.g., x86_64-linux, aarch64-darwin) type: string - required: false - default: x86_64-linux - extra-conf: - description: Additional nix.conf configuration + required: true + sandbox: + description: enable nix sandbox builds + type: string + default: "true" + cache-key: + description: primary cache key (auto-generated from nix files if not provided) type: string required: false - default: system-features = nixos-test benchmark big-parallel kvm - setup-cachix: - description: Setup cachix binary cache after Nix installation (requires SOPS_AGE_KEY in env) + default: "" + gc-max-store-size-linux: + description: max nix store size on linux before garbage collection (e.g., 5G, 10G) + type: string + default: "5G" + gc-max-store-size-macos: + description: max nix store size on macos before garbage collection (e.g., 5G, 10G) + type: string + default: "5G" + enable-cachix: + description: enable cachix binary cache type: boolean - required: false default: false - cachix-auth: - description: Authenticate with cachix for pushing (requires setup-cachix=true) - type: boolean + cachix-name: + description: cachix cache name + type: string + required: false + cachix-auth-token: + description: cachix auth token + type: string required: false + cachix-skip-push: + description: skip pushing to cachix (read-only) + type: boolean default: false +outputs: + cache-hit: + description: whether the primary cache key was hit + value: ${{ steps.cache.outputs.hit-primary-key }} + cache-key: + description: the cache key that was used + value: ${{ steps.cache.outputs.primary-key }} + runs: using: composite steps: - # Full installer: Aggressive disk cleanup + DeterminateSystems - - name: Reclaim disk space (Linux) + - name: reclaim space (linux) if: runner.os == 'Linux' && inputs.installer == 'full' - uses: wimpysworld/nothing-but-nix@main + uses: wimpysworld/nothing-but-nix@10c936d9e46521bf923f75458e0cbd4fa309300d # ratchet:wimpysworld/nothing-but-nix@main with: 
- hatchet-protocol: rampage + hatchet-protocol: carve + nix-permission-edict: true - - name: Reclaim disk space (macOS) + - name: reclaim space (darwin) if: runner.os == 'macOS' && inputs.installer == 'full' shell: bash run: | - echo "::group::Disk space before cleanup" + echo "::group::disk space (before)" sudo df -h echo "::endgroup::" - echo "::group::Disable Spotlight indexing" + echo "::group::disable mds" sudo mdutil -i off -a || echo "mdutil failed" sudo launchctl unload -w /System/Library/LaunchDaemons/com.apple.metadata.mds.plist \ - || echo "launchctl unload failed" + || echo "launchctl unload failed" echo "::endgroup::" - echo "Starting background cleanup to reclaim disk space..." + echo "Background space expansion started. /nix will grow as space becomes available." sudo rm -rf \ /Applications/Xcode_* \ /Library/Developer/CoreSimulator \ @@ -67,44 +92,45 @@ runs: /Users/runner/Library/Developer/CoreSimulator \ /Users/runner/hostedtoolcache & - - name: Install Nix (DeterminateSystems) - if: inputs.installer == 'full' - uses: DeterminateSystems/nix-installer-action@main + - name: install nix + uses: nixbuild/nix-quick-install-action@2c9db80fb984ceb1bcaa77cdda3fdf8cfba92035 # ratchet:nixbuild/nix-quick-install-action@v34 with: - extra-conf: | + nix_conf: | + sandbox = ${{ inputs.sandbox }} system = ${{ inputs.system }} - ${{ inputs.extra-conf }} + keep-env-derivations = true + keep-outputs = true - # Quick installer: Lightweight nixbuild/nix-quick-install-action - - name: Install Nix (Quick Install) - if: inputs.installer == 'quick' - uses: nixbuild/nix-quick-install-action@master + - name: setup cache + if: runner.os == 'Linux' && inputs.installer == 'full' + id: cache + uses: nix-community/cache-nix-action@135667ec418502fa5a3598af6fb9eb733888ce6a # ratchet:nix-community/cache-nix-action@v6 + with: + primary-key: ${{ inputs.cache-key != '' && inputs.cache-key || format('nix-{0}-{1}', runner.os, hashFiles('**/*.nix', '**/flake.lock')) }} + 
restore-prefixes-first-match: ${{ format('nix-{0}-', runner.os) }} + gc-max-store-size-linux: ${{ inputs.gc-max-store-size-linux }} + gc-max-store-size-macos: ${{ inputs.gc-max-store-size-macos }} + purge: true + purge-prefixes: ${{ format('nix-{0}-', runner.os) }} + purge-created: 0 + purge-last-accessed: 0 + purge-primary-key: never - - name: Report disk space (macOS post-cleanup) + - name: setup cachix + if: inputs.enable-cachix + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # ratchet:cachix/cachix-action@v16 + continue-on-error: true + with: + name: ${{ inputs.cachix-name }} + authToken: ${{ inputs.cachix-auth-token }} + skipPush: ${{ inputs.cachix-skip-push }} + + - name: post setup-nix if: runner.os == 'macOS' && inputs.installer == 'full' - uses: srz-zumix/post-run-action@v2 + uses: srz-zumix/post-run-action@2bf288bc024acd0341914f792a811080ebd0f252 # ratchet:srz-zumix/post-run-action@v2 with: shell: bash -e {0} post-run: | - echo "::group::Disk space after workflow" + echo "::group::disk space (after)" sudo df -h echo "::endgroup::" - - - name: Setup and authenticate cachix - if: inputs.setup-cachix == 'true' && inputs.cachix-auth == 'true' - shell: bash - run: | - nix develop -c sops exec-env vars/shared.yaml ' - cachix authtoken "$CACHIX_AUTH_TOKEN" - cachix use "$CACHIX_CACHE_NAME" - cachix use nix-community - ' - - - name: Setup cachix for binary cache - if: inputs.setup-cachix == 'true' && inputs.cachix-auth != 'true' - shell: bash - run: | - nix develop -c sops exec-env vars/shared.yaml ' - cachix use "$CACHIX_CACHE_NAME" - cachix use nix-community - ' diff --git a/.github/mergify.yml b/.github/mergify.yml index cd53156..809eec5 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -8,19 +8,38 @@ pull_request_rules: assign: add_users: - "{{ author }}" - - name: automatic merge when not WIP, CI passes, and at least 1 approving review + - name: automatic merge to queue conditions: - "#approved-reviews-by>=1" - - 
check-success=scan - - check-success=nix (ubuntu-latest) - - check-success=test (docs) - - base=main - label!=work-in-progress + - -draft + - -conflict + - check-success=skip-check + - check-success=gitleaks + - check-success=set-variables + - or: + - check-success=nix + - check-skipped=nix + - or: + - check-success=test (docs) + - check-skipped=test (docs) + - check-success=PR Check Fast-forward actions: queue: name: default + queue_rules: - name: default merge_method: fast-forward update_method: rebase update_bot_account: cameronraysmith + batch_size: 1 + checks_timeout: 3600 + queue_conditions: + - check-success=skip-check + merge_conditions: + - check-success=skip-check + commit_message_template: | + {{ title }} (#{{ number }}) + + {{ body }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d9a8338..83cb93a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -25,6 +25,11 @@ on: required: false type: boolean default: false + force_run: + description: "Force execution even if already successful for this commit" + required: false + type: boolean + default: false workflow_call: pull_request: types: [opened, labeled, reopened, synchronize] @@ -47,29 +52,50 @@ permissions: id-token: write jobs: - scan: - name: gitguardian + # job 1: secrets-scan + # scans repository for hardcoded secrets using gitleaks + # Security critical - runs for all commits + secrets-scan: runs-on: ubuntu-latest if: | - github.event_name != 'workflow_dispatch' || - inputs.job == '' || - inputs.job == 'scan' + !cancelled() && + (github.event_name != 'workflow_dispatch' || + inputs.job == '' || + inputs.job == 'secrets-scan') steps: - - name: Checkout + - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 with: - fetch-depth: 0 - - name: GitGuardian scan - uses: GitGuardian/ggshield-action@455483042671cc73b40d0e753baddffef7309a1f # ratchet:GitGuardian/ggshield-action@v1.37.0 + 
fetch-depth: 0 # Required for git diff in composite action and comprehensive secret scanning + + - name: Check execution cache + id: cache + uses: ./.github/actions/cached-ci-job + with: + force-run: ${{ inputs.force_run || 'false' }} + # No path-filters - security scanning always relevant when code changes + # Cache provides value for workflow retries (don't re-scan unchanged commits) + # Commit SHA is content-addressed: same SHA = same content = same security posture + + - name: Setup Nix + if: steps.cache.outputs.should-run == 'true' + uses: ./.github/actions/setup-nix env: - GITHUB_PUSH_BEFORE_SHA: ${{ github.event.before }} - GITHUB_PUSH_BASE_SHA: ${{ github.event.base }} - GITHUB_PULL_BASE_SHA: ${{ github.event.pull_request.base.sha }} - GITHUB_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} - GITGUARDIAN_API_KEY: ${{ secrets.GITGUARDIAN_API_KEY }} + SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} + with: + installer: ${{ inputs.nix_installer || 'quick' }} + system: x86_64-linux + enable-cachix: true + - name: Scan for secrets with gitleaks + if: steps.cache.outputs.should-run == 'true' + run: nix develop -c just scan-secrets + + # job 2: set-variables + # determines deployment settings and variables based on event type + # Always runs - needed for production job routing and provides outputs set-variables: - needs: scan + needs: [secrets-scan] runs-on: ubuntu-latest if: | !cancelled() && @@ -83,6 +109,7 @@ jobs: deploy_environment: ${{ steps.set-variables.outputs.deploy_environment }} checkout_ref: ${{ steps.set-variables.outputs.checkout_ref }} checkout_rev: ${{ steps.set-variables.outputs.checkout_rev }} + packages: ${{ steps.discover-packages.outputs.packages }} steps: - name: Set action variables @@ -105,6 +132,10 @@ jobs: if ${{ contains(github.event.pull_request.labels.*.name, 'actions-debug') }}; then DEBUG="true" fi + if ${{ contains(github.event.pull_request.labels.*.name, 'docs-preview') }}; then + DEPLOY_ENABLED="true" + 
DEPLOY_ENVIRONMENT="preview" + fi CHECKOUT_REF="${{ github.event.pull_request.head.ref }}" CHECKOUT_REV="${{ github.event.pull_request.head.sha }}" else @@ -132,12 +163,29 @@ jobs: echo "CHECKOUT_REF=$CHECKOUT_REF" >> $GITHUB_OUTPUT echo "CHECKOUT_REV=$CHECKOUT_REV" >> $GITHUB_OUTPUT + - name: Checkout for package discovery + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + with: + sparse-checkout: | + packages + justfile + sparse-checkout-cone-mode: false + + - name: Discover packages + id: discover-packages + run: | + # Install just for package discovery + curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- --to /usr/local/bin + PACKAGES=$(just list-packages-json) + echo "packages=$PACKAGES" >> $GITHUB_OUTPUT + echo "Discovered packages: $PACKAGES" + nix: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest] - needs: set-variables + needs: [secrets-scan, set-variables] if: | !cancelled() && needs.set-variables.outputs.skip_ci != 'true' && @@ -149,23 +197,42 @@ jobs: cancel-in-progress: true steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + with: + fetch-depth: 0 # Required for git diff in composite action + + - name: Check execution cache + id: cache + uses: ./.github/actions/cached-ci-job + with: + path-filters: '\.nix$|flake\.lock|justfile|packages/.*\.(ts|tsx|js|jsx)|.*\.config\.(ts|js)|package\.json|.*\.lock' + force-run: ${{ inputs.force_run || 'false' }} + - name: Setup Nix + if: steps.cache.outputs.should-run == 'true' uses: ./.github/actions/setup-nix env: SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} with: installer: ${{ inputs.nix_installer || 'quick' }} system: x86_64-linux - setup-cachix: true - cachix-auth: true + enable-cachix: true + cachix-name: ${{ vars.CACHIX_CACHE_NAME }} + cachix-auth-token: ${{ secrets.CACHIX_AUTH_TOKEN }} + - name: Setup tmate debug session + if: steps.cache.outputs.should-run == 'true' && 
needs.set-variables.outputs.debug == 'true' uses: mxschmitt/action-tmate@e5c7151931ca95bad1c6f4190c730ecf8c7dde48 # ratchet:mxschmitt/action-tmate@v3 - if: ${{ needs.set-variables.outputs.debug == 'true' }} + - name: Install omnix + if: steps.cache.outputs.should-run == 'true' run: nix --accept-flake-config profile install "github:juspay/omnix" + - name: Summarize flake + if: steps.cache.outputs.should-run == 'true' run: om show . + - name: Run flake CI and push to cachix + if: steps.cache.outputs.should-run == 'true' env: SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} run: | @@ -173,68 +240,139 @@ jobs: om ci run | tee /dev/stderr | cachix push "$CACHIX_CACHE_NAME" ' + # Note: Reusable workflow calls cannot use composite action steps directly + # Per-matrix-element caching happens via GitHub Checks API based on job name: test (package-name) test: - needs: [set-variables] + needs: [secrets-scan, set-variables] if: | !cancelled() && needs.set-variables.outputs.skip_ci != 'true' && (github.event_name != 'workflow_dispatch' || inputs.job == '' || inputs.job == 'test') - runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - package: - - name: docs - path: packages/docs - concurrency: - group: test-${{ matrix.package.name }}-${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.event.pull_request.number || github.ref_name }} - cancel-in-progress: true + package: ${{ fromJson(needs.set-variables.outputs.packages) }} + uses: ./.github/workflows/package-test.yaml + with: + package-name: ${{ matrix.package.name }} + package-path: ${{ matrix.package.path }} + debug-enabled: ${{ needs.set-variables.outputs.debug }} + nix-installer: ${{ inputs.nix_installer || 'quick' }} + secrets: + SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} + + # job 3: preview-release-version + # Preview semantic-release version for each package (PR only, fast feedback) + preview-release-version: + needs: [set-variables] + if: | + !cancelled() && + github.event_name == 'pull_request' + 
strategy: + fail-fast: false + matrix: + package: ${{ fromJson(needs.set-variables.outputs.packages) }} + runs-on: ubuntu-latest + # semantic-release verifyAuth requires push permission even in dry-run mode + # https://github.com/semantic-release/semantic-release/blob/v25.0.1/index.js#L87-L98 + permissions: + contents: write steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + - name: Checkout repository + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + with: + ref: ${{ github.head_ref }} # Checkout actual PR branch, not merge commit + fetch-depth: 0 # Full history needed for semantic-release analysis + fetch-tags: true # Explicitly fetch all tags for version detection + + - name: Fetch target branch for preview + run: | + git fetch origin + git branch -f main origin/main + + - name: Configure git identity for temporary commits + run: | + git config --global user.name "github-actions[bot]" + git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" + - name: Setup Nix uses: ./.github/actions/setup-nix env: SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} with: - installer: ${{ inputs.nix_installer || 'quick' }} + installer: quick system: x86_64-linux - setup-cachix: true + enable-cachix: true + - name: Install dependencies - run: nix develop -c just install - - name: Run unit tests with coverage - run: nix develop -c just test-coverage - - name: Build for E2E tests - run: nix develop -c just build - - name: Run E2E tests - run: nix develop -c just test-e2e - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: dist-${{ matrix.package.name }} - path: ${{ matrix.package.path }}/dist/ - retention-days: 7 - include-hidden-files: true - - name: Upload test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: playwright-report-${{ matrix.package.name }} - path: ${{ matrix.package.path 
}}/playwright-report/ - retention-days: 7 - - name: Upload coverage - if: always() - uses: actions/upload-artifact@v4 - with: - name: coverage-${{ matrix.package.name }} - path: ${{ matrix.package.path }}/coverage/ - retention-days: 7 + run: nix develop -c bun install + + - name: Setup tmate debug session + if: needs.set-variables.outputs.debug == 'true' + uses: mxschmitt/action-tmate@e5c7151931ca95bad1c6f4190c730ecf8c7dde48 # ratchet:mxschmitt/action-tmate@v3 + + - name: Preview version for ${{ matrix.package.name }} + run: | + echo "::group::Preview semantic-release version" + nix develop -c just preview-version main ${{ matrix.package.path }} + echo "::endgroup::" + + # job 4: preview-docs-deploy + # Deploy docs to preview environment (PR only, fast feedback) + preview-docs-deploy: + needs: [set-variables] + if: | + !cancelled() && + github.event_name == 'pull_request' + permissions: + contents: read + deployments: write + uses: ./.github/workflows/deploy-docs.yaml + with: + branch: ${{ github.head_ref }} + environment: preview + debug_enabled: ${{ needs.set-variables.outputs.debug }} + secrets: inherit + + # job 5: production-release-packages + # Release packages to production on main/beta branches + # Requires test+nix success/skipped (safe for fast-forward merge) + production-release-packages: + needs: [set-variables, test, nix] + if: | + github.repository_owner == 'sciexp' && + (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && + (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/beta') && + needs.set-variables.outputs.skip_ci != 'true' + strategy: + fail-fast: false + matrix: + package: ${{ fromJson(needs.set-variables.outputs.packages) }} + permissions: + contents: write + id-token: write + uses: ./.github/workflows/package-release.yaml + with: + package-path: ${{ matrix.package.path }} + package-name: ${{ matrix.package.name }} + release-dry-run: false + debug-enabled: ${{ needs.set-variables.outputs.debug == 'true' }} 
+ checkout-ref: ${{ needs.set-variables.outputs.checkout_ref }} + secrets: + SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} - deploy: - needs: [set-variables, nix, test] + # job 6: production-docs-deploy + # Documentation deployment to production (conditional) + # Depends on production-release-packages to ensure packages released first + production-docs-deploy: + needs: [set-variables, test, production-release-packages] if: | !cancelled() && needs.set-variables.outputs.skip_ci != 'true' && + (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && + (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/beta') && needs.set-variables.outputs.deploy_enabled == 'true' && (github.event_name != 'workflow_dispatch' || inputs.job == '' || diff --git a/.github/workflows/deploy-docs.yaml b/.github/workflows/deploy-docs.yaml index e8afced..2520ce1 100644 --- a/.github/workflows/deploy-docs.yaml +++ b/.github/workflows/deploy-docs.yaml @@ -43,92 +43,50 @@ permissions: deployments: write jobs: - build: + deploy-docs: runs-on: ubuntu-latest + environment: + name: ${{ inputs.environment }} + url: ${{ inputs.environment == 'preview' && format('https://b-{0}-ts-nix-docs.sciexp.workers.dev', inputs.branch) || 'https://ts-nix.scientistexperience.net' }} + permissions: + contents: read + deployments: write steps: - - name: Check for existing dist artifact - id: check-artifact - uses: actions/download-artifact@v4 - continue-on-error: true - with: - name: dist-docs - path: packages/docs/dist/ - - name: Checkout - if: steps.check-artifact.outcome == 'failure' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 with: ref: ${{ inputs.branch }} - name: Setup Nix - if: steps.check-artifact.outcome == 'failure' uses: ./.github/actions/setup-nix env: SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} with: installer: quick system: x86_64-linux - setup-cachix: true - name: Setup tmate debug session - if: steps.check-artifact.outcome == 
'failure' && inputs.debug_enabled == 'true' + if: inputs.debug_enabled == 'true' uses: mxschmitt/action-tmate@e5c7151931ca95bad1c6f4190c730ecf8c7dde48 # ratchet:mxschmitt/action-tmate@v3 - name: Install dependencies - if: steps.check-artifact.outcome == 'failure' run: nix develop -c just install - - name: Build documentation site - if: steps.check-artifact.outcome == 'failure' - run: nix develop -c just build - - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: dist-docs-${{ github.run_id }} - path: packages/docs/dist/ - retention-days: 7 - include-hidden-files: true - overwrite: true - - deploy: - needs: build - runs-on: ubuntu-latest - environment: - name: ${{ inputs.environment }} - url: https://ts-nix.scientistexperience.net - steps: - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 - with: - ref: ${{ inputs.branch }} - - - name: Download build artifacts - uses: actions/download-artifact@v4 - with: - name: dist-docs-${{ github.run_id }} - path: packages/docs/dist/ - - - name: Setup Nix - uses: ./.github/actions/setup-nix - env: - SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} - with: - installer: quick - system: x86_64-linux - setup-cachix: true - - - name: Install dependencies - run: nix develop -c just install + - name: Validate documentation links + run: nix develop -c just docs-linkcheck - name: Deploy to Cloudflare Workers id: deployment env: SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} + GITHUB_ACTIONS: true + GITHUB_ACTOR: ${{ github.actor }} + GITHUB_WORKFLOW: ${{ github.workflow }} run: | - cd packages/docs if [ "${{ inputs.environment }}" = "preview" ]; then - nix develop -c sops exec-env ../../vars/shared.yaml "bunx wrangler versions upload --preview-alias b-${{ inputs.branch }}" + echo "Deploying preview for branch: ${{ inputs.branch }}" + nix develop -c just cf-deploy-preview ${{ inputs.branch }} else - nix develop -c sops exec-env ../../vars/shared.yaml "bunx 
wrangler deploy" + echo "Deploying to production (promoting existing version if available)" + nix develop -c just cf-deploy-production fi diff --git a/.github/workflows/package-release.yaml b/.github/workflows/package-release.yaml index 0a7ee5d..00a58f5 100644 --- a/.github/workflows/package-release.yaml +++ b/.github/workflows/package-release.yaml @@ -1,18 +1,150 @@ name: Package Release on: - workflow_call: + workflow_dispatch: inputs: + package-path: + description: "Path to the package directory" + required: true + type: string package-name: + description: "Name of the package" required: true type: string + release-dry-run: + description: "Whether to run the release in dry-run mode" + required: false + type: boolean + default: false + debug-enabled: + description: "Enable tmate debug session" + required: false + type: boolean + default: false + checkout-ref: + description: "Git ref to checkout" + required: false + type: string + default: "" + + workflow_call: + inputs: package-path: + description: "Path to the package directory" + required: true + type: string + package-name: + description: "Name of the package" required: true type: string + release-dry-run: + description: "Whether to run the release in dry-run mode" + required: false + type: string + default: "false" + debug-enabled: + description: "Enable tmate debug session" + required: false + type: boolean + default: false + checkout-ref: + description: "Git ref to checkout" + required: false + type: string + default: "" + secrets: + SOPS_AGE_KEY: + required: true + outputs: + version: + description: "Released version" + value: ${{ jobs.release.outputs.version }} + released: + description: "Whether a new release was published" + value: ${{ jobs.release.outputs.released }} + tag: + description: "Git tag created for the release" + value: ${{ jobs.release.outputs.tag }} + +defaults: + run: + shell: bash + +permissions: + contents: write + id-token: write jobs: - stub: + release: runs-on: ubuntu-latest + 
permissions: + contents: write + id-token: write + outputs: + version: ${{ steps.release-info.outputs.version }} + released: ${{ steps.release-info.outputs.released }} + tag: ${{ steps.release-info.outputs.tag }} steps: - - name: Workflow stub - run: echo "Stub - full implementation in PR" + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true # Explicitly fetch all tags for version detection + ref: ${{ inputs.checkout-ref != '' && inputs.checkout-ref || github.ref }} + persist-credentials: false + + - name: Setup Nix + uses: ./.github/actions/setup-nix + with: + installer: quick + system: x86_64-linux + enable-cachix: true + + - name: Install dependencies + run: nix develop -c bun install + + - name: Run semantic-release + id: semantic-release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SOPS_AGE_KEY: ${{ secrets.SOPS_AGE_KEY }} + run: | + nix develop -c just release-package ${{ inputs.package-name }} ${{ inputs.release-dry-run }} + + - name: Extract release info + if: always() + id: release-info + run: | + # Extract version and tag from git if release was created + # For dry-run mode, these will remain as defaults + if [ "${{ inputs.release-dry-run }}" = "false" ]; then + LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + if [ -n "$LATEST_TAG" ]; then + # Extract version from tag (e.g., docs-v1.2.3 -> 1.2.3) + VERSION=$(echo "$LATEST_TAG" | grep -oP '\d+\.\d+\.\d+' || echo "unknown") + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "released=true" >> $GITHUB_OUTPUT + echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT + else + echo "version=unknown" >> $GITHUB_OUTPUT + echo "released=false" >> $GITHUB_OUTPUT + echo "tag=" >> $GITHUB_OUTPUT + fi + else + echo "version=unknown" >> $GITHUB_OUTPUT + echo "released=false" >> $GITHUB_OUTPUT + echo "tag=" >> $GITHUB_OUTPUT + fi + + - name: Setup tmate debug session + if: inputs.debug-enabled == true + uses: 
mxschmitt/action-tmate@e5c7151931ca95bad1c6f4190c730ecf8c7dde48 # ratchet:mxschmitt/action-tmate@v3 + + - name: Log release information + if: always() + run: | + if [ "${{ steps.release-info.outputs.released }}" == "true" ]; then + echo "Package ${{ inputs.package-name }} released version ${{ steps.release-info.outputs.version }}" + echo "Tag: ${{ steps.release-info.outputs.tag }}" + else + echo "No release created for ${{ inputs.package-name }}" + fi diff --git a/.github/workflows/package-test.yaml b/.github/workflows/package-test.yaml index a53b9b2..f69b4fa 100644 --- a/.github/workflows/package-test.yaml +++ b/.github/workflows/package-test.yaml @@ -1,18 +1,106 @@ name: Package Test on: + workflow_dispatch: + inputs: + package-name: + description: "Name of the package to test" + required: true + type: string + package-path: + description: "Path to the package directory" + required: true + type: string + debug-enabled: + description: "Run with tmate.io debugging enabled" + required: false + type: boolean + default: false + nix-installer: + description: "Nix installer strategy (quick or full)" + required: false + type: string + default: "quick" + workflow_call: inputs: package-name: + description: "Name of the package to test" required: true type: string package-path: + description: "Path to the package directory" required: true type: string + debug-enabled: + description: "Run with tmate.io debugging enabled" + required: false + type: string + default: "false" + nix-installer: + description: "Nix installer strategy (quick or full)" + required: false + type: string + default: "quick" + secrets: + SOPS_AGE_KEY: + required: true + +defaults: + run: + shell: bash jobs: - stub: + test: runs-on: ubuntu-latest steps: - - name: Workflow stub - run: echo "Stub - full implementation in PR" + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4 + + - name: Setup Nix + uses: ./.github/actions/setup-nix + env: + SOPS_AGE_KEY: ${{ 
secrets.SOPS_AGE_KEY }} + with: + installer: ${{ inputs.nix-installer }} + system: x86_64-linux + enable-cachix: true + + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@e5c7151931ca95bad1c6f4190c730ecf8c7dde48 # ratchet:mxschmitt/action-tmate@v3 + if: ${{ inputs.debug-enabled == 'true' }} + + - name: Install dependencies + run: nix develop -c just install + + - name: Run unit tests with coverage + run: nix develop -c just docs-test-coverage + + - name: Build for E2E tests + run: nix develop -c just docs-build + + - name: Run E2E tests + run: nix develop -c just docs-test-e2e + + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: dist-${{ inputs.package-name }} + path: ${{ inputs.package-path }}/dist/ + retention-days: 7 + include-hidden-files: true + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: playwright-report-${{ inputs.package-name }} + path: ${{ inputs.package-path }}/playwright-report/ + retention-days: 7 + + - name: Upload coverage + if: always() + uses: actions/upload-artifact@v4 + with: + name: coverage-${{ inputs.package-name }} + path: ${{ inputs.package-path }}/coverage/ + retention-days: 7 diff --git a/.github/workflows/pr-check.yaml b/.github/workflows/pr-check.yaml new file mode 100644 index 0000000..4021412 --- /dev/null +++ b/.github/workflows/pr-check.yaml @@ -0,0 +1,19 @@ +name: PR Check Fast-forward +on: + pull_request: + types: [opened, reopened, synchronize] +jobs: + check-fast-forward: + runs-on: ubuntu-latest + + permissions: + contents: read + pull-requests: write + issues: write + + steps: + - name: check fast-forward merge is possible + uses: sequoia-pgp/fast-forward@042cd23fbf9d5ed1400497a106c8abe4b45408ab # ratchet:sequoia-pgp/fast-forward@main + with: + merge: false + comment: on-error diff --git a/.github/workflows/pr-merge.yaml b/.github/workflows/pr-merge.yaml new file mode 100644 index 0000000..819f161 --- /dev/null +++ 
b/.github/workflows/pr-merge.yaml @@ -0,0 +1,25 @@ +name: PR Fast-forward Merge +on: + issue_comment: + types: [created, edited] + +env: + GITHUB_ACTOR: ${{ vars.FAST_FORWARD_ACTOR }} + GITHUB_TOKEN: ${{ secrets.FAST_FORWARD_PAT }} + +jobs: + fast-forward: + if: ${{ contains(github.event.comment.body, '/fast-forward') && github.event.issue.pull_request }} + runs-on: ubuntu-latest + + permissions: + contents: write + pull-requests: write + issues: write + + steps: + - name: fast-forward merge + uses: sequoia-pgp/fast-forward@042cd23fbf9d5ed1400497a106c8abe4b45408ab # ratchet:sequoia-pgp/fast-forward@main + with: + merge: true + comment: on-error diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 351a3e7..5c17943 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -56,7 +56,7 @@ jobs: with: installer: quick system: x86_64-linux - setup-cachix: true + enable-cachix: true - name: Install dependencies run: nix develop -c bun install diff --git a/.gitleaksignore b/.gitleaksignore new file mode 100644 index 0000000..0043902 --- /dev/null +++ b/.gitleaksignore @@ -0,0 +1,17 @@ +# .gitleaksignore - Fingerprints of false positives safe to ignore +# Format: ::: +# These are age public keys used in documentation and scripts (safe to commit) + +# Age public key in CI/CD setup documentation (commit 3e1d314b) +3e1d314b2feafeba8c46da73257557caca7bd1dc:packages/docs/src/content/docs/guides/ci-cd-setup.md:generic-api-key:54 + +# Age public key placeholders in sops-bootstrap.sh (commit 5c662602) +5c662602e6d280c86c2e12321aa91b6c7db2d3aa:scripts/sops-bootstrap.sh:generic-api-key:21 +5c662602e6d280c86c2e12321aa91b6c7db2d3aa:scripts/sops-bootstrap.sh:generic-api-key:22 + +# Age public key placeholders in justfile sops-add-key recipe (commit e8c56a3a) +e8c56a3ace8525d2c0960563f172c3c300eb044c:justfile:generic-api-key:424 +e8c56a3ace8525d2c0960563f172c3c300eb044c:justfile:generic-api-key:425 + +# Age public key in old 
CI_SETUP.md documentation (commit 7b972f52, file later moved) +7b972f52937be8b6ab36c804e6e8316cd2e7f3a3:CI_SETUP.md:generic-api-key:51 diff --git a/bun.lock b/bun.lock index 950d43f..96cd573 100644 --- a/bun.lock +++ b/bun.lock @@ -24,7 +24,7 @@ }, "devDependencies": { "@biomejs/biome": "2.2.4", - "@playwright/test": "~1.54.0", + "@playwright/test": "^1.56.1", "@semantic-release/changelog": "^6.0.3", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^11.0.1", @@ -34,9 +34,10 @@ "semantic-release": "^24.2.3", "semantic-release-major-tag": "^0.3.2", "semantic-release-monorepo": "^8.0.2", + "starlight-links-validator": "^0.19.0", "svgo": "^4.0.0", "vitest": "^3.2.4", - "wrangler": "^4.42.0", + "wrangler": "^4.45.0", }, }, }, @@ -99,17 +100,17 @@ "@cloudflare/kv-asset-handler": ["@cloudflare/kv-asset-handler@0.4.0", "", { "dependencies": { "mime": "^3.0.0" } }, "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA=="], - "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.7.6", "", { "peerDependencies": { "unenv": "2.0.0-rc.21", "workerd": "^1.20250927.0" }, "optionalPeers": ["workerd"] }, "sha512-ykG2nd3trk6jbknRCH69xL3RpGLLbKCrbTbWSOvKEq7s4jH06yLrQlRr/q9IU+dK9p1JY1EXqhFK7VG5KqhzmQ=="], + "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.7.8", "", { "peerDependencies": { "unenv": "2.0.0-rc.21", "workerd": "^1.20250927.0" }, "optionalPeers": ["workerd"] }, "sha512-Ky929MfHh+qPhwCapYrRPwPVHtA2Ioex/DbGZyskGyNRDe9Ru3WThYZivyNVaPy5ergQSgMs9OKrM9Ajtz9F6w=="], - "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20251001.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-y1ST/cCscaRewWRnsHZdWbgiLJbki5UMGd0hMo/FLqjlztwPeDgQ5CGm5jMiCDdw/IBCpWxEukftPYR34rWNog=="], + "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20251011.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-0DirVP+Z82RtZLlK2B+VhLOkk+ShBqDYO/jhcRw4oVlp0TOvk3cOVZChrt3+y3NV8Y/PYgTEywzLKFSziK4wCg=="], - 
"@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20251001.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-+z4QHHZ/Yix82zLFYS+ZS2UV09IENFPwDCEKUWfnrM9Km2jOOW3Ua4hJNob1EgQUYs8fFZo7k5O/tpwxMsSbbQ=="], + "@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20251011.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-1WuFBGwZd15p4xssGN/48OE2oqokIuc51YvHvyNivyV8IYnAs3G9bJNGWth1X7iMDPe4g44pZrKhRnISS2+5dA=="], - "@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20251001.0", "", { "os": "linux", "cpu": "x64" }, "sha512-hGS+O2V9Mm2XjJUaB9ZHMA5asDUaDjKko42e+accbew0PQR7zrAl1afdII6hMqCLV4tk4GAjvhv281pN4g48rg=="], + "@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20251011.0", "", { "os": "linux", "cpu": "x64" }, "sha512-BccMiBzFlWZyFghIw2szanmYJrJGBGHomw2y/GV6pYXChFzMGZkeCEMfmCyJj29xczZXxcZmUVJxNy4eJxO8QA=="], - "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20251001.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-QYaMK+pRgt28N7CX1JlJ+ToegJF9LxzqdT7MjWqPgVj9D2WTyIhBVYl3wYjJRcgOlnn+DRt42+li4T64CPEeuA=="], + "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20251011.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-79o/216lsbAbKEVDZYXR24ivEIE2ysDL9jvo0rDTkViLWju9dAp3CpyetglpJatbSi3uWBPKZBEOqN68zIjVsQ=="], - "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20251001.0", "", { "os": "win32", "cpu": "x64" }, "sha512-ospnDR/FlyRvrv9DSHuxDAXmzEBLDUiAHQrQHda1iUH9HqxnNQ8giz9VlPfq7NIRc7bQ1ZdIYPGLJOY4Q366Ng=="], + "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20251011.0", "", { "os": "win32", "cpu": "x64" }, "sha512-RIXUQRchFdqEvaUqn1cXZXSKjpqMaSaVAkI5jNZ8XzAw/bw2bcdOVUtakrflgxDprltjFb0PTNtuss1FKtH9Jg=="], "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20251004.0", "", {}, "sha512-FkTBHEyOBwphbW4SLQ2XLCgNntD2wz0v1Si7NwJeN0JAPW/39/w6zhsKy3rsh+203tuSfBgsoP34+Os4RaySOw=="], @@ -287,7 +288,7 @@ "@pkgjs/parseargs": 
["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - "@playwright/test": ["@playwright/test@1.54.2", "", { "dependencies": { "playwright": "1.54.2" }, "bin": { "playwright": "cli.js" } }, "sha512-A+znathYxPf+72riFd1r1ovOLqsIIB0jKIoPjyK2kqEIe30/6jF6BC7QNluHuwUmsD2tv1XZVugN8GqfTMOxsA=="], + "@playwright/test": ["@playwright/test@1.56.1", "", { "dependencies": { "playwright": "1.56.1" }, "bin": { "playwright": "cli.js" } }, "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg=="], "@pnpm/config.env-replace": ["@pnpm/config.env-replace@1.1.0", "", {}, "sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w=="], @@ -413,6 +414,8 @@ "@types/normalize-package-data": ["@types/normalize-package-data@2.4.4", "", {}, "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA=="], + "@types/picomatch": ["@types/picomatch@3.0.2", "", {}, "sha512-n0i8TD3UDB7paoMMxA3Y65vUncFJXjcUf7lQY7YyKGl6031FNjfsLs6pdLFCy2GNFxItPJG8GvvpbZc2skH7WA=="], + "@types/sax": ["@types/sax@1.2.7", "", { "dependencies": { "@types/node": "*" } }, "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A=="], "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], @@ -881,6 +884,8 @@ "iron-webcrypto": ["iron-webcrypto@1.2.1", "", {}, "sha512-feOM6FaSr6rEABp/eDfVseKyTMDt+KGpeB35SkVn9Tyn0CqvVsY3EwI0v5i8nMHyJnzCIQf7nsy3p41TPkJZhg=="], + "is-absolute-url": ["is-absolute-url@4.0.1", "", {}, "sha512-/51/TKE88Lmm7Gc4/8btclNXWS+g50wXhYJq8HWIBAGUBnoAdRu1aXeh364t/O7wXDAcTJDP8PNuNKWUDWie+A=="], + "is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="], "is-alphanumerical": 
["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw=="], @@ -1113,7 +1118,7 @@ "mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="], - "miniflare": ["miniflare@4.20251001.0", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "7.14.0", "workerd": "1.20251001.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-OHd31D2LT8JH+85nVXClV0Z18jxirCohzKNAcZs/fgt4mIkUDtidX3VqR3ovAM0jWooNxrFhB9NSs3iDbiJF7Q=="], + "miniflare": ["miniflare@4.20251011.1", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "7.14.0", "workerd": "1.20251011.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-Qbw1Z8HTYM1adWl6FAtzhrj34/6dPRDPwdYOx21dkae8a/EaxbMzRIPbb4HKVGMVvtqbK1FaRCgDLVLolNzGHg=="], "minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -1241,9 +1246,9 @@ "pkg-up": ["pkg-up@3.1.0", "", { "dependencies": { "find-up": "^3.0.0" } }, "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA=="], - "playwright": ["playwright@1.54.2", "", { "dependencies": { "playwright-core": "1.54.2" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, 
"sha512-Hu/BMoA1NAdRUuulyvQC0pEqZ4vQbGfn8f7wPXcnqQmM+zct9UliKxsIkLNmz/ku7LElUNqmaiv1TG/aL5ACsw=="], + "playwright": ["playwright@1.56.1", "", { "dependencies": { "playwright-core": "1.56.1" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw=="], - "playwright-core": ["playwright-core@1.54.2", "", { "bin": { "playwright-core": "cli.js" } }, "sha512-n5r4HFbMmWsB4twG7tJLDN9gmBUeSPcsBZiWSE4DnYz9mJMAFqr2ID7+eGC9kpEnxExJ1epttwR59LEWCk8mtA=="], + "playwright-core": ["playwright-core@1.56.1", "", { "bin": { "playwright-core": "cli.js" } }, "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ=="], "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="], @@ -1411,6 +1416,8 @@ "stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="], + "starlight-links-validator": ["starlight-links-validator@0.19.0", "", { "dependencies": { "@types/picomatch": "^3.0.1", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-has-property": "^3.0.0", "is-absolute-url": "^4.0.1", "kleur": "^4.1.5", "mdast-util-mdx-jsx": "^3.1.3", "mdast-util-to-string": "^4.0.0", "picomatch": "^4.0.2", "terminal-link": "^5.0.0", "unist-util-visit": "^5.0.0" }, "peerDependencies": { "@astrojs/starlight": ">=0.32.0" } }, "sha512-wsqVL/vL6A4wvnM0m1A9Rwu8WuKS4rZerg1Dw5p8Xq4MquJco2F68LNt5lm9kYa3G9dUi4jMZbUAUuzzczQyJQ=="], + "std-env": ["std-env@3.9.0", "", {}, "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw=="], "stoppable": ["stoppable@1.1.0", "", {}, 
"sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw=="], @@ -1457,6 +1464,8 @@ "tempy": ["tempy@1.0.1", "", { "dependencies": { "del": "^6.0.0", "is-stream": "^2.0.0", "temp-dir": "^2.0.0", "type-fest": "^0.16.0", "unique-string": "^2.0.0" } }, "sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w=="], + "terminal-link": ["terminal-link@5.0.0", "", { "dependencies": { "ansi-escapes": "^7.0.0", "supports-hyperlinks": "^4.1.0" } }, "sha512-qFAy10MTMwjzjU8U16YS4YoZD+NQLHzLssFMNqgravjbvIPNiqkGFR4yjhJfmY9R5OFU7+yHxc6y+uGHkKwLRA=="], + "test-exclude": ["test-exclude@7.0.1", "", { "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^10.4.1", "minimatch": "^9.0.4" } }, "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg=="], "thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="], @@ -1589,9 +1598,9 @@ "wordwrap": ["wordwrap@1.0.0", "", {}, "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="], - "workerd": ["workerd@1.20251001.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20251001.0", "@cloudflare/workerd-darwin-arm64": "1.20251001.0", "@cloudflare/workerd-linux-64": "1.20251001.0", "@cloudflare/workerd-linux-arm64": "1.20251001.0", "@cloudflare/workerd-windows-64": "1.20251001.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-oT/K4YWNhmwpVmGeaHNmF7mLRfgjszlVr7lJtpS4jx5khmxmMzWZEEQRrJEpgzeHP6DOq9qWLPNT0bjMK7TchQ=="], + "workerd": ["workerd@1.20251011.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20251011.0", "@cloudflare/workerd-darwin-arm64": "1.20251011.0", "@cloudflare/workerd-linux-64": "1.20251011.0", "@cloudflare/workerd-linux-arm64": "1.20251011.0", "@cloudflare/workerd-windows-64": "1.20251011.0" }, "bin": { 
"workerd": "bin/workerd" } }, "sha512-Dq35TLPEJAw7BuYQMkN3p9rge34zWMU2Gnd4DSJFeVqld4+DAO2aPG7+We2dNIAyM97S8Y9BmHulbQ00E0HC7Q=="], - "wrangler": ["wrangler@4.42.0", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.7.6", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20251001.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.21", "workerd": "1.20251001.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20251001.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-OZXiUSfGD66OVkncDbjZtqrsH6bWPRQMYc6RmMbkzYm/lEvJ8lvARKcqDgEyq8zDAgJAivlMQLyPtKQoVjQ/4g=="], + "wrangler": ["wrangler@4.45.3", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.7.8", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20251011.1", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.21", "workerd": "1.20251011.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20251011.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-0ddEA9t4HeBgSVTVTcqtBHl7Z5CorWZ8tGgTQCP5XuL+9E1TJRwS6t/zzG51Ruwjb17SZYCaLchoM8V629S8cw=="], "wrap-ansi": ["wrap-ansi@9.0.2", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="], @@ -2119,6 +2128,8 @@ "tempy/type-fest": ["type-fest@0.16.0", "", {}, "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg=="], + "terminal-link/supports-hyperlinks": ["supports-hyperlinks@4.3.0", "", { "dependencies": { "has-flag": "^5.0.1", "supports-color": "^10.0.0" } }, 
"sha512-i6sWEzuwadSlcr2mOnb0ktlIl+K5FVxsPXmoPfknDd2gyw4ZBIAZ5coc0NQzYqDdEYXMHy8NaY9rWwa1Q1myiQ=="], + "vite/esbuild": ["esbuild@0.25.10", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.10", "@esbuild/android-arm": "0.25.10", "@esbuild/android-arm64": "0.25.10", "@esbuild/android-x64": "0.25.10", "@esbuild/darwin-arm64": "0.25.10", "@esbuild/darwin-x64": "0.25.10", "@esbuild/freebsd-arm64": "0.25.10", "@esbuild/freebsd-x64": "0.25.10", "@esbuild/linux-arm": "0.25.10", "@esbuild/linux-arm64": "0.25.10", "@esbuild/linux-ia32": "0.25.10", "@esbuild/linux-loong64": "0.25.10", "@esbuild/linux-mips64el": "0.25.10", "@esbuild/linux-ppc64": "0.25.10", "@esbuild/linux-riscv64": "0.25.10", "@esbuild/linux-s390x": "0.25.10", "@esbuild/linux-x64": "0.25.10", "@esbuild/netbsd-arm64": "0.25.10", "@esbuild/netbsd-x64": "0.25.10", "@esbuild/openbsd-arm64": "0.25.10", "@esbuild/openbsd-x64": "0.25.10", "@esbuild/openharmony-arm64": "0.25.10", "@esbuild/sunos-x64": "0.25.10", "@esbuild/win32-arm64": "0.25.10", "@esbuild/win32-ia32": "0.25.10", "@esbuild/win32-x64": "0.25.10" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ=="], "widest-line/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], @@ -2357,6 +2368,10 @@ "signale/figures/escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="], + "terminal-link/supports-hyperlinks/has-flag": ["has-flag@5.0.1", "", {}, "sha512-CsNUt5x9LUdx6hnk/E2SZLsDyvfqANZSUq4+D3D8RzDJ2M+HDTIkF60ibS1vHaK55vzgiZw1bEPFG9yH7l33wA=="], + + "terminal-link/supports-hyperlinks/supports-color": ["supports-color@10.2.2", "", {}, 
"sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g=="], + "vite/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.10", "", { "os": "aix", "cpu": "ppc64" }, "sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw=="], "vite/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.25.10", "", { "os": "android", "cpu": "arm" }, "sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w=="], diff --git a/docs b/docs new file mode 120000 index 0000000..7001ddf --- /dev/null +++ b/docs @@ -0,0 +1 @@ +packages/docs/src/content/docs \ No newline at end of file diff --git a/docs/.keep b/docs/.keep deleted file mode 100644 index e69de29..0000000 diff --git a/flake.lock b/flake.lock index 6580468..ef58484 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1759362264, - "narHash": "sha256-wfG0S7pltlYyZTM+qqlhJ7GMw2fTF4mLKCIVhLii/4M=", + "lastModified": 1760948891, + "narHash": "sha256-TmWcdiUUaWk8J4lpjzu4gCGxWY6/Ok7mOK4fIFfBuU4=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "758cf7296bee11f1706a574c77d072b8a7baa881", + "rev": "864599284fc7c0ba6357ed89ed5e2cd5040f0c04", "type": "github" }, "original": { @@ -20,14 +20,32 @@ "type": "github" } }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1726560853, + "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, "git-hooks": { "flake": false, "locked": { - "lastModified": 1759523803, - "narHash": "sha256-PTod9NG+i3XbbnBKMl/e5uHDBYpwIWivQ3gOWSEuIEM=", + "lastModified": 1760663237, + "narHash": "sha256-BflA6U4AM1bzuRMR8QqzPXqh8sWVCNDzOdsxXEguJIc=", "owner": "cachix", "repo": "git-hooks.nix", 
- "rev": "cfc9f7bb163ad8542029d303e599c0f7eee09835", + "rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37", "type": "github" }, "original": { @@ -38,11 +56,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1759381078, - "narHash": "sha256-gTrEEp5gEspIcCOx9PD8kMaF1iEmfBcTbO0Jag2QhQs=", + "lastModified": 1761672384, + "narHash": "sha256-o9KF3DJL7g7iYMZq9SWgfS1BFlNbsm6xplRjVlOCkXI=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "7df7ff7d8e00218376575f0acdcc5d66741351ee", + "rev": "08dacfca559e1d7da38f3cf05f1f45ee9bfd213c", "type": "github" }, "original": { @@ -52,12 +70,35 @@ "type": "github" } }, + "playwright-web-flake": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1760833992, + "narHash": "sha256-CiVBf+Com8mwDexMVw6s4BIT1J1In/UNHvaqiZwSfIs=", + "owner": "pietdevries94", + "repo": "playwright-web-flake", + "rev": "d3996ee82c6bcdc4c9535b94068abaa2744a7411", + "type": "github" + }, + "original": { + "owner": "pietdevries94", + "ref": "1.56.1", + "repo": "playwright-web-flake", + "type": "github" + } + }, "root": { "inputs": { "flake-parts": "flake-parts", "git-hooks": "git-hooks", "nixpkgs": "nixpkgs", - "systems": "systems" + "playwright-web-flake": "playwright-web-flake", + "systems": "systems_2" } }, "systems": { @@ -74,6 +115,21 @@ "repo": "default", "type": "github" } + }, + "systems_2": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index 27a4b79..5146139 100644 --- a/flake.nix +++ b/flake.nix @@ -12,6 +12,11 @@ git-hooks.url = "github:cachix/git-hooks.nix"; git-hooks.flake = false; + + # playwright browsers pinned to match package.json (@playwright/test version) + 
# sync this when upgrading @playwright/test in packages/docs/package.json + playwright-web-flake.url = "github:pietdevries94/playwright-web-flake/1.56.1"; + playwright-web-flake.inputs.nixpkgs.follows = "nixpkgs"; }; outputs = diff --git a/justfile b/justfile index 3048112..11d2625 100644 --- a/justfile +++ b/justfile @@ -19,6 +19,43 @@ default: install: bun install +# Update dependencies to latest compatible versions (respects ^, ~) +[group('workspace')] +update: + bun update + +# Upgrade dependencies to latest versions (ignoring semver constraints) +[group('workspace')] +upgrade: + bun upgrade + +# Show outdated dependencies +[group('workspace')] +outdated: + bun outdated + +# Check if @playwright/test matches playwright-web-flake version +[group('workspace')] +check-playwright: + @./scripts/check-playwright-sync.sh + +# Update @playwright/test to match playwright-web-flake, update lockfile, and test +[group('workspace')] +update-playwright: check-playwright + #!/usr/bin/env bash + set -euo pipefail + FLAKE_VERSION=$(grep "playwright-web-flake.url" flake.nix | sed 's/.*\/\([0-9.]*\)".*/\1/') + echo "Updating @playwright/test to ^$FLAKE_VERSION..." + cd packages/docs + # Use jq to update package.json + jq ".devDependencies.\"@playwright/test\" = \"^$FLAKE_VERSION\"" package.json > package.json.tmp + mv package.json.tmp package.json + cd ../.. + echo "Running bun install to update lockfile..." + bun install + echo "Running tests to verify..." 
+ just docs-test + # Clean all workspace build artifacts [group('workspace')] clean: @@ -85,9 +122,9 @@ ghsecrets repo="": echo sops exec-env vars/shared.yaml '\ gh secret set CACHIX_AUTH_TOKEN --repo='"$REPO"' --body="$CACHIX_AUTH_TOKEN" && \ - gh secret set GITGUARDIAN_API_KEY --repo='"$REPO"' --body="$GITGUARDIAN_API_KEY" && \ gh secret set CLOUDFLARE_ACCOUNT_ID --repo='"$REPO"' --body="$CLOUDFLARE_ACCOUNT_ID" && \ - gh secret set CLOUDFLARE_API_TOKEN --repo='"$REPO"' --body="$CLOUDFLARE_API_TOKEN"' + gh secret set CLOUDFLARE_API_TOKEN --repo='"$REPO"' --body="$CLOUDFLARE_API_TOKEN" && \ + gh secret set FAST_FORWARD_PAT --repo='"$REPO"' --body="$FAST_FORWARD_PAT"' echo echo "secrets after updates (wait 3 seconds for github to update):" sleep 3 @@ -248,6 +285,33 @@ gh-cancel run_id="": gh run cancel {{run_id}} --repo $REPO fi +# List all packages in packages/ directory +[group('CI/CD')] +list-packages: + @ls -1 packages/ + +# List packages in JSON format for CI matrix +[group('CI/CD')] +list-packages-json: + #!/usr/bin/env bash + cd packages + packages=() + for dir in */; do + pkg_name="${dir%/}" + if [ -f "$dir/package.json" ]; then + packages+=("{\"name\":\"$pkg_name\",\"path\":\"packages/$pkg_name\"}") + fi + done + echo "[$(IFS=,; echo "${packages[*]}")]" + +# Validate package structure +[group('CI/CD')] +validate-package package: + @echo "Validating package: {{ package }}" + @test -d "packages/{{ package }}" || (echo "Package directory not found" && exit 1) + @test -f "packages/{{ package }}/package.json" || (echo "package.json not found" && exit 1) + @echo "✓ Package {{ package }} is valid" + ## Cloudflare # Preview the site locally with Cloudflare Workers @@ -264,25 +328,138 @@ cf-build-deploy: install [group('cloudflare')] cf-deploy-preview branch=`git branch --show-current`: #!/usr/bin/env bash + set -euo pipefail cd packages/docs - sops exec-env ../../vars/shared.yaml " - echo 'Deploying preview for branch: {{branch}}' - echo 'Building...' 
- bun run build - echo 'Uploading version with preview alias...' - bunx wrangler versions upload --preview-alias b-{{branch}} - " -# Deploy to production (immediate 100% rollout) + # Capture git metadata (use 12-char SHA for tag - fits in 25 char limit, extremely collision-resistant) + COMMIT_SHA=$(git rev-parse HEAD) + COMMIT_TAG=$(git rev-parse --short=12 HEAD) + COMMIT_SHORT=$(git rev-parse --short HEAD) + COMMIT_MSG=$(git log -1 --pretty=format:'%s') + GIT_STATUS=$(git diff-index --quiet HEAD -- && echo "clean" || echo "dirty") + + # Tag is 12-char SHA (deterministic, <= 25 chars, used to find this version on main) + TAG="${COMMIT_TAG}" + # Message includes full context for verification + MESSAGE="[{{branch}}] ${COMMIT_MSG} (${COMMIT_TAG}, ${GIT_STATUS})" + + echo "Deploying preview for branch: {{branch}}" + echo "Commit: ${COMMIT_SHORT} (${GIT_STATUS})" + echo "Full SHA: ${COMMIT_SHA}" + echo "Tag: ${COMMIT_TAG}" + echo "Message: ${COMMIT_MSG}" + echo "" + + # Export variables for use in sops exec-env + export VERSION_TAG="${TAG}" + export VERSION_MESSAGE="${MESSAGE}" + + sops exec-env ../../vars/shared.yaml ' + echo "Building..." + bun run build + echo "Uploading version with preview alias and metadata..." + bunx wrangler versions upload \ + --preview-alias b-{{branch}} \ + --tag "$VERSION_TAG" \ + --message "$VERSION_MESSAGE" + ' + + echo "" + echo "✓ Version uploaded successfully" + echo " Tag: ${COMMIT_TAG}" + echo " Full SHA: ${COMMIT_SHA}" + echo " Message: ${MESSAGE}" + echo " Preview URL: https://b-{{branch}}-ts-nix-docs.sciexp.workers.dev" + +# Deploy to production (promote existing version or fallback to build+deploy) [group('cloudflare')] cf-deploy-production: #!/usr/bin/env bash + set -euo pipefail cd packages/docs - sops exec-env ../../vars/shared.yaml " - echo 'Building and deploying to production...' 
- bun run build - bunx wrangler deploy - " + + # Get current commit tag (should match a previously uploaded version if fast-forward merged) + CURRENT_SHA=$(git rev-parse HEAD) + CURRENT_TAG=$(git rev-parse --short=12 HEAD) + CURRENT_SHORT=$(git rev-parse --short HEAD) + CURRENT_BRANCH=$(git branch --show-current) + + # Build deployment message (works in both CI and local) + if [ -n "${GITHUB_ACTIONS:-}" ]; then + # Running in GitHub Actions + DEPLOYER="${GITHUB_ACTOR:-github-actions}" + DEPLOY_CONTEXT="${GITHUB_WORKFLOW:-CI}" + DEPLOY_MSG="Deployed by ${DEPLOYER} from ${CURRENT_BRANCH} via ${DEPLOY_CONTEXT}" + else + # Running locally + DEPLOYER=$(whoami) + DEPLOY_HOST=$(hostname -s) + DEPLOY_MSG="Deployed by ${DEPLOYER} from ${CURRENT_BRANCH} on ${DEPLOY_HOST}" + fi + + echo "Deploying to production from branch: ${CURRENT_BRANCH}" + echo "Current commit: ${CURRENT_SHORT}" + echo "Full SHA: ${CURRENT_SHA}" + echo "Looking for existing version with tag: ${CURRENT_TAG}" + echo "Deployment message: ${DEPLOY_MSG}" + echo "" + + # Query for existing version with matching tag (take most recent if multiple) + EXISTING_VERSION=$(sops exec-env ../../vars/shared.yaml \ + "bunx wrangler versions list --json" | \ + jq -r --arg tag "$CURRENT_TAG" \ + '.[] | select(.annotations["workers/tag"] == $tag) | .id' | head -1) + + if [ -n "$EXISTING_VERSION" ]; then + echo "✓ Found existing version: ${EXISTING_VERSION}" + echo " This version was already built and tested in preview" + echo " Promoting to 100% production traffic..." 
+ echo "" + + # Export for use in sops exec-env + export DEPLOYMENT_MESSAGE="${DEPLOY_MSG}" + + if sops exec-env ../../vars/shared.yaml " + bunx wrangler versions deploy ${EXISTING_VERSION}@100% --yes --message \"\$DEPLOYMENT_MESSAGE\" + "; then + echo "" + echo "✓ Successfully promoted version ${EXISTING_VERSION} to production" + echo " Tag: ${CURRENT_TAG}" + echo " Full SHA: ${CURRENT_SHA}" + echo " Deployed by: ${DEPLOY_MSG}" + echo " Production URL: https://ts-nix.scientistexperience.net" + else + echo "" + echo "✗ Failed to promote version ${EXISTING_VERSION}" + echo " Deployment was cancelled or failed" + exit 1 + fi + else + echo "⚠ No existing version found with tag: ${CURRENT_TAG}" + echo " This should only happen if:" + echo " - This is the first deployment" + echo " - Commit was made directly on main (not recommended)" + echo " - Version was cleaned up (retention policy)" + echo "" + echo " Falling back to direct build and deploy..." + echo "" + + # Export for use in sops exec-env + export DEPLOYMENT_MESSAGE="${DEPLOY_MSG}" + + sops exec-env ../../vars/shared.yaml ' + echo "Building..." + bun run build + echo "Deploying to production..." 
+ bunx wrangler deploy --message "$DEPLOYMENT_MESSAGE" + ' + + echo "" + echo "✓ Built and deployed to production" + echo " Full SHA: ${CURRENT_SHA}" + echo " Deployed by: ${DEPLOY_MSG}" + echo " Production URL: https://ts-nix.scientistexperience.net" + fi # List recent versions [group('cloudflare')] @@ -320,22 +497,62 @@ cf-types: # Start development server [group('docs')] -dev: - bun run --filter '@typescript-nix-template/docs' dev +docs-dev: + cd packages/docs && bun run dev # Build the documentation site [group('docs')] -build: - bun run --filter '@typescript-nix-template/docs' build +docs-build: + cd packages/docs && bun run build # Preview the built site [group('docs')] -preview: - bun run --filter '@typescript-nix-template/docs' preview +docs-preview: + cd packages/docs && bun run preview + +# Format documentation code +[group('docs')] +docs-format: + cd packages/docs && bun run format + +# Lint documentation code +[group('docs')] +docs-lint: + cd packages/docs && bun run lint + +# Check and fix documentation code +[group('docs')] +docs-check: + cd packages/docs && bun run check:fix + +# Validate documentation links +[group('docs')] +docs-linkcheck: + cd packages/docs && bun run linkcheck + +# Run all documentation tests +[group('docs')] +docs-test: + cd packages/docs && bun run test + +# Run documentation unit tests +[group('docs')] +docs-test-unit: + cd packages/docs && bun run test:unit + +# Run documentation E2E tests +[group('docs')] +docs-test-e2e: + cd packages/docs && bun run test:e2e + +# Generate documentation test coverage report +[group('docs')] +docs-test-coverage: + cd packages/docs && bun run test:coverage # Optimize favicon.svg with SVGO [group('docs')] -optimize-favicon: +docs-optimize-favicon: bunx svgo packages/docs/public/favicon.svg --multipass ## Nix @@ -375,8 +592,42 @@ test-release-all: cd "$pkg" && bun run test-release && cd ../..; \ done +# Preview semantic-release version after merging current branch to target +[group('release')] 
+preview-version target="main" package="": + #!/usr/bin/env bash + set -euo pipefail + if [ -n "{{package}}" ]; then + ./scripts/preview-version.sh "{{target}}" "{{package}}" + else + ./scripts/preview-version.sh "{{target}}" + fi + +# Release specific package with semantic-release +[group('release')] +release-package package dry_run="false": + #!/usr/bin/env bash + set -euo pipefail + cd packages/{{ package }} + if [ "{{ dry_run }}" = "true" ]; then + bun run test-release + else + echo "This will create a real release. Use dry_run=true for testing." + bun run semantic-release + fi + ## Secrets +# Scan repository for secrets +[group('secrets')] +scan-secrets: + gitleaks detect --verbose --redact + +# Scan staged changes for secrets (pre-commit) +[group('secrets')] +scan-staged: + gitleaks protect --staged --verbose --redact + # Show existing secrets using sops [group('secrets')] show-secrets: @@ -410,7 +661,7 @@ run-with-secrets +command: [group('secrets')] check-secrets: @printf "Check sops environment for secrets\n\n" - @sops exec-env vars/shared.yaml 'env | grep -E "GITHUB|CACHIX|CLOUDFLARE|GITGUARDIAN" | sed "s/=.*$/=***REDACTED***/"' + @sops exec-env vars/shared.yaml 'env | grep -E "GITHUB|CACHIX|CLOUDFLARE" | sed "s/=.*$/=***REDACTED***/"' # Show specific secret value from shared secrets [group('secrets')] @@ -584,6 +835,11 @@ test-ui: test-coverage: bun run --filter '@typescript-nix-template/docs' test:coverage +# Run all tests for specific package (CI workflow) +[group('testing')] +test-package package: + cd packages/{{ package }} && bun run test:unit && bun run test:coverage && bun run test:e2e + # Install playwright browsers (only needed outside Nix environment) # The Nix devshell provides browsers via playwright-driver.browsers # See: nix/modules/devshell.nix lines 36, 47-48 diff --git a/nix/modules/devshell.nix b/nix/modules/devshell.nix index 91954b0..3aa7beb 100644 --- a/nix/modules/devshell.nix +++ b/nix/modules/devshell.nix @@ -9,8 +9,13 @@ self', 
pkgs, lib, + system, ... }: + let + # Playwright driver from versioned flake (synced with package.json) + playwrightDriver = inputs.playwright-web-flake.packages.${system}.playwright-driver; + in { devShells = { default = pkgs.mkShell { @@ -27,14 +32,14 @@ age sops ssh-to-age + gitleaks # CI/CD tools gh act cachix - # Testing tools - playwright-driver.browsers + # E2E testing browsers from playwright-web-flake (pinned to 1.56.1) # Git environment setup config.packages.set-git-env @@ -44,8 +49,9 @@ export REPO_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd) set-git-env - # Playwright configuration for Nix - export PLAYWRIGHT_BROWSERS_PATH=${pkgs.playwright-driver.browsers} + # Playwright browser configuration (version-locked via flake input) + export PLAYWRIGHT_BROWSERS_PATH="${playwrightDriver.browsers}" + export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1 export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true printf "\n$GIT_REPO_NAME $GIT_REF $GIT_SHA_SHORT\n\n" diff --git a/nix/modules/pre-commit.nix b/nix/modules/pre-commit.nix index 2a8f60a..3a6d9bf 100644 --- a/nix/modules/pre-commit.nix +++ b/nix/modules/pre-commit.nix @@ -16,6 +16,13 @@ hooks = { nixfmt-rfc-style.enable = true; biome.enable = true; + gitleaks = { + enable = true; + name = "gitleaks"; + entry = "${pkgs.gitleaks}/bin/gitleaks protect --staged --verbose --redact"; + language = "system"; + pass_filenames = false; + }; }; }; }; diff --git a/package.json b/package.json index 517bd77..f7fe909 100644 --- a/package.json +++ b/package.json @@ -80,15 +80,6 @@ "v${major}.${minor}" ] } - ], - [ - "@semantic-release/git", - { - "message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}", - "assets": [ - "CHANGELOG.md" - ] - } ] ], "npmPublish": false diff --git a/packages/docs/astro.config.ts b/packages/docs/astro.config.ts index 3d44620..6d3d21d 100644 --- a/packages/docs/astro.config.ts +++ b/packages/docs/astro.config.ts @@ -1,6 +1,8 @@ import cloudflare from 
"@astrojs/cloudflare"; import starlight from "@astrojs/starlight"; import { defineConfig } from "astro/config"; +import starlightLinksValidator from "starlight-links-validator"; +import justGrammar from "./src/grammars/just.tmLanguage.json"; // ROLLDOWN INTEGRATION (DISABLED) - Uncomment when re-enabling (see ROLLDOWN.md) // import * as vite from "vite"; @@ -9,7 +11,20 @@ export default defineConfig({ integrations: [ starlight({ title: "typescript-nix-template", - prerender: false, + prerender: true, + plugins: process.env.CHECK_LINKS + ? [ + starlightLinksValidator({ + errorOnRelativeLinks: false, + errorOnLocalLinks: false, + }), + ] + : [], + expressiveCode: { + shiki: { + langs: [justGrammar], + }, + }, social: [ { icon: "github", diff --git a/packages/docs/package.json b/packages/docs/package.json index 5416299..eda001a 100644 --- a/packages/docs/package.json +++ b/packages/docs/package.json @@ -16,6 +16,7 @@ "build": "astro build", "preview": "astro build && wrangler dev", "deploy": "astro build && wrangler deploy", + "linkcheck": "CHECK_LINKS=true astro build", "cf-typegen": "wrangler types", "format": "biome format --write", "lint": "biome lint", @@ -37,25 +38,30 @@ }, "devDependencies": { "@biomejs/biome": "2.2.4", - "@playwright/test": "~1.54.0", + "@playwright/test": "^1.56.1", "@types/node": "^24.6.2", "@vitest/coverage-v8": "^3.2.4", "svgo": "^4.0.0", "vitest": "^3.2.4", - "wrangler": "^4.42.0", + "wrangler": "^4.45.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^11.0.1", "conventional-changelog-conventionalcommits": "^8.0.0", "semantic-release": "^24.2.3", "semantic-release-major-tag": "^0.3.2", - "semantic-release-monorepo": "^8.0.2" + "semantic-release-monorepo": "^8.0.2", + "starlight-links-validator": "^0.19.0" }, "release": { "extends": "semantic-release-monorepo", "branches": [ { "name": "main" + }, + { + "name": "beta", + "prerelease": true } ], "npmPublish": false, diff --git 
a/packages/docs/src/content/docs/guides/ci-cd-setup.md b/packages/docs/src/content/docs/guides/ci-cd-setup.md index 40453be..8bfa6ee 100644 --- a/packages/docs/src/content/docs/guides/ci-cd-setup.md +++ b/packages/docs/src/content/docs/guides/ci-cd-setup.md @@ -52,7 +52,7 @@ CI_AGE_KEY: age-secret-key-1... # CI age private key from .sops.yaml ``` The `CI_AGE_KEY` should be the private key corresponding to the public key: -`age1m9m8h5vqr7dqlmvnzcwshmm4uk8umcllazum6eaulkdp3qc88ugs22j3p8` +`age1m9m8h5vqr7dqlmvnzcwshmm4uk8umcllazum6eaulkdp3qc88ugs22j3p8` ### 1.5 Encrypt the Secrets File diff --git a/packages/docs/src/content/docs/guides/getting-started.md b/packages/docs/src/content/docs/guides/getting-started.md index 0cf1ada..caf38b2 100644 --- a/packages/docs/src/content/docs/guides/getting-started.md +++ b/packages/docs/src/content/docs/guides/getting-started.md @@ -74,7 +74,7 @@ bun install just dev ``` -Visit http://localhost:4321 to see your site. +Visit `http://localhost:4321` in your browser to see your site. 
## Understanding the structure diff --git a/packages/docs/src/content/docs/guides/template-usage.md b/packages/docs/src/content/docs/guides/template-usage.md index 31aafca..2aa0f56 100644 --- a/packages/docs/src/content/docs/guides/template-usage.md +++ b/packages/docs/src/content/docs/guides/template-usage.md @@ -232,7 +232,7 @@ matrix: Update filter patterns: -```justfile +```just # Before bun run --filter '@typescript-nix-template/docs' dev diff --git a/docs/notes/build/rolldown-workers-incompatibility.md b/packages/docs/src/content/docs/notes/build/rolldown-workers-incompatibility.md similarity index 87% rename from docs/notes/build/rolldown-workers-incompatibility.md rename to packages/docs/src/content/docs/notes/build/rolldown-workers-incompatibility.md index 629a712..2a670c2 100644 --- a/docs/notes/build/rolldown-workers-incompatibility.md +++ b/packages/docs/src/content/docs/notes/build/rolldown-workers-incompatibility.md @@ -1,4 +1,6 @@ -# Rolldown-Vite Integration (Currently Disabled) +--- +title: Rolldown-Vite Integration (Currently Disabled) +--- ## Status @@ -11,9 +13,11 @@ ## Issue Details ### Problem + Rolldown's bundler generates runtime code that uses Node.js's `createRequire(import.meta.url)`, which fails in Cloudflare Workers because `import.meta.url` is undefined in that execution context. ### Error + ``` ✘ [ERROR] service core:user:typescript-nix-template: Uncaught TypeError: The argument 'path' must be a file URL object, a file URL string, or an absolute path string. @@ -26,16 +30,19 @@ at null. 
(index.js:1203:33) in dist/_worker.js/chunks/rolldown-runtim

### Investigation Results

**GitHub Issues & PRs**:
-- Issue: https://github.com/cloudflare/workers-sdk/issues/9415 (Closed)
-- Fix Merged: https://github.com/cloudflare/workers-sdk/pull/9891 (July 18, 2025)
+
+- Issue: <https://github.com/cloudflare/workers-sdk/issues/9415> (Closed)
+- Fix Merged: <https://github.com/cloudflare/workers-sdk/pull/9891> (July 18, 2025)

**Root Cause**:
+
- The fix in PR #9891 sets `rollupOptions.platform: "neutral"` to prevent rolldown's Node.js polyfills
- **Critical limitation**: The fix only applies to `@cloudflare/vite-plugin`, not `@astrojs/cloudflare`
- We use `@astrojs/cloudflare` adapter which internally manages Vite configuration
- No direct way to apply the platform: "neutral" fix through Astro's adapter layer

**Attempted Workarounds** (all unsuccessful):
+
1. Setting `vite.build.rollupOptions.platform: "neutral"` in astro.config.mjs
2. Setting `vite.ssr.build.rollupOptions.platform: "neutral"`
3. Setting `vite.optimizeDeps.esbuildOptions.platform: "neutral"`
@@ -49,11 +56,13 @@ When rolldown compatibility is resolved with Astro + Cloudflare, follow these st

### Step 1: Update package.json

Add to `devDependencies`:
+
```json
"vite": "npm:rolldown-vite@latest"
```

Add new `overrides` section at root level:
+
```json
"overrides": {
  "vite": "npm:rolldown-vite@latest"
@@ -63,6 +72,7 @@ Add new `overrides` section at root level:

### Step 2: Update astro.config.mjs

1.
Uncomment the vite import at the top:
+
```javascript
import * as vite from "vite";
```
@@ -86,43 +96,51 @@
bun run preview
```

**Success Criteria**:
+
- Build completes without errors
- No `createRequire` in `dist/_worker.js/chunks/rolldown-runtime_*.mjs`
- Wrangler dev starts successfully
-- Site loads at http://localhost:8787
+- Site loads at <http://localhost:8787>

## Alternative Paths Forward

### Option A: Wait for Official Support
-- Monitor Astro + Rolldown roadmap: https://github.com/rolldown/rolldown/discussions/153
+
+- Monitor Astro + Rolldown roadmap: <https://github.com/rolldown/rolldown/discussions/153>
- Currently "on hold" for Astro support
-- Subscribe to: https://github.com/withastro/adapters/issues
+- Subscribe to: <https://github.com/withastro/adapters/issues>

### Option B: Migrate to @cloudflare/vite-plugin
+
If Astro adds support for using `@cloudflare/vite-plugin` directly:

**Pros**:
+
- Direct access to rolldown compatibility fixes
- Native Workers runtime in dev server
- Official Cloudflare support

**Cons**:
+
- Would lose Astro-specific adapter features
- Significant configuration changes required
- Unknown SSR parity with `@astrojs/cloudflare`

**Resources**:
-- https://developers.cloudflare.com/workers/vite-plugin/
-- https://blog.cloudflare.com/introducing-the-cloudflare-vite-plugin/
+
+- <https://developers.cloudflare.com/workers/vite-plugin/>
+- <https://blog.cloudflare.com/introducing-the-cloudflare-vite-plugin/>

## Performance Comparison

**Standard Vite** (current):
+
- Server build: ~900ms
- Client build: ~34ms
- Total: ~1.6s

**Rolldown-Vite** (when tested):
+
- Server build: ~890ms (similar)
- Client build: ~33ms (similar)
- Total: ~1.6s
@@ -133,6 +151,7 @@ Reference projects report 2-16x improvements.
## Dependencies The `@astrojs/cloudflare` adapter currently depends on: + ```json { "@cloudflare/workers-types": "^4.20250109.0", diff --git a/packages/docs/src/content/docs/reference/architecture.md b/packages/docs/src/content/docs/reference/architecture.md index 2e5103a..8916756 100644 --- a/packages/docs/src/content/docs/reference/architecture.md +++ b/packages/docs/src/content/docs/reference/architecture.md @@ -121,7 +121,7 @@ docs: update installation guide - Clear documentation in CONTRIBUTING.md - Semantic-release requires proper format for version bumps -See [CONTRIBUTING.md](/CONTRIBUTING.md) for detailed conventional commit guidelines. +See [CONTRIBUTING.md](https://github.com/sciexp/typescript-nix-template/blob/main/CONTRIBUTING.md) for detailed conventional commit guidelines. ## Workspace configuration diff --git a/packages/docs/src/grammars/just.tmLanguage.json b/packages/docs/src/grammars/just.tmLanguage.json new file mode 100644 index 0000000..1233a1a --- /dev/null +++ b/packages/docs/src/grammars/just.tmLanguage.json @@ -0,0 +1,752 @@ +{ + "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", + "name": "just", + "scopeName": "source.just", + "fileTypes": ["just", "justfile", "Justfile"], + "firstLineMatch": "#![\\s\\t]*\\/.*\\just\\b", + "uuid": "8b0cfae0-229f-4688-a4b7-8b5c3db82855", + "patterns": [ + { + "include": "#comments" + }, + { + "include": "#import" + }, + { + "include": "#module" + }, + { + "include": "#alias" + }, + { + "include": "#assignment" + }, + { + "include": "#builtins" + }, + { + "include": "#keywords" + }, + { + "include": "#expression-operators" + }, + { + "include": "#backtick" + }, + { + "include": "#strings" + }, + { + "include": "#parenthesis" + }, + { + "include": "#recipes" + }, + { + "include": "#recipe-operators" + }, + { + "include": "#embedded-languages" + }, + { + "include": "#escaping" + } + ], + "repository": { + "comments": { + "patterns": [ + { + "name": 
"comment.line.number-sign.just", + "match": "#(?!\\!).*$" + } + ] + }, + "import": { + "begin": "(?x)\n ^\n (import)\n (\\?)? \\s+\n", + "end": "$", + "beginCaptures": { + "1": { + "name": "keyword.other.reserved.just" + }, + "2": { + "name": "punctuation.optional.just" + } + }, + "patterns": [ + { + "include": "#strings" + } + ] + }, + "module": { + "begin": "(?x)\n ^\n (mod)\n (\\?)? \\s+\n ([a-zA-Z_][a-zA-Z0-9_-]*)\n (?=[$\\s])\n", + "end": "$", + "beginCaptures": { + "1": { + "name": "keyword.other.reserved.just" + }, + "2": { + "name": "punctuation.optional.just" + }, + "3": { + "name": "variable.name.module.just" + } + }, + "patterns": [ + { + "include": "#strings" + } + ] + }, + "alias": { + "match": "(?x)\n ^\n (alias) \\s+ \n ([a-zA-Z_][a-zA-Z0-9_-]*) \\s* \n (:=) \\s* \n ([a-zA-Z_][a-zA-Z0-9_-]*)\n", + "captures": { + "1": { + "name": "keyword.other.reserved.just" + }, + "2": { + "name": "variable.name.alias.just" + }, + "3": { + "name": "keyword.operator.assignment.just" + }, + "4": { + "name": "variable.other.just" + } + } + }, + "assignment": { + "patterns": [ + { + "include": "#variable-assignment" + }, + { + "include": "#setting-assignment" + } + ] + }, + "variable-assignment": { + "patterns": [ + { + "match": "^(unexport)\\s+([a-zA-Z_][a-zA-Z0-9_-]*)", + "captures": { + "1": { + "name": "keyword.other.reserved.just" + }, + "2": { + "name": "variable.other.just" + } + } + }, + { + "begin": "(?x) \n ^\n (?: (export) \\s+)?\n ([a-zA-Z_][a-zA-Z0-9_-]*) \\s*\n (:=)\n", + "end": "$", + "beginCaptures": { + "1": { + "name": "keyword.other.reserved.just" + }, + "2": { + "name": "variable.other.just" + }, + "3": { + "name": "keyword.operator.assignment.just" + } + }, + "patterns": [ + { + "include": "#expression" + }, + { + "include": "#comments" + } + ] + } + ] + }, + "setting-assignment": { + "patterns": [ + { + "begin": "(?x) \n ^\n (set) \\s+\n ([a-zA-Z_][a-zA-Z0-9_-]*) \\s*\n (:=)?\n", + "end": "$", + "beginCaptures": { + "1": { + "name": 
"keyword.other.reserved.just" + }, + "2": { + "name": "variable.other.just" + }, + "3": { + "name": "keyword.operator.assignment.just" + } + }, + "patterns": [ + { + "include": "#expression" + }, + { + "include": "#comments" + } + ] + } + ] + }, + "expression": { + "patterns": [ + { + "include": "#backtick" + }, + { + "include": "#builtins" + }, + { + "include": "#control-keywords" + }, + { + "include": "#expression-operators" + }, + { + "include": "#parenthesis" + }, + { + "include": "#strings" + } + ] + }, + "builtins": { + "patterns": [ + { + "name": "constant.language.const.just", + "match": "(?x) \\b(\n HEX|HEXLOWER|HEXUPPER|PATH_SEP|PATH_VAR_SEP|CLEAR|NORMAL|BOLD|ITALIC|UNDERLINE|INVERT|HIDE|\n STRIKETHROUGH|BLACK|RED|GREEN|YELLOW|BLUE|MAGENTA|CYAN|WHITE|BG_BLACK|\n BG_RED|BG_GREEN|BG_YELLOW|BG_BLUE|BG_MAGENTA|BG_CYAN|BG_WHITE\n)\\b\n" + }, + { + "include": "#builtin-functions" + }, + { + "include": "#literal" + } + ] + }, + "builtin-functions": { + "patterns": [ + { + "name": "support.function.builtin.just", + "match": "(?x) \\b(\n arch|num_cpus|os|os_family|shell|env_var|env_var_or_default|env|\n is_dependency|invocation_directory|invocation_dir|invocation_directory_native|\n invocation_dir_native|justfile|justfile_directory|justfile_dir|just_executable|\n just_pid|source_file|source_directory|source_dir|module_file|module_directory|\n module_dir|append|prepend|encode_uri_component|quote|replace|replace_regex|\n trim|trim_end|trim_end_match|trim_end_matches|trim_start|trim_start_match|\n trim_start_matches|capitalize|kebabcase|lowercamelcase|lowercase|\n shoutykebabcase|shoutysnakecase|snakecase|titlecase|uppercamelcase|\n uppercase|absolute_path|blake3|blake3_file|canonicalize|extension|\n file_name|file_stem|parent_directory|parent_dir|without_extension|clean|join|\n path_exists|error|assert|sha256|sha256_file|uuid|choose|datetime|\n datetime_utc|semver_matches|style|cache_directory|cache_dir|config_directory|config_dir|\n 
config_local_directory|config_local_dir|data_directory|data_dir|data_local_directory|\n data_local_dir|executable_directory|executable_dir|home_directory|home_dir|which|require|read\n)\\b\n" + } + ] + }, + "literal": { + "patterns": [ + { + "include": "#boolean" + }, + { + "include": "#number" + } + ] + }, + "boolean": { + "patterns": [ + { + "name": "constant.language.boolean.just", + "match": "\\b(true|false)\\b" + } + ] + }, + "number": { + "patterns": [ + { + "name": "constant.numeric.just", + "match": "(?x)\n (?> = { + [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string; +}; +declare namespace NodeJS { + interface ProcessEnv extends StringifyValues> {} +} diff --git a/scripts/check-playwright-sync.sh b/scripts/check-playwright-sync.sh new file mode 100755 index 0000000..df4bbf7 --- /dev/null +++ b/scripts/check-playwright-sync.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Check if @playwright/test version matches playwright-web-flake pin +set -euo pipefail + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Get playwright-web-flake version from flake.nix +FLAKE_VERSION=$(grep "playwright-web-flake.url" flake.nix | sed 's/.*\/\([0-9.]*\)".*/\1/') + +# Get @playwright/test version from package.json (strip ^, ~, etc.) +NPM_VERSION=$(jq -r '.devDependencies."@playwright/test"' packages/docs/package.json | sed 's/[^0-9.]//g') + +# Extract major.minor for comparison (ignore patch) +FLAKE_MAJ_MIN=$(echo "$FLAKE_VERSION" | cut -d. -f1-2) +NPM_MAJ_MIN=$(echo "$NPM_VERSION" | cut -d. 
-f1-2) + +echo "Version Check:" +echo " playwright-web-flake: $FLAKE_VERSION" +echo " @playwright/test: $NPM_VERSION" +echo "" + +if [ "$FLAKE_MAJ_MIN" = "$NPM_MAJ_MIN" ]; then + echo -e "${GREEN}✓ Versions synchronized${NC}" + exit 0 +else + echo -e "${RED}✗ Version mismatch detected!${NC}" + echo "" + echo -e "${YELLOW}Action required:${NC}" + if [[ "$FLAKE_VERSION" > "$NPM_VERSION" ]]; then + echo " 1. Update @playwright/test to match flake:" + echo " just deps-update-playwright" + else + echo " 1. Update playwright-web-flake in flake.nix to $NPM_MAJ_MIN.x" + echo " 2. Run: nix flake update playwright-web-flake" + echo " 3. Test: just docs-test" + fi + exit 1 +fi diff --git a/scripts/preview-version.sh b/scripts/preview-version.sh new file mode 100755 index 0000000..1beba25 --- /dev/null +++ b/scripts/preview-version.sh @@ -0,0 +1,201 @@ +#!/usr/bin/env bash +# preview-version.sh - Preview semantic-release version after merging to target branch +# +# Usage: +# ./scripts/preview-version.sh [target-branch] [package-path] +# +# Examples: +# ./scripts/preview-version.sh # Preview root version on main +# ./scripts/preview-version.sh main packages/docs # Preview docs package version on main +# ./scripts/preview-version.sh beta packages/docs # Preview docs version on beta +# +# This script simulates merging the current branch into the target branch and +# runs semantic-release in dry-run mode to preview what version would be released. + +set -euo pipefail + +# Configuration +TARGET_BRANCH="${1:-main}" +PACKAGE_PATH="${2:-}" +CURRENT_BRANCH=$(git branch --show-current) +REPO_ROOT=$(git rev-parse --show-toplevel) +WORKTREE_DIR=$(mktemp -d "${TMPDIR:-/tmp}/semantic-release-preview.XXXXXX") + +# Save original target branch HEAD for restoration +ORIGINAL_TARGET_HEAD="" +ORIGINAL_REMOTE_HEAD="" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Cleanup function +cleanup() { + local exit_code=$? 
+ + # Always restore target branch to original state if we modified it + if [ -n "$ORIGINAL_TARGET_HEAD" ]; then + echo -e "\n${BLUE}restoring ${TARGET_BRANCH} to original state...${NC}" + git update-ref "refs/heads/$TARGET_BRANCH" "$ORIGINAL_TARGET_HEAD" 2>/dev/null || true + fi + + # Always restore remote-tracking branch to original state if we modified it + if [ -n "$ORIGINAL_REMOTE_HEAD" ]; then + git update-ref "refs/remotes/origin/$TARGET_BRANCH" "$ORIGINAL_REMOTE_HEAD" 2>/dev/null || true + fi + + # Clean up worktree + if [ -d "$WORKTREE_DIR" ]; then + echo -e "${BLUE}cleaning up worktree...${NC}" + git worktree remove --force "$WORKTREE_DIR" 2>/dev/null || true + # Prune any stale worktree references + git worktree prune 2>/dev/null || true + fi + + exit $exit_code +} + +trap cleanup EXIT INT TERM + +# Validation +if [ "$CURRENT_BRANCH" == "$TARGET_BRANCH" ]; then + echo -e "${YELLOW}already on target branch ${TARGET_BRANCH}${NC}" + echo -e "${YELLOW}running test-release instead of preview${NC}\n" + if [ -n "$PACKAGE_PATH" ]; then + cd "$REPO_ROOT/$PACKAGE_PATH" + fi + exec nix develop -c bun run test-release +fi + +# Display what we're doing +echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}" +echo -e "${BLUE}semantic-release version preview${NC}" +echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}" +echo -e "current branch: ${GREEN}${CURRENT_BRANCH}${NC}" +echo -e "target branch: ${GREEN}${TARGET_BRANCH}${NC}" +if [ -n "$PACKAGE_PATH" ]; then + echo -e "package: ${GREEN}${PACKAGE_PATH}${NC}" +else + echo -e "package: ${GREEN}(root)${NC}" +fi +echo -e "${BLUE}───────────────────────────────────────────────────────────────${NC}\n" + +# Verify target branch exists +if ! 
git show-ref --verify --quiet "refs/heads/$TARGET_BRANCH"; then + echo -e "${RED}error: target branch '${TARGET_BRANCH}' does not exist${NC}" >&2 + exit 1 +fi + +# Save original target branch HEAD before any modifications +ORIGINAL_TARGET_HEAD=$(git rev-parse "$TARGET_BRANCH") + +# Save original remote-tracking branch HEAD before any modifications +ORIGINAL_REMOTE_HEAD=$(git rev-parse "origin/$TARGET_BRANCH" 2>/dev/null || echo "") + +# Create merge tree to test if merge is possible +echo -e "${BLUE}simulating merge of ${CURRENT_BRANCH} → ${TARGET_BRANCH}...${NC}" + +# Perform merge-tree operation to test if merge is possible +MERGE_OUTPUT=$(git merge-tree --write-tree "$TARGET_BRANCH" "$CURRENT_BRANCH" 2>&1) +MERGE_EXIT=$? + +if [ $MERGE_EXIT -ne 0 ]; then + echo -e "${RED}error: merge conflicts detected${NC}" >&2 + echo -e "${YELLOW}please resolve conflicts in your branch before previewing${NC}" >&2 + echo -e "\n${YELLOW}conflict details:${NC}" >&2 + echo "$MERGE_OUTPUT" >&2 + exit 1 +fi + +# Extract tree hash from merge-tree output (first line) +MERGE_TREE=$(echo "$MERGE_OUTPUT" | head -1) + +if [ -z "$MERGE_TREE" ]; then + echo -e "${RED}error: failed to create merge tree${NC}" >&2 + exit 1 +fi + +# Create temporary merge commit +echo -e "${BLUE}creating temporary merge commit...${NC}" +TEMP_COMMIT=$(git commit-tree -p "$TARGET_BRANCH" -p "$CURRENT_BRANCH" \ + -m "Temporary merge for semantic-release preview" "$MERGE_TREE") + +if [ -z "$TEMP_COMMIT" ]; then + echo -e "${RED}error: failed to create temporary merge commit${NC}" >&2 + exit 1 +fi + +# Temporarily update target branch to point to merge commit +# This allows semantic-release to analyze the correct commit history +# The cleanup function will ALWAYS restore the original branch HEAD +echo -e "${BLUE}temporarily updating ${TARGET_BRANCH} ref for analysis...${NC}" +git update-ref "refs/heads/$TARGET_BRANCH" "$TEMP_COMMIT" + +# Also update remote-tracking branch to match (so semantic-release sees them as 
synchronized) +git update-ref "refs/remotes/origin/$TARGET_BRANCH" "$TEMP_COMMIT" + +# Create worktree at target branch (now pointing to merge commit) +echo -e "${BLUE}creating temporary worktree at ${TARGET_BRANCH}...${NC}" +git worktree add --quiet "$WORKTREE_DIR" "$TARGET_BRANCH" + +# Navigate to worktree +cd "$WORKTREE_DIR" + +# Install dependencies in worktree (bun uses global cache, so this is fast) +echo -e "${BLUE}installing dependencies in worktree...${NC}" +nix develop -c bun install --silent + +# Navigate to package if specified +if [ -n "$PACKAGE_PATH" ]; then + if [ ! -d "$PACKAGE_PATH" ]; then + echo -e "${RED}error: package path '${PACKAGE_PATH}' does not exist${NC}" >&2 + exit 1 + fi + cd "$PACKAGE_PATH" +fi + +# Run semantic-release in dry-run mode +echo -e "\n${BLUE}running semantic-release analysis...${NC}\n" + +# Capture output and parse version +# Exclude @semantic-release/github to avoid GitHub token requirement for preview +# This is safe because dry-run skips publish/success/fail steps anyway +PLUGINS="@semantic-release/commit-analyzer,@semantic-release/release-notes-generator" + +if [ -n "$PACKAGE_PATH" ]; then + # For monorepo packages, check if package.json has specific plugins configured + OUTPUT=$(GITHUB_REF="refs/heads/$TARGET_BRANCH" nix develop -c bun run semantic-release --dry-run --no-ci --branches "$TARGET_BRANCH" --plugins "$PLUGINS" 2>&1 || true) +else + # For root package + OUTPUT=$(GITHUB_REF="refs/heads/$TARGET_BRANCH" nix develop -c bun run semantic-release --dry-run --no-ci --branches "$TARGET_BRANCH" --plugins "$PLUGINS" 2>&1 || true) +fi + +# Display semantic-release summary (filter out verbose plugin repetition) +echo "$OUTPUT" | grep -v "^$" | grep -vE "(No more plugins|does not provide step)" | \ + grep -E "(semantic-release|Running|analyzing|Found.*commits|release version|Release note|Features|Bug Fixes|Breaking Changes|Published|\*\s)" || true + +echo -e 
"\n${BLUE}═══════════════════════════════════════════════════════════════${NC}" + +# Extract and display the next version +if echo "$OUTPUT" | grep -q "There are no relevant changes"; then + echo -e "${YELLOW}no version bump required${NC}" + echo -e "no semantic commits found since last release" +elif echo "$OUTPUT" | grep -q "is not configured to publish from"; then + echo -e "${YELLOW}cannot determine version${NC}" + echo -e "branch ${TARGET_BRANCH} is not in release configuration" +elif VERSION=$(echo "$OUTPUT" | grep -oP 'next release version is \K[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+\.[0-9]+)?' | head -1); then + echo -e "${GREEN}next version: ${VERSION}${NC}" + + # Extract release type if available + if TYPE=$(echo "$OUTPUT" | grep -oP 'Release type: \K[a-z]+' | head -1); then + echo -e "release type: ${TYPE}" + fi +else + echo -e "${YELLOW}could not parse version from output${NC}" + echo -e "check the semantic-release output above for details" +fi + +echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}\n" diff --git a/scripts/sops-bootstrap.sh b/scripts/sops-bootstrap.sh index e18654e..68455eb 100755 --- a/scripts/sops-bootstrap.sh +++ b/scripts/sops-bootstrap.sh @@ -19,6 +19,7 @@ fi CURRENT_KEY=$(grep "^ - &${ROLE} " .sops.yaml | awk '{print $3}') # Placeholder keys indicate this is a fresh bootstrap +# gitleaks:allow - age public keys used as placeholders if [ "$CURRENT_KEY" = "age1dn8w7y4t4h23fmeenr3dghfz5qh53jcjq9qfv26km3mnv8l44g0sghptu3" ] || \ [ "$CURRENT_KEY" = "age1m9m8h5vqr7dqlmvnzcwshmm4uk8umcllazum6eaulkdp3qc88ugs22j3p8" ]; then IS_ROTATION=false