diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 622bf9587..620e757c9 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -4,6 +4,12 @@
This pull request …
+### Changelog
+
+<!-- List user-facing changes below. Required when the "Submit on merge" label is applied. -->
+
+-
+
### Testing
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 28d23a03d..d7e5d1e11 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -93,38 +93,38 @@ jobs:
uses: tj-actions/changed-files@v45
with:
files: |
- plugins/*/src/**
- plugins/*/test/**
- plugins/*/*.test.ts
- plugins/*/*.test.tsx
- plugins/*/vitest.config.ts
- plugins/*/test-setup.ts
+ {plugins,packages}/*/src/**
+ {plugins,packages}/*/test/**
+ {plugins,packages}/*/*.test.ts
+ {plugins,packages}/*/*.test.tsx
+ {plugins,packages}/*/vitest.config.ts
+ {plugins,packages}/*/test-setup.ts
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
run: yarn
- - name: Get changed plugins
+ - name: Get changed workspaces
if: steps.changed-files.outputs.any_changed == 'true'
- id: changed-plugins
+ id: changed-workspaces
run: |
- # Extract unique plugin names from changed files
- PLUGINS=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | \
- grep -E '^plugins/[^/]+/' | \
- cut -d'/' -f2 | \
+ # Extract unique workspace paths from changed files
+ WORKSPACES=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | \
+ grep -oE '(plugins|packages)/[^/]+' | \
sort -u | \
tr '\n' ' ')
- echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
- echo "Changed plugins: $PLUGINS"
+          echo "workspaces=$WORKSPACES" >> "$GITHUB_OUTPUT"
+ echo "Changed workspaces: $WORKSPACES"
- - name: Run tests for changed plugins
- if: steps.changed-files.outputs.any_changed == 'true' && steps.changed-plugins.outputs.plugins != ''
+ - name: Run tests for changed workspaces
+ if: steps.changed-files.outputs.any_changed == 'true' && steps.changed-workspaces.outputs.workspaces != ''
run: |
- for plugin in ${{ steps.changed-plugins.outputs.plugins }}; do
- echo "Checking tests for plugin: $plugin"
- if [ -f "plugins/$plugin/package.json" ] && grep -q '"check-vitest"' "plugins/$plugin/package.json"; then
- yarn workspace $plugin check-vitest
+ for workspace in ${{ steps.changed-workspaces.outputs.workspaces }}; do
+ echo "Checking tests for workspace: $workspace"
+ if [ -f "$workspace/package.json" ] && grep -q '"check-vitest"' "$workspace/package.json"; then
+ name=$(jq -r '.name' "$workspace/package.json")
+ yarn workspace "$name" check-vitest
else
- echo "No check-vitest script found for $plugin, skipping..."
+ echo "No check-vitest script found for $workspace, skipping..."
fi
done
diff --git a/.github/workflows/shippy.yml b/.github/workflows/shippy.yml
index ac9d0e9c9..d3bb94b01 100644
--- a/.github/workflows/shippy.yml
+++ b/.github/workflows/shippy.yml
@@ -7,6 +7,8 @@ on:
- edited
- ready_for_review
- synchronize
+ - labeled
+ - unlabeled
workflow_dispatch:
# NOTE: To prevent GitHub from adding PRs to the merge queue before check is done,
# make sure that there is a ruleset that requires the “Shippy check to pass.
@@ -24,11 +26,32 @@ jobs:
runs-on: ubuntu-latest
if: github.event.pull_request.draft == false && github.event.pull_request.user.login != 'dependabot[bot]'
steps:
- - name: Check PR description
+ - name: Check if Submit on merge label is present
+ id: check-label
uses: actions/github-script@v7
with:
script: |
- const prBody = context.payload.pull_request.body?.trim()
- if (!prBody) {
- core.setFailed("❌ PR description is required.")
- }
+ const labels = context.payload.pull_request.labels || []
+ const hasSubmitLabel = labels.some(label => label.name === 'Submit on merge')
+ core.setOutput('require_changelog', hasSubmitLabel ? 'true' : 'false')
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ sparse-checkout: |
+ scripts
+ package.json
+ yarn.lock
+ .yarnrc.yml
+ .yarn
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version-file: .tool-versions
+
+ - name: Validate PR body
+ run: yarn dlx tsx scripts/validate-pr-body.ts
+ env:
+ PR_BODY: ${{ github.event.pull_request.body }}
+ REQUIRE_CHANGELOG: ${{ steps.check-label.outputs.require_changelog }}
diff --git a/.github/workflows/submit-on-merge.yml b/.github/workflows/submit-on-merge.yml
new file mode 100644
index 000000000..bcfef2feb
--- /dev/null
+++ b/.github/workflows/submit-on-merge.yml
@@ -0,0 +1,64 @@
+name: Submit on Merge
+
+on:
+ pull_request:
+ types:
+ - closed
+ branches:
+ - main
+
+jobs:
+ submit:
+ name: Submit Changed Plugins
+ runs-on: ubuntu-latest
+ # Only run if PR was merged (not just closed) and has "Submit on merge" label
+ if: |
+ github.event.pull_request.merged == true &&
+ contains(github.event.pull_request.labels.*.name, 'Submit on merge')
+ # FIXME: Should be production
+ environment: development
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Full history for git tags and diff
+
+ - name: Configure git identity
+ run: |
+ git config --global user.email "marketplace@framer.team"
+ git config --global user.name "Framer Marketplace"
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version-file: .tool-versions
+
+ - name: Install dependencies
+ run: yarn install
+
+ - name: Build framer-plugin-tools
+ working-directory: packages/plugin-tools
+ run: yarn build
+
+ - name: Write PR body to file
+ run: cat <<< "$PR_BODY" > /tmp/pr-body.txt
+ env:
+ PR_BODY: ${{ github.event.pull_request.body }}
+
+ - name: Submit changed plugins
+ run: |
+ export CHANGED_FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }})
+ yarn tsx scripts/submit-on-merge.ts
+ env:
+ DEBUG: "1"
+ PR_BODY_FILE: /tmp/pr-body.txt
+ SESSION_TOKEN: ${{ secrets.SESSION_TOKEN }}
+ FRAMER_ADMIN_SECRET: ${{ secrets.FRAMER_ADMIN_SECRET }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+ SLACK_ERROR_WEBHOOK_URL: ${{ secrets.SLACK_ERROR_WEBHOOK_URL }}
+ RETOOL_URL: ${{ secrets.RETOOL_URL }}
+ # FIXME: Should be production
+ FRAMER_ENV: development
+ GITHUB_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/submit-plugin.yml b/.github/workflows/submit-plugin.yml
new file mode 100644
index 000000000..cec0ab0de
--- /dev/null
+++ b/.github/workflows/submit-plugin.yml
@@ -0,0 +1,124 @@
+name: Submit Plugin
+
+on:
+ # Manual trigger from GitHub UI
+ workflow_dispatch:
+ inputs:
+ plugin_path:
+ description: 'Plugin directory (e.g., plugins/csv-import)'
+ required: true
+ type: string
+ changelog:
+ description: 'Changelog for this release'
+ required: true
+ type: string
+ environment:
+ description: 'Environment (development/production)'
+ required: true
+ default: 'development'
+ type: choice
+ options:
+ - development
+ - production
+ dry_run:
+ description: 'Dry run (skip submission and tagging)'
+ required: false
+ default: false
+ type: boolean
+
+ # Reusable workflow - can be called from other repos (e.g., framer/workshop)
+ workflow_call:
+ inputs:
+ plugin_path:
+ description: 'Plugin directory (e.g., plugins/csv-import)'
+ required: true
+ type: string
+ changelog:
+ description: 'Changelog for this release'
+ required: true
+ type: string
+ environment:
+ description: 'Environment (development/production)'
+ required: true
+ default: 'development'
+ type: string
+ dry_run:
+ description: 'Dry run (skip submission and tagging)'
+ required: false
+ default: false
+ type: boolean
+ secrets:
+ SESSION_TOKEN:
+ description: 'Framer session cookie'
+ required: true
+ FRAMER_ADMIN_SECRET:
+ description: 'Framer admin API key'
+ required: true
+ SLACK_WEBHOOK_URL:
+ description: 'Slack webhook URL for notifications'
+ required: false
+ RETOOL_URL:
+ description: 'Retool dashboard URL for Slack notifications'
+ required: false
+ SLACK_ERROR_WEBHOOK_URL:
+ description: 'Slack webhook URL for error notifications'
+ required: false
+
+jobs:
+ submit:
+ name: Submit Plugin to Marketplace
+ runs-on: ubuntu-latest
+ environment: ${{ inputs.environment }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Full history for git tags and diff
+
+ - name: Configure git identity
+ run: |
+ git config --global user.email "marketplace@framer.team"
+ git config --global user.name "Framer Marketplace"
+
+      - name: Validate plugin path
+        env:
+          PLUGIN_PATH: ${{ inputs.plugin_path }}
+        run: |
+          if [ ! -d "$GITHUB_WORKSPACE/$PLUGIN_PATH" ]; then
+            echo "Error: Plugin path '$PLUGIN_PATH' does not exist"
+            echo "Available plugins:" && ls -1 plugins/
+            exit 1
+          fi
+          if [ ! -f "$GITHUB_WORKSPACE/$PLUGIN_PATH/framer.json" ]; then
+            echo "Error: No framer.json found in '$PLUGIN_PATH'"
+            exit 1
+          fi
+          echo "Plugin path validated: $PLUGIN_PATH"
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version-file: .tool-versions
+
+ - name: Install dependencies
+ run: yarn install
+
+ - name: Build framer-plugin-tools
+ run: yarn turbo run build --filter=framer-plugin-tools
+
+ - name: Submit plugin
+ run: yarn tsx scripts/submit-plugin.ts
+ env:
+ PLUGIN_PATH: ${{ github.workspace }}/${{ inputs.plugin_path }}
+ REPO_ROOT: ${{ github.workspace }}
+ CHANGELOG: ${{ inputs.changelog }}
+ SESSION_TOKEN: ${{ secrets.SESSION_TOKEN }}
+ FRAMER_ADMIN_SECRET: ${{ secrets.FRAMER_ADMIN_SECRET }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+ SLACK_ERROR_WEBHOOK_URL: ${{ secrets.SLACK_ERROR_WEBHOOK_URL }}
+ RETOOL_URL: ${{ secrets.RETOOL_URL }}
+ FRAMER_ENV: ${{ inputs.environment }}
+ GITHUB_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ DRY_RUN: ${{ inputs.dry_run }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.yarn/cache/@babel-generator-npm-8.0.0-rc.1-7b4577a751-1949ed7a07.zip b/.yarn/cache/@babel-generator-npm-8.0.0-rc.1-7b4577a751-1949ed7a07.zip
new file mode 100644
index 000000000..dcc273dbd
Binary files /dev/null and b/.yarn/cache/@babel-generator-npm-8.0.0-rc.1-7b4577a751-1949ed7a07.zip differ
diff --git a/.yarn/cache/@babel-helper-string-parser-npm-8.0.0-rc.1-826f1bb41e-9c7b34ecce.zip b/.yarn/cache/@babel-helper-string-parser-npm-8.0.0-rc.1-826f1bb41e-9c7b34ecce.zip
new file mode 100644
index 000000000..0793d4190
Binary files /dev/null and b/.yarn/cache/@babel-helper-string-parser-npm-8.0.0-rc.1-826f1bb41e-9c7b34ecce.zip differ
diff --git a/.yarn/cache/@babel-helper-validator-identifier-npm-8.0.0-rc.1-13fd699081-75d6d5201c.zip b/.yarn/cache/@babel-helper-validator-identifier-npm-8.0.0-rc.1-13fd699081-75d6d5201c.zip
new file mode 100644
index 000000000..e555aeed6
Binary files /dev/null and b/.yarn/cache/@babel-helper-validator-identifier-npm-8.0.0-rc.1-13fd699081-75d6d5201c.zip differ
diff --git a/.yarn/cache/@babel-parser-npm-8.0.0-rc.1-d05e9b70b4-d0c5baf877.zip b/.yarn/cache/@babel-parser-npm-8.0.0-rc.1-d05e9b70b4-d0c5baf877.zip
new file mode 100644
index 000000000..857b57b0f
Binary files /dev/null and b/.yarn/cache/@babel-parser-npm-8.0.0-rc.1-d05e9b70b4-d0c5baf877.zip differ
diff --git a/.yarn/cache/@babel-types-npm-8.0.0-rc.1-35e137a67f-34def05264.zip b/.yarn/cache/@babel-types-npm-8.0.0-rc.1-35e137a67f-34def05264.zip
new file mode 100644
index 000000000..fecf4166d
Binary files /dev/null and b/.yarn/cache/@babel-types-npm-8.0.0-rc.1-35e137a67f-34def05264.zip differ
diff --git a/.yarn/cache/@commander-js-extra-typings-npm-13.1.0-360957fb64-cdb7f854ac.zip b/.yarn/cache/@commander-js-extra-typings-npm-13.1.0-360957fb64-cdb7f854ac.zip
new file mode 100644
index 000000000..74ec7b5d0
Binary files /dev/null and b/.yarn/cache/@commander-js-extra-typings-npm-13.1.0-360957fb64-cdb7f854ac.zip differ
diff --git a/.yarn/cache/@emnapi-core-npm-1.8.1-9be0a25589-904ea60c91.zip b/.yarn/cache/@emnapi-core-npm-1.8.1-9be0a25589-904ea60c91.zip
new file mode 100644
index 000000000..b97b75b57
Binary files /dev/null and b/.yarn/cache/@emnapi-core-npm-1.8.1-9be0a25589-904ea60c91.zip differ
diff --git a/.yarn/cache/@emnapi-runtime-npm-1.8.1-1ce27dc028-26725e202d.zip b/.yarn/cache/@emnapi-runtime-npm-1.8.1-1ce27dc028-26725e202d.zip
new file mode 100644
index 000000000..669c0bafe
Binary files /dev/null and b/.yarn/cache/@emnapi-runtime-npm-1.8.1-1ce27dc028-26725e202d.zip differ
diff --git a/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip b/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip
new file mode 100644
index 000000000..df4de2225
Binary files /dev/null and b/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip differ
diff --git a/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip b/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip
new file mode 100644
index 000000000..987fc4966
Binary files /dev/null and b/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip differ
diff --git a/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip b/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip
new file mode 100644
index 000000000..32c8cc871
Binary files /dev/null and b/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip differ
diff --git a/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip b/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip
new file mode 100644
index 000000000..eea4066e3
Binary files /dev/null and b/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip differ
diff --git a/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip b/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip
new file mode 100644
index 000000000..e3bec141d
Binary files /dev/null and b/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip differ
diff --git a/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip b/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip
new file mode 100644
index 000000000..549aacedc
Binary files /dev/null and b/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip differ
diff --git a/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip b/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip
new file mode 100644
index 000000000..e130971fd
Binary files /dev/null and b/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip differ
diff --git a/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip b/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip
new file mode 100644
index 000000000..d61ababcd
Binary files /dev/null and b/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip differ
diff --git a/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.1-c9debf141f-080e7f2aef.zip b/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.1-c9debf141f-080e7f2aef.zip
new file mode 100644
index 000000000..db34c520d
Binary files /dev/null and b/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.1-c9debf141f-080e7f2aef.zip differ
diff --git a/.yarn/cache/@oxc-project-types-npm-0.112.0-e7acfb5018-5954982169.zip b/.yarn/cache/@oxc-project-types-npm-0.112.0-e7acfb5018-5954982169.zip
new file mode 100644
index 000000000..f66ae9f97
Binary files /dev/null and b/.yarn/cache/@oxc-project-types-npm-0.112.0-e7acfb5018-5954982169.zip differ
diff --git a/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip b/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip
new file mode 100644
index 000000000..7281201bb
Binary files /dev/null and b/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-rc.3-c3196f4d78-10.zip b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-rc.3-c3196f4d78-10.zip
new file mode 100644
index 000000000..9a6ccb913
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-rc.3-c3196f4d78-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-rc.3-edaa9164af-10.zip b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-rc.3-edaa9164af-10.zip
new file mode 100644
index 000000000..1b4cc9c39
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-rc.3-edaa9164af-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-rc.3-194ce4c287-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-rc.3-194ce4c287-10.zip
new file mode 100644
index 000000000..225a9b1df
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-rc.3-194ce4c287-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-rc.3-fd6342d84f-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-rc.3-fd6342d84f-10.zip
new file mode 100644
index 000000000..858f884c2
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-rc.3-fd6342d84f-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-rc.3-fb6aa24536-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-rc.3-fb6aa24536-10.zip
new file mode 100644
index 000000000..270ae9b19
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-rc.3-fb6aa24536-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-rc.3-b285db285c-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-rc.3-b285db285c-10.zip
new file mode 100644
index 000000000..eaf4f51ea
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-rc.3-b285db285c-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-rc.3-9076c881e2-10.zip b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-rc.3-9076c881e2-10.zip
new file mode 100644
index 000000000..0ba96e657
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-rc.3-9076c881e2-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-rc.3-cda6ed4bb8-10.zip b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-rc.3-cda6ed4bb8-10.zip
new file mode 100644
index 000000000..3fabbba8d
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-rc.3-cda6ed4bb8-10.zip differ
diff --git a/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-rc.3-790b1e278c-b181a693b7.zip b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-rc.3-790b1e278c-b181a693b7.zip
new file mode 100644
index 000000000..aa5cfb7d8
Binary files /dev/null and b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-rc.3-790b1e278c-b181a693b7.zip differ
diff --git a/.yarn/cache/@rollup-rollup-darwin-arm64-npm-4.57.1-30fcad3f8c-10.zip b/.yarn/cache/@rollup-rollup-darwin-arm64-npm-4.57.1-30fcad3f8c-10.zip
new file mode 100644
index 000000000..fbdb3a060
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-darwin-arm64-npm-4.57.1-30fcad3f8c-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-darwin-x64-npm-4.57.1-a79270fa64-10.zip b/.yarn/cache/@rollup-rollup-darwin-x64-npm-4.57.1-a79270fa64-10.zip
new file mode 100644
index 000000000..0f69fe496
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-darwin-x64-npm-4.57.1-a79270fa64-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-linux-arm64-gnu-npm-4.57.1-fd27dea33b-10.zip b/.yarn/cache/@rollup-rollup-linux-arm64-gnu-npm-4.57.1-fd27dea33b-10.zip
new file mode 100644
index 000000000..ce22eddef
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-linux-arm64-gnu-npm-4.57.1-fd27dea33b-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-linux-arm64-musl-npm-4.57.1-3dce1fd7b1-10.zip b/.yarn/cache/@rollup-rollup-linux-arm64-musl-npm-4.57.1-3dce1fd7b1-10.zip
new file mode 100644
index 000000000..505a36abe
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-linux-arm64-musl-npm-4.57.1-3dce1fd7b1-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-linux-x64-gnu-npm-4.57.1-667d72ac2f-10.zip b/.yarn/cache/@rollup-rollup-linux-x64-gnu-npm-4.57.1-667d72ac2f-10.zip
new file mode 100644
index 000000000..a2bb10949
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-linux-x64-gnu-npm-4.57.1-667d72ac2f-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-linux-x64-musl-npm-4.57.1-1bc4af3cb5-10.zip b/.yarn/cache/@rollup-rollup-linux-x64-musl-npm-4.57.1-1bc4af3cb5-10.zip
new file mode 100644
index 000000000..60174e053
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-linux-x64-musl-npm-4.57.1-1bc4af3cb5-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-win32-arm64-msvc-npm-4.57.1-36312bb667-10.zip b/.yarn/cache/@rollup-rollup-win32-arm64-msvc-npm-4.57.1-36312bb667-10.zip
new file mode 100644
index 000000000..8373ce50e
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-win32-arm64-msvc-npm-4.57.1-36312bb667-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-win32-x64-gnu-npm-4.57.1-ab351abab0-10.zip b/.yarn/cache/@rollup-rollup-win32-x64-gnu-npm-4.57.1-ab351abab0-10.zip
new file mode 100644
index 000000000..f1a73acdd
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-win32-x64-gnu-npm-4.57.1-ab351abab0-10.zip differ
diff --git a/.yarn/cache/@rollup-rollup-win32-x64-msvc-npm-4.57.1-57e2356b3a-10.zip b/.yarn/cache/@rollup-rollup-win32-x64-msvc-npm-4.57.1-57e2356b3a-10.zip
new file mode 100644
index 000000000..5728375ed
Binary files /dev/null and b/.yarn/cache/@rollup-rollup-win32-x64-msvc-npm-4.57.1-57e2356b3a-10.zip differ
diff --git a/.yarn/cache/@types-adm-zip-npm-0.5.7-b19d96a568-24e9842bd6.zip b/.yarn/cache/@types-adm-zip-npm-0.5.7-b19d96a568-24e9842bd6.zip
new file mode 100644
index 000000000..568da9afe
Binary files /dev/null and b/.yarn/cache/@types-adm-zip-npm-0.5.7-b19d96a568-24e9842bd6.zip differ
diff --git a/.yarn/cache/@types-jsesc-npm-2.5.1-3d8289fff0-25407775ed.zip b/.yarn/cache/@types-jsesc-npm-2.5.1-3d8289fff0-25407775ed.zip
new file mode 100644
index 000000000..d1c21e278
Binary files /dev/null and b/.yarn/cache/@types-jsesc-npm-2.5.1-3d8289fff0-25407775ed.zip differ
diff --git a/.yarn/cache/@types-node-npm-22.19.7-d4e0361572-0a5c9cc3bd.zip b/.yarn/cache/@types-node-npm-22.19.7-d4e0361572-0a5c9cc3bd.zip
new file mode 100644
index 000000000..8c7de71a4
Binary files /dev/null and b/.yarn/cache/@types-node-npm-22.19.7-d4e0361572-0a5c9cc3bd.zip differ
diff --git a/.yarn/cache/@types-node-npm-22.19.8-a20ecb429d-a61c68d434.zip b/.yarn/cache/@types-node-npm-22.19.8-a20ecb429d-a61c68d434.zip
new file mode 100644
index 000000000..f744c46eb
Binary files /dev/null and b/.yarn/cache/@types-node-npm-22.19.8-a20ecb429d-a61c68d434.zip differ
diff --git a/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip b/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip
new file mode 100644
index 000000000..5c915f403
Binary files /dev/null and b/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip differ
diff --git a/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip b/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip
new file mode 100644
index 000000000..500bfe18e
Binary files /dev/null and b/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip differ
diff --git a/.yarn/cache/@vitest-expect-npm-4.0.18-03919ccd0b-2115bff1bb.zip b/.yarn/cache/@vitest-expect-npm-4.0.18-03919ccd0b-2115bff1bb.zip
new file mode 100644
index 000000000..3db069f11
Binary files /dev/null and b/.yarn/cache/@vitest-expect-npm-4.0.18-03919ccd0b-2115bff1bb.zip differ
diff --git a/.yarn/cache/@vitest-mocker-npm-4.0.18-118c87f90e-46f584a4c1.zip b/.yarn/cache/@vitest-mocker-npm-4.0.18-118c87f90e-46f584a4c1.zip
new file mode 100644
index 000000000..9705f5fa8
Binary files /dev/null and b/.yarn/cache/@vitest-mocker-npm-4.0.18-118c87f90e-46f584a4c1.zip differ
diff --git a/.yarn/cache/@vitest-pretty-format-npm-4.0.18-a73855e4a4-4cafc7c985.zip b/.yarn/cache/@vitest-pretty-format-npm-4.0.18-a73855e4a4-4cafc7c985.zip
new file mode 100644
index 000000000..933564838
Binary files /dev/null and b/.yarn/cache/@vitest-pretty-format-npm-4.0.18-a73855e4a4-4cafc7c985.zip differ
diff --git a/.yarn/cache/@vitest-runner-npm-4.0.18-3dbdf3fb30-d7deebf086.zip b/.yarn/cache/@vitest-runner-npm-4.0.18-3dbdf3fb30-d7deebf086.zip
new file mode 100644
index 000000000..fd61fc011
Binary files /dev/null and b/.yarn/cache/@vitest-runner-npm-4.0.18-3dbdf3fb30-d7deebf086.zip differ
diff --git a/.yarn/cache/@vitest-snapshot-npm-4.0.18-35134202ed-50aa5fb7fc.zip b/.yarn/cache/@vitest-snapshot-npm-4.0.18-35134202ed-50aa5fb7fc.zip
new file mode 100644
index 000000000..3dfd6c15d
Binary files /dev/null and b/.yarn/cache/@vitest-snapshot-npm-4.0.18-35134202ed-50aa5fb7fc.zip differ
diff --git a/.yarn/cache/@vitest-spy-npm-4.0.18-296c59dce4-f7b1618ae1.zip b/.yarn/cache/@vitest-spy-npm-4.0.18-296c59dce4-f7b1618ae1.zip
new file mode 100644
index 000000000..cabab6646
Binary files /dev/null and b/.yarn/cache/@vitest-spy-npm-4.0.18-296c59dce4-f7b1618ae1.zip differ
diff --git a/.yarn/cache/@vitest-utils-npm-4.0.18-b1c99a49e0-e8b2ad7bc3.zip b/.yarn/cache/@vitest-utils-npm-4.0.18-b1c99a49e0-e8b2ad7bc3.zip
new file mode 100644
index 000000000..c61d50e0f
Binary files /dev/null and b/.yarn/cache/@vitest-utils-npm-4.0.18-b1c99a49e0-e8b2ad7bc3.zip differ
diff --git a/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip b/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip
new file mode 100644
index 000000000..0719e6447
Binary files /dev/null and b/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip differ
diff --git a/.yarn/cache/any-promise-npm-1.3.0-f34eeaa7e7-6737469ba3.zip b/.yarn/cache/any-promise-npm-1.3.0-f34eeaa7e7-6737469ba3.zip
new file mode 100644
index 000000000..e93b1e228
Binary files /dev/null and b/.yarn/cache/any-promise-npm-1.3.0-f34eeaa7e7-6737469ba3.zip differ
diff --git a/.yarn/cache/ast-kit-npm-3.0.0-beta.1-eee5b0fb06-63c8f80f71.zip b/.yarn/cache/ast-kit-npm-3.0.0-beta.1-eee5b0fb06-63c8f80f71.zip
new file mode 100644
index 000000000..b50f29a39
Binary files /dev/null and b/.yarn/cache/ast-kit-npm-3.0.0-beta.1-eee5b0fb06-63c8f80f71.zip differ
diff --git a/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip b/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip
new file mode 100644
index 000000000..982376952
Binary files /dev/null and b/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip differ
diff --git a/.yarn/cache/bundle-require-npm-5.1.0-022b2c8e1b-735e022005.zip b/.yarn/cache/bundle-require-npm-5.1.0-022b2c8e1b-735e022005.zip
new file mode 100644
index 000000000..6d3fbfaba
Binary files /dev/null and b/.yarn/cache/bundle-require-npm-5.1.0-022b2c8e1b-735e022005.zip differ
diff --git a/.yarn/cache/chai-npm-6.2.2-e1795cadaa-13cda42cc4.zip b/.yarn/cache/chai-npm-6.2.2-e1795cadaa-13cda42cc4.zip
new file mode 100644
index 000000000..0accfad37
Binary files /dev/null and b/.yarn/cache/chai-npm-6.2.2-e1795cadaa-13cda42cc4.zip differ
diff --git a/.yarn/cache/chokidar-npm-4.0.3-962354fbb4-bf2a575ea5.zip b/.yarn/cache/chokidar-npm-4.0.3-962354fbb4-bf2a575ea5.zip
new file mode 100644
index 000000000..0085cb689
Binary files /dev/null and b/.yarn/cache/chokidar-npm-4.0.3-962354fbb4-bf2a575ea5.zip differ
diff --git a/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip b/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip
new file mode 100644
index 000000000..de48a7b82
Binary files /dev/null and b/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip differ
diff --git a/.yarn/cache/commander-npm-13.1.0-bdbbfaaf9d-d3b4b79e6b.zip b/.yarn/cache/commander-npm-13.1.0-bdbbfaaf9d-d3b4b79e6b.zip
new file mode 100644
index 000000000..5ff7f5b04
Binary files /dev/null and b/.yarn/cache/commander-npm-13.1.0-bdbbfaaf9d-d3b4b79e6b.zip differ
diff --git a/.yarn/cache/commander-npm-14.0.3-93ab31471d-dfa9ebe2a4.zip b/.yarn/cache/commander-npm-14.0.3-93ab31471d-dfa9ebe2a4.zip
new file mode 100644
index 000000000..9b63f2535
Binary files /dev/null and b/.yarn/cache/commander-npm-14.0.3-93ab31471d-dfa9ebe2a4.zip differ
diff --git a/.yarn/cache/commander-npm-4.1.1-22a0fe921b-3b2dc4125f.zip b/.yarn/cache/commander-npm-4.1.1-22a0fe921b-3b2dc4125f.zip
new file mode 100644
index 000000000..c893cc7fc
Binary files /dev/null and b/.yarn/cache/commander-npm-4.1.1-22a0fe921b-3b2dc4125f.zip differ
diff --git a/.yarn/cache/confbox-npm-0.1.8-8396039b68-4ebcfb1c6a.zip b/.yarn/cache/confbox-npm-0.1.8-8396039b68-4ebcfb1c6a.zip
new file mode 100644
index 000000000..190ec9ef4
Binary files /dev/null and b/.yarn/cache/confbox-npm-0.1.8-8396039b68-4ebcfb1c6a.zip differ
diff --git a/.yarn/cache/consola-npm-3.4.2-133d72719e-32192c9f50.zip b/.yarn/cache/consola-npm-3.4.2-133d72719e-32192c9f50.zip
new file mode 100644
index 000000000..51736da15
Binary files /dev/null and b/.yarn/cache/consola-npm-3.4.2-133d72719e-32192c9f50.zip differ
diff --git a/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip b/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip
new file mode 100644
index 000000000..df708b6ab
Binary files /dev/null and b/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip differ
diff --git a/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip b/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip
new file mode 100644
index 000000000..a095b39b8
Binary files /dev/null and b/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip differ
diff --git a/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip b/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip
new file mode 100644
index 000000000..daca95502
Binary files /dev/null and b/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip differ
diff --git a/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip b/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip
new file mode 100644
index 000000000..18b308c08
Binary files /dev/null and b/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip differ
diff --git a/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip b/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip
new file mode 100644
index 000000000..20a597e23
Binary files /dev/null and b/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip differ
diff --git a/.yarn/cache/fix-dts-default-cjs-exports-npm-1.0.1-421fe0399f-3324418bb6.zip b/.yarn/cache/fix-dts-default-cjs-exports-npm-1.0.1-421fe0399f-3324418bb6.zip
new file mode 100644
index 000000000..e5fb69592
Binary files /dev/null and b/.yarn/cache/fix-dts-default-cjs-exports-npm-1.0.1-421fe0399f-3324418bb6.zip differ
diff --git a/.yarn/cache/framer-plugin-npm-3.10.3-f82e9d58a5-c677a26146.zip b/.yarn/cache/framer-plugin-npm-3.10.3-f82e9d58a5-c677a26146.zip
new file mode 100644
index 000000000..0d9ed6da3
Binary files /dev/null and b/.yarn/cache/framer-plugin-npm-3.10.3-f82e9d58a5-c677a26146.zip differ
diff --git a/.yarn/cache/framer-plugin-tools-npm-1.0.0-09c6263530-5f6b731038.zip b/.yarn/cache/framer-plugin-tools-npm-1.0.0-09c6263530-5f6b731038.zip
deleted file mode 100644
index a03f5dc85..000000000
Binary files a/.yarn/cache/framer-plugin-tools-npm-1.0.0-09c6263530-5f6b731038.zip and /dev/null differ
diff --git a/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip b/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip
new file mode 100644
index 000000000..521c2617f
Binary files /dev/null and b/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip differ
diff --git a/.yarn/cache/get-tsconfig-npm-4.13.2-7292f60580-7d7b2bd6ad.zip b/.yarn/cache/get-tsconfig-npm-4.13.2-7292f60580-7d7b2bd6ad.zip
new file mode 100644
index 000000000..2461938df
Binary files /dev/null and b/.yarn/cache/get-tsconfig-npm-4.13.2-7292f60580-7d7b2bd6ad.zip differ
diff --git a/.yarn/cache/hookable-npm-6.0.1-a0fed3cec4-9fccaaa07d.zip b/.yarn/cache/hookable-npm-6.0.1-a0fed3cec4-9fccaaa07d.zip
new file mode 100644
index 000000000..3d2a8b02e
Binary files /dev/null and b/.yarn/cache/hookable-npm-6.0.1-a0fed3cec4-9fccaaa07d.zip differ
diff --git a/.yarn/cache/import-without-cache-npm-0.2.5-ce20052738-b58c16deaa.zip b/.yarn/cache/import-without-cache-npm-0.2.5-ce20052738-b58c16deaa.zip
new file mode 100644
index 000000000..31d362c05
Binary files /dev/null and b/.yarn/cache/import-without-cache-npm-0.2.5-ce20052738-b58c16deaa.zip differ
diff --git a/.yarn/cache/joycon-npm-3.1.1-3033e0e5f4-4b36e34791.zip b/.yarn/cache/joycon-npm-3.1.1-3033e0e5f4-4b36e34791.zip
new file mode 100644
index 000000000..e36528235
Binary files /dev/null and b/.yarn/cache/joycon-npm-3.1.1-3033e0e5f4-4b36e34791.zip differ
diff --git a/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip b/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip
new file mode 100644
index 000000000..0701df326
Binary files /dev/null and b/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip differ
diff --git a/.yarn/cache/lilconfig-npm-3.1.3-74a77377bb-b932ce1af9.zip b/.yarn/cache/lilconfig-npm-3.1.3-74a77377bb-b932ce1af9.zip
new file mode 100644
index 000000000..8b9c8cd32
Binary files /dev/null and b/.yarn/cache/lilconfig-npm-3.1.3-74a77377bb-b932ce1af9.zip differ
diff --git a/.yarn/cache/lines-and-columns-npm-1.2.4-d6c7cc5799-0c37f9f7fa.zip b/.yarn/cache/lines-and-columns-npm-1.2.4-d6c7cc5799-0c37f9f7fa.zip
new file mode 100644
index 000000000..273106a73
Binary files /dev/null and b/.yarn/cache/lines-and-columns-npm-1.2.4-d6c7cc5799-0c37f9f7fa.zip differ
diff --git a/.yarn/cache/load-tsconfig-npm-0.2.5-70feef5c98-b3176f6f0c.zip b/.yarn/cache/load-tsconfig-npm-0.2.5-70feef5c98-b3176f6f0c.zip
new file mode 100644
index 000000000..b16725bda
Binary files /dev/null and b/.yarn/cache/load-tsconfig-npm-0.2.5-70feef5c98-b3176f6f0c.zip differ
diff --git a/.yarn/cache/lodash-npm-4.17.21-6382451519-c08619c038.zip b/.yarn/cache/lodash-npm-4.17.23-50bdb1c01a-82504c8825.zip
similarity index 65%
rename from .yarn/cache/lodash-npm-4.17.21-6382451519-c08619c038.zip
rename to .yarn/cache/lodash-npm-4.17.23-50bdb1c01a-82504c8825.zip
index 5c76f21a6..8050b2fb0 100644
Binary files a/.yarn/cache/lodash-npm-4.17.21-6382451519-c08619c038.zip and b/.yarn/cache/lodash-npm-4.17.23-50bdb1c01a-82504c8825.zip differ
diff --git a/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip b/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip
new file mode 100644
index 000000000..53485dc72
Binary files /dev/null and b/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip differ
diff --git a/.yarn/cache/mlly-npm-1.8.0-448698f313-4db690a421.zip b/.yarn/cache/mlly-npm-1.8.0-448698f313-4db690a421.zip
new file mode 100644
index 000000000..0fd3cf77e
Binary files /dev/null and b/.yarn/cache/mlly-npm-1.8.0-448698f313-4db690a421.zip differ
diff --git a/.yarn/cache/mz-npm-2.7.0-ec3cef4ec2-8427de0ece.zip b/.yarn/cache/mz-npm-2.7.0-ec3cef4ec2-8427de0ece.zip
new file mode 100644
index 000000000..faf79cab2
Binary files /dev/null and b/.yarn/cache/mz-npm-2.7.0-ec3cef4ec2-8427de0ece.zip differ
diff --git a/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip b/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip
new file mode 100644
index 000000000..ca87e6388
Binary files /dev/null and b/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip differ
diff --git a/.yarn/cache/pirates-npm-4.0.7-5e4ee2f078-2427f37136.zip b/.yarn/cache/pirates-npm-4.0.7-5e4ee2f078-2427f37136.zip
new file mode 100644
index 000000000..09e815a1f
Binary files /dev/null and b/.yarn/cache/pirates-npm-4.0.7-5e4ee2f078-2427f37136.zip differ
diff --git a/.yarn/cache/pkg-types-npm-1.3.1-832c9cd162-6d491f2244.zip b/.yarn/cache/pkg-types-npm-1.3.1-832c9cd162-6d491f2244.zip
new file mode 100644
index 000000000..b0eafe50e
Binary files /dev/null and b/.yarn/cache/pkg-types-npm-1.3.1-832c9cd162-6d491f2244.zip differ
diff --git a/.yarn/cache/postcss-load-config-npm-6.0.1-50722afd05-1691cfc949.zip b/.yarn/cache/postcss-load-config-npm-6.0.1-50722afd05-1691cfc949.zip
new file mode 100644
index 000000000..6b91d0558
Binary files /dev/null and b/.yarn/cache/postcss-load-config-npm-6.0.1-50722afd05-1691cfc949.zip differ
diff --git a/.yarn/cache/prettier-npm-3.8.1-04022a855f-3da1cf8c1e.zip b/.yarn/cache/prettier-npm-3.8.1-04022a855f-3da1cf8c1e.zip
new file mode 100644
index 000000000..d29087728
Binary files /dev/null and b/.yarn/cache/prettier-npm-3.8.1-04022a855f-3da1cf8c1e.zip differ
diff --git a/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip b/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip
new file mode 100644
index 000000000..c024033bb
Binary files /dev/null and b/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip differ
diff --git a/.yarn/cache/readdirp-npm-4.1.2-3440472afe-7b817c2659.zip b/.yarn/cache/readdirp-npm-4.1.2-3440472afe-7b817c2659.zip
new file mode 100644
index 000000000..7ea4264a0
Binary files /dev/null and b/.yarn/cache/readdirp-npm-4.1.2-3440472afe-7b817c2659.zip differ
diff --git a/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip b/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip
new file mode 100644
index 000000000..f62693664
Binary files /dev/null and b/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip differ
diff --git a/.yarn/cache/resolve-from-npm-5.0.0-15c9db4d33-be18a5e4d7.zip b/.yarn/cache/resolve-from-npm-5.0.0-15c9db4d33-be18a5e4d7.zip
new file mode 100644
index 000000000..eca225319
Binary files /dev/null and b/.yarn/cache/resolve-from-npm-5.0.0-15c9db4d33-be18a5e4d7.zip differ
diff --git a/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip b/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip
new file mode 100644
index 000000000..8e3561c41
Binary files /dev/null and b/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip differ
diff --git a/.yarn/cache/rolldown-npm-1.0.0-rc.3-6317e18d22-28c88da3dc.zip b/.yarn/cache/rolldown-npm-1.0.0-rc.3-6317e18d22-28c88da3dc.zip
new file mode 100644
index 000000000..4178d5744
Binary files /dev/null and b/.yarn/cache/rolldown-npm-1.0.0-rc.3-6317e18d22-28c88da3dc.zip differ
diff --git a/.yarn/cache/rolldown-plugin-dts-npm-0.22.1-538d81df8c-e180a99005.zip b/.yarn/cache/rolldown-plugin-dts-npm-0.22.1-538d81df8c-e180a99005.zip
new file mode 100644
index 000000000..3b04c7809
Binary files /dev/null and b/.yarn/cache/rolldown-plugin-dts-npm-0.22.1-538d81df8c-e180a99005.zip differ
diff --git a/.yarn/cache/rollup-npm-4.57.1-8b254d8a63-0451371339.zip b/.yarn/cache/rollup-npm-4.57.1-8b254d8a63-0451371339.zip
new file mode 100644
index 000000000..d3287fa34
Binary files /dev/null and b/.yarn/cache/rollup-npm-4.57.1-8b254d8a63-0451371339.zip differ
diff --git a/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip b/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip
new file mode 100644
index 000000000..c94393ee4
Binary files /dev/null and b/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip differ
diff --git a/.yarn/cache/source-map-npm-0.7.6-a3854be193-c8d2da7c57.zip b/.yarn/cache/source-map-npm-0.7.6-a3854be193-c8d2da7c57.zip
new file mode 100644
index 000000000..84e26314f
Binary files /dev/null and b/.yarn/cache/source-map-npm-0.7.6-a3854be193-c8d2da7c57.zip differ
diff --git a/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip b/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip
new file mode 100644
index 000000000..8803cc08f
Binary files /dev/null and b/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip differ
diff --git a/.yarn/cache/sucrase-npm-3.35.1-9a5f68e2af-539f5c6ebc.zip b/.yarn/cache/sucrase-npm-3.35.1-9a5f68e2af-539f5c6ebc.zip
new file mode 100644
index 000000000..f1655abaa
Binary files /dev/null and b/.yarn/cache/sucrase-npm-3.35.1-9a5f68e2af-539f5c6ebc.zip differ
diff --git a/.yarn/cache/thenify-all-npm-1.6.0-96309bbc8b-dba7cc8a23.zip b/.yarn/cache/thenify-all-npm-1.6.0-96309bbc8b-dba7cc8a23.zip
new file mode 100644
index 000000000..59a2bc6ce
Binary files /dev/null and b/.yarn/cache/thenify-all-npm-1.6.0-96309bbc8b-dba7cc8a23.zip differ
diff --git a/.yarn/cache/thenify-npm-3.3.1-030bedb22c-486e1283a8.zip b/.yarn/cache/thenify-npm-3.3.1-030bedb22c-486e1283a8.zip
new file mode 100644
index 000000000..2c3ccdb73
Binary files /dev/null and b/.yarn/cache/thenify-npm-3.3.1-030bedb22c-486e1283a8.zip differ
diff --git a/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip b/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip
new file mode 100644
index 000000000..4e06ed7d2
Binary files /dev/null and b/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip differ
diff --git a/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip b/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip
new file mode 100644
index 000000000..55660b3fd
Binary files /dev/null and b/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip differ
diff --git a/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip b/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip
new file mode 100644
index 000000000..c9ef40137
Binary files /dev/null and b/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip differ
diff --git a/.yarn/cache/ts-interface-checker-npm-0.1.13-0c7b064494-9f7346b9e2.zip b/.yarn/cache/ts-interface-checker-npm-0.1.13-0c7b064494-9f7346b9e2.zip
new file mode 100644
index 000000000..be45589c8
Binary files /dev/null and b/.yarn/cache/ts-interface-checker-npm-0.1.13-0c7b064494-9f7346b9e2.zip differ
diff --git a/.yarn/cache/tsdown-npm-0.20.3-11b719e36b-3d511dea76.zip b/.yarn/cache/tsdown-npm-0.20.3-11b719e36b-3d511dea76.zip
new file mode 100644
index 000000000..c9168d04e
Binary files /dev/null and b/.yarn/cache/tsdown-npm-0.20.3-11b719e36b-3d511dea76.zip differ
diff --git a/.yarn/cache/tsup-npm-8.5.1-41f4f7d59b-f1927ec2dd.zip b/.yarn/cache/tsup-npm-8.5.1-41f4f7d59b-f1927ec2dd.zip
new file mode 100644
index 000000000..59f92d172
Binary files /dev/null and b/.yarn/cache/tsup-npm-8.5.1-41f4f7d59b-f1927ec2dd.zip differ
diff --git a/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip b/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip
new file mode 100644
index 000000000..27615df9a
Binary files /dev/null and b/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip differ
diff --git a/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip b/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip
new file mode 100644
index 000000000..0eabff58d
Binary files /dev/null and b/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip differ
diff --git a/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip b/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip
new file mode 100644
index 000000000..6cd392703
Binary files /dev/null and b/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip differ
diff --git a/.yarn/cache/ufo-npm-1.6.3-29bac69d97-79803984f3.zip b/.yarn/cache/ufo-npm-1.6.3-29bac69d97-79803984f3.zip
new file mode 100644
index 000000000..a6554f993
Binary files /dev/null and b/.yarn/cache/ufo-npm-1.6.3-29bac69d97-79803984f3.zip differ
diff --git a/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip b/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip
new file mode 100644
index 000000000..3f7ff8f9e
Binary files /dev/null and b/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip differ
diff --git a/.yarn/cache/unrun-npm-0.2.27-11dabf0dfa-70a589218b.zip b/.yarn/cache/unrun-npm-0.2.27-11dabf0dfa-70a589218b.zip
new file mode 100644
index 000000000..fc2ac917c
Binary files /dev/null and b/.yarn/cache/unrun-npm-0.2.27-11dabf0dfa-70a589218b.zip differ
diff --git a/.yarn/cache/vite-npm-7.3.1-330baf2f0d-62e48ffa42.zip b/.yarn/cache/vite-npm-7.3.1-330baf2f0d-62e48ffa42.zip
new file mode 100644
index 000000000..26ec0ade3
Binary files /dev/null and b/.yarn/cache/vite-npm-7.3.1-330baf2f0d-62e48ffa42.zip differ
diff --git a/.yarn/cache/vitest-npm-4.0.18-52f42bdace-6c6464ebcf.zip b/.yarn/cache/vitest-npm-4.0.18-52f42bdace-6c6464ebcf.zip
new file mode 100644
index 000000000..df66e7cbf
Binary files /dev/null and b/.yarn/cache/vitest-npm-4.0.18-52f42bdace-6c6464ebcf.zip differ
diff --git a/.yarn/cache/ws-npm-8.19.0-c967c046a5-26e4901e93.zip b/.yarn/cache/ws-npm-8.19.0-c967c046a5-26e4901e93.zip
new file mode 100644
index 000000000..457091195
Binary files /dev/null and b/.yarn/cache/ws-npm-8.19.0-c967c046a5-26e4901e93.zip differ
diff --git a/assets/code-link.png b/assets/code-link.png
new file mode 100644
index 000000000..0796fb10f
Binary files /dev/null and b/assets/code-link.png differ
diff --git a/package.json b/package.json
index 97e4a16c2..fe4600ccd 100644
--- a/package.json
+++ b/package.json
@@ -9,6 +9,7 @@
"plugins/*"
],
"scripts": {
+ "test:scripts": "vitest run scripts/",
"check": "turbo run --continue check-biome check-eslint check-prettier check-svelte check-typescript check-vitest",
"dev": "turbo run dev --concurrency=40",
"fix-biome": "turbo run --continue check-biome -- --write",
@@ -22,16 +23,22 @@
"g:dev": "cd $INIT_CWD && run g:vite",
"g:preview": "cd $INIT_CWD && run g:vite preview",
"g:vite": "cd $INIT_CWD && NODE_OPTIONS='--no-warnings=ExperimentalWarning' vite --config ${VITE_CONFIG_PATH:-$PROJECT_CWD/packages/vite-config/src/index.ts}",
+ "g:pack": "cd $INIT_CWD && framer-plugin-tools pack",
"preview": "turbo run preview --concurrency=40"
},
"devDependencies": {
"@biomejs/biome": "^2.2.4",
"@framer/eslint-config": "workspace:*",
"@framer/vite-config": "workspace:*",
+ "@types/node": "^22.15.21",
"eslint": "^9.35.0",
+ "framer-plugin-tools": "workspace:*",
"jiti": "^2.5.1",
+ "tsx": "^4.19.0",
"turbo": "^2.5.6",
"typescript": "^5.9.2",
- "vite": "^7.1.11"
+ "valibot": "^1.2.0",
+ "vite": "^7.1.11",
+ "vitest": "^3.2.4"
}
}
diff --git a/packages/code-link-cli/README.md b/packages/code-link-cli/README.md
new file mode 100644
index 000000000..66b29ab73
--- /dev/null
+++ b/packages/code-link-cli/README.md
@@ -0,0 +1,3 @@
+# Framer Code Link CLI
+
+Two-way syncing Framer of code components between Framer and your computer.
diff --git a/packages/code-link-cli/package.json b/packages/code-link-cli/package.json
new file mode 100644
index 000000000..4ef5ad6c3
--- /dev/null
+++ b/packages/code-link-cli/package.json
@@ -0,0 +1,40 @@
+{
+ "name": "framer-code-link",
+ "version": "0.7.0",
+ "description": "CLI tool for syncing Framer code components - controller-centric architecture",
+ "main": "dist/index.mjs",
+ "type": "module",
+ "bin": "./dist/index.mjs",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "dev": "NODE_ENV=development tsx src/index.ts",
+ "build": "tsdown",
+ "start": "node dist/index.mjs",
+ "test": "vitest run"
+ },
+ "keywords": [
+ "framer",
+ "sync",
+ "code-components"
+ ],
+ "author": "",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript/ata": "^0.9.8",
+ "chokidar": "^5.0.0",
+ "commander": "^14.0.3",
+ "prettier": "^3.7.4",
+ "typescript": "^5.9.3",
+ "ws": "^8.18.3"
+ },
+ "devDependencies": {
+ "@code-link/shared": "workspace:*",
+ "@types/node": "^22.19.2",
+ "@types/ws": "^8.18.1",
+ "tsdown": "^0.20.1",
+ "tsx": "^4.21.0",
+ "vitest": "^4.0.15"
+ }
+}
diff --git a/packages/code-link-cli/src/controller.test.ts b/packages/code-link-cli/src/controller.test.ts
new file mode 100644
index 000000000..081accb6d
--- /dev/null
+++ b/packages/code-link-cli/src/controller.test.ts
@@ -0,0 +1,626 @@
+import { describe, expect, it } from "vitest"
+import type { WebSocket } from "ws"
+import { transition } from "./controller.ts"
+import { DEFAULT_REMOTE_DRIFT_MS, filterEchoedFiles } from "./helpers/files.ts"
+import { createHashTracker } from "./utils/hash-tracker.ts"
+
+// Readable coverage of core controller functionality
+
+const mockSocket = {} as WebSocket
+
+function disconnectedState() {
+ return {
+ mode: "disconnected" as const,
+ socket: null,
+ pendingRemoteChanges: [],
+ }
+}
+
+function watchingState() {
+ return {
+ mode: "watching" as const,
+ socket: mockSocket,
+ pendingRemoteChanges: [],
+ }
+}
+
+function handshakingState() {
+ return {
+ mode: "handshaking" as const,
+ socket: mockSocket,
+ pendingRemoteChanges: [],
+ }
+}
+
+function snapshotProcessingState() {
+ return {
+ mode: "snapshot_processing" as const,
+ socket: mockSocket,
+ pendingRemoteChanges: [],
+ }
+}
+
+function conflictResolutionState(
+ pendingConflicts: {
+ fileName: string
+ localContent: string | null
+ remoteContent: string | null
+ localModifiedAt?: number
+ remoteModifiedAt?: number
+ lastSyncedAt?: number
+ localClean?: boolean
+ }[]
+) {
+ return {
+ mode: "conflict_resolution" as const,
+ socket: mockSocket,
+ pendingConflicts,
+ pendingRemoteChanges: [],
+ }
+}
+
+describe("Code Link", () => {
+ // FIRST-TIME SYNC
+ // When CLI connects to a project for the first time
+
+ describe("First-Time Sync", () => {
+ it("downloads new files from Framer", () => {
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [
+ { name: "Button.tsx", content: "export const Button = () => ", modifiedAt: Date.now() },
+ ],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(true)
+ })
+
+ it("uploads new local files to Framer", () => {
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [],
+ localOnly: [
+ {
+ name: "LocalComponent.tsx",
+ content: "export const Local = () =>
",
+ modifiedAt: Date.now(),
+ },
+ ],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ const sendEffects = result.effects.filter(e => e.type === "SEND_MESSAGE")
+ expect(
+ sendEffects.some(
+ e => "payload" in e && (e as { payload: { type: string } }).payload.type === "file-change"
+ )
+ ).toBe(true)
+ })
+
+ it("detects conflicts when both sides have different content", () => {
+ const state = snapshotProcessingState()
+ const conflict = {
+ fileName: "Shared.tsx",
+ localContent: "local version",
+ remoteContent: "framer version",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [conflict],
+ safeWrites: [],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("conflict_resolution")
+ expect(result.effects.some(e => e.type === "REQUEST_CONFLICT_VERSIONS")).toBe(true)
+ })
+ })
+
+ // RECONNECT AFTER OFFLINE
+ // The 4 permutations: nothing changed, local only, remote only, both changed
+
+ describe("Reconnect After Offline", () => {
+ it("no-op when nothing changed on either side", () => {
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.filter(e => e.type === "WRITE_FILES")).toHaveLength(0)
+ expect(result.effects.filter(e => e.type === "SEND_MESSAGE")).toHaveLength(0)
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("auto-uploads when only local changed", () => {
+ // Local edited, remote unchanged since last sync → upload local without prompt
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "edited locally",
+ remoteContent: "unchanged in framer",
+ lastSyncedAt: 5_000,
+ localClean: false, // local was modified
+ }
+ const state = conflictResolutionState([conflict])
+
+ const result = transition(state, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: 5_000 }], // remote unchanged
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some(e => e.type === "SEND_LOCAL_CHANGE")).toBe(true)
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("auto-downloads when only remote changed", () => {
+ // Remote edited, local unchanged since last sync → download remote without prompt
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "unchanged locally",
+ remoteContent: "edited in framer",
+ lastSyncedAt: 5_000,
+ localClean: true, // local matches last sync
+ }
+ const state = conflictResolutionState([conflict])
+
+ const result = transition(state, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: 10_000 }], // remote changed
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(true)
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("shows conflict UI when both sides changed", () => {
+ // Both edited → must ask user which to keep
+ const syncTime = 5_000
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "edited locally",
+ remoteContent: "edited in framer",
+ lastSyncedAt: syncTime,
+ localClean: false, // local was modified
+ }
+ const state = conflictResolutionState([conflict])
+
+ const result = transition(state, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ // Remote changed well after sync (beyond drift threshold)
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: syncTime + DEFAULT_REMOTE_DRIFT_MS + 1000 }],
+ })
+
+ expect(result.state.mode).toBe("conflict_resolution")
+ expect(result.effects.some(e => e.type === "REQUEST_CONFLICT_DECISIONS")).toBe(true)
+ })
+ })
+
+ // LIVE EDITING
+ // Real-time sync during active editing session
+
+ describe("Live Editing", () => {
+ it("pushes local saves to Framer", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "change",
+ relativePath: "Button.tsx",
+ content: "export const Button = () => ",
+ },
+ })
+
+ expect(result.effects.some(e => e.type === "SEND_LOCAL_CHANGE")).toBe(true)
+ const effect = result.effects.find(e => e.type === "SEND_LOCAL_CHANGE")
+ expect(effect).toMatchObject({ fileName: "Button.tsx" })
+ })
+
+ it("pulls Framer edits to disk", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_CHANGE",
+ file: { name: "Button.tsx", content: "updated from framer", modifiedAt: Date.now() },
+ })
+
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(true)
+ })
+
+ it("ignores changes during initial sync", () => {
+ // Changes arriving during snapshot processing are ignored - snapshot handles reconciliation
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_CHANGE",
+ file: { name: "Button.tsx", content: "late arrival", modifiedAt: Date.now() },
+ })
+
+ expect(result.state.pendingRemoteChanges).toHaveLength(0)
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(false)
+ expect(result.effects.some(e => e.type === "LOG")).toBe(true)
+ })
+
+ it("creates new local file and uploads to Framer", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "add",
+ relativePath: "NewComponent.tsx",
+ content: "export const New = () => New
",
+ },
+ })
+
+ expect(result.effects.some(e => e.type === "SEND_LOCAL_CHANGE")).toBe(true)
+ })
+ })
+
+ // FOLDER STRUCTURES
+ // Nested paths like components/Button.tsx
+
+ describe("Folder Structures", () => {
+ it("downloads files to nested paths", () => {
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [
+ {
+ name: "components/Button.tsx",
+ content: "export const Button = () => ",
+ modifiedAt: Date.now(),
+ },
+ ],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ const writeEffect = result.effects.find(e => e.type === "WRITE_FILES")
+ expect(writeEffect).toMatchObject({
+ files: [{ name: "components/Button.tsx" }],
+ })
+ })
+
+ it("uploads local files from subdirectories", () => {
+ const state = snapshotProcessingState()
+ const result = transition(state, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [],
+ localOnly: [
+ {
+ name: "hooks/useAuth.ts",
+ content: "export function useAuth() {}",
+ modifiedAt: Date.now(),
+ },
+ ],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ const sendEffects = result.effects.filter(e => e.type === "SEND_MESSAGE")
+ expect(
+ sendEffects.some(
+ e =>
+ "payload" in e &&
+ (e as { payload: { fileName?: string } }).payload.fileName === "hooks/useAuth.ts"
+ )
+ ).toBe(true)
+ })
+
+ it("handles watcher events for nested paths", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "change",
+ relativePath: "components/ui/Card.tsx",
+ content: "export const Card = () => ",
+ },
+ })
+
+ const effect = result.effects.find(e => e.type === "SEND_LOCAL_CHANGE")
+ expect(effect).toMatchObject({ fileName: "components/ui/Card.tsx" })
+ })
+
+ it("handles remote changes to nested paths", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_CHANGE",
+ file: {
+ name: "lib/utils/format.ts",
+ content: "export function format() {}",
+ modifiedAt: Date.now(),
+ },
+ })
+
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(true)
+ })
+
+ it("handles deletions of nested files", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "components/deprecated/OldButton.tsx",
+ })
+
+ const effect = result.effects.find(e => e.type === "DELETE_LOCAL_FILES")
+ expect(effect).toMatchObject({ names: ["components/deprecated/OldButton.tsx"] })
+ })
+ })
+
+ // DELETION HANDLING
+ // Asymmetric by design: Framer is source of truth
+
+ describe("Deletion Handling", () => {
+ it("auto-applies Framer deletions locally", () => {
+ // Framer delete → immediately delete local (Local likely has version control, undos are easier)
+ const state = watchingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "Removed.tsx",
+ })
+
+ expect(result.effects.some(e => e.type === "DELETE_LOCAL_FILES")).toBe(true)
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ const effect = result.effects.find(e => e.type === "DELETE_LOCAL_FILES")
+ expect(effect).toMatchObject({ names: ["Removed.tsx"] })
+ })
+
+ it("prompts before propagating local deletes to Framer", () => {
+ // Local delete → ask user "Delete from Framer too?" (Must confirm as deletions in Framer as permanent)
+ const state = watchingState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: { kind: "delete", relativePath: "Deleted.tsx" },
+ })
+
+ expect(result.effects.some(e => e.type === "LOCAL_INITIATED_FILE_DELETE")).toBe(true)
+ // Should NOT immediately send delete to Framer
+ expect(
+ result.effects.some(
+ e =>
+ e.type === "SEND_MESSAGE" &&
+ "payload" in e &&
+ (e as { payload: { type: string } }).payload.type === "file-delete"
+ )
+ ).toBe(false)
+ })
+
+ it("deletes from Framer after user confirms", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "LOCAL_DELETE_APPROVED",
+ fileName: "Deleted.tsx",
+ })
+
+ expect(result.effects.some(e => e.type === "DELETE_LOCAL_FILES")).toBe(true)
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("restores file when user cancels local delete", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "LOCAL_DELETE_REJECTED",
+ fileName: "Restored.tsx",
+ content: "export const Restored = () => Back!
",
+ })
+
+ expect(result.effects.some(e => e.type === "WRITE_FILES")).toBe(true)
+ const effect = result.effects.find(e => e.type === "WRITE_FILES")
+ expect(effect).toMatchObject({ files: [{ name: "Restored.tsx" }] })
+ })
+ })
+
+ // CONFLICT RESOLUTION
+ // User picks which version to keep for all conflicts
+
+ describe("Conflict Resolution", () => {
+ it("applies all Framer versions when user picks remote", () => {
+ const state = conflictResolutionState([
+ {
+ fileName: "A.tsx",
+ localContent: "local A",
+ remoteContent: "framer A",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now(),
+ },
+ {
+ fileName: "B.tsx",
+ localContent: "local B",
+ remoteContent: "framer B",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now(),
+ },
+ ])
+
+ const result = transition(state, { type: "CONFLICTS_RESOLVED", resolution: "remote" })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ const writes = result.effects.filter(e => e.type === "WRITE_FILES")
+ expect(writes).toHaveLength(2)
+ })
+
+ it("uploads all local versions when user picks local", () => {
+ const state = conflictResolutionState([
+ {
+ fileName: "A.tsx",
+ localContent: "local A",
+ remoteContent: "framer A",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now(),
+ },
+ {
+ fileName: "B.tsx",
+ localContent: "local B",
+ remoteContent: "framer B",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now(),
+ },
+ ])
+
+ const result = transition(state, { type: "CONFLICTS_RESOLVED", resolution: "local" })
+
+ expect(result.state.mode).toBe("watching")
+ const sends = result.effects.filter(e => e.type === "SEND_MESSAGE")
+ expect(sends).toHaveLength(2)
+ })
+
+ it("handles deletions within conflicts - remote deleted, local has content", () => {
+ const state = conflictResolutionState([
+ { fileName: "Deleted.tsx", localContent: "still here locally", remoteContent: null, localClean: false },
+ ])
+
+ const result = transition(state, { type: "CONFLICTS_RESOLVED", resolution: "remote" })
+
+ // Remote is null → delete locally
+ expect(result.effects.some(e => e.type === "DELETE_LOCAL_FILES")).toBe(true)
+ })
+
+ it("handles deletion within conflicts - local deleted, remote has content", () => {
+ const state = conflictResolutionState([
+ { fileName: "Deleted.tsx", localContent: null, remoteContent: "still in framer" },
+ ])
+
+ const result = transition(state, { type: "CONFLICTS_RESOLVED", resolution: "local" })
+
+ // Local is null → request delete confirmation (don't auto-delete from Framer)
+ expect(result.effects.some(e => e.type === "LOCAL_INITIATED_FILE_DELETE")).toBe(true)
+ })
+ })
+
+ // ECHO PREVENTION
+ // Avoid infinite sync loops by tracking what we just sent/received
+
+ describe("Echo Prevention", () => {
+ it("skips inbound changes matching last outbound", () => {
+ const tracker = createHashTracker()
+ tracker.remember("Button.tsx", "content we just sent")
+
+ const filtered = filterEchoedFiles(
+ [{ name: "Button.tsx", content: "content we just sent", modifiedAt: Date.now() }],
+ tracker
+ )
+
+ expect(filtered).toHaveLength(0)
+ })
+
+ it("applies inbound changes with different content", () => {
+ const tracker = createHashTracker()
+ tracker.remember("Button.tsx", "old content")
+
+ const filtered = filterEchoedFiles(
+ [{ name: "Button.tsx", content: "new content from framer", modifiedAt: Date.now() }],
+ tracker
+ )
+
+ expect(filtered).toHaveLength(1)
+ })
+ })
+
+ // EDGE CASES
+ // Robustness: wrong modes, disconnected states, case sensitivity
+
+ describe("Edge Cases", () => {
+ it("ignores local changes when disconnected", () => {
+ const state = disconnectedState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: { kind: "change", relativePath: "Test.tsx", content: "content" },
+ })
+
+ expect(result.effects.some(e => e.type === "SEND_LOCAL_CHANGE")).toBe(false)
+ })
+
+ it("ignores local changes during handshake", () => {
+ const state = handshakingState()
+ const result = transition(state, {
+ type: "WATCHER_EVENT",
+ event: { kind: "change", relativePath: "Test.tsx", content: "content" },
+ })
+
+ expect(result.effects.some(e => e.type === "SEND_LOCAL_CHANGE")).toBe(false)
+ })
+
+ it("rejects remote deletions while disconnected", () => {
+ const state = disconnectedState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "Test.tsx",
+ })
+
+ expect(result.effects.some(e => e.type === "DELETE_LOCAL_FILES")).toBe(false)
+ })
+
+ it("ignores conflict resolution when not in conflict mode", () => {
+ const state = watchingState()
+ const result = transition(state, {
+ type: "CONFLICTS_RESOLVED",
+ resolution: "remote",
+ })
+
+ expect(result.effects.some(e => e.type === "LOG" && e.level === "warn")).toBe(true)
+ })
+
+ it("persists state on disconnect", () => {
+ const state = watchingState()
+ const result = transition(state, { type: "DISCONNECT" })
+
+ expect(result.state.mode).toBe("disconnected")
+ expect(result.effects.some(e => e.type === "PERSIST_STATE")).toBe(true)
+ })
+ })
+
+ // CONNECTION LIFECYCLE
+ // Handshake flow and state transitions
+
+ describe("Connection Lifecycle", () => {
+ it("transitions disconnected → handshaking on connect", () => {
+ const state = disconnectedState()
+ const result = transition(state, {
+ type: "HANDSHAKE",
+ socket: mockSocket,
+ projectInfo: { projectId: "abc123", projectName: "My Project" },
+ })
+
+ expect(result.state.mode).toBe("handshaking")
+ expect(result.effects.some(e => e.type === "INIT_WORKSPACE")).toBe(true)
+ expect(result.effects.some(e => e.type === "SEND_MESSAGE")).toBe(true)
+ })
+
+ it("requests file list after handshake", () => {
+ const state = disconnectedState()
+ const result = transition(state, {
+ type: "HANDSHAKE",
+ socket: mockSocket,
+ projectInfo: { projectId: "abc123", projectName: "My Project" },
+ })
+
+ const sendEffect = result.effects.find(e => e.type === "SEND_MESSAGE")
+ expect(sendEffect).toMatchObject({ payload: { type: "request-files" } })
+ })
+
+ it("transitions handshaking → snapshot_processing on file list", () => {
+ const state = handshakingState()
+ const result = transition(state, {
+ type: "REMOTE_FILE_LIST",
+ files: [{ name: "Test.tsx", content: "content", modifiedAt: Date.now() }],
+ })
+
+ expect(result.state.mode).toBe("snapshot_processing")
+ expect(result.effects.some(e => e.type === "DETECT_CONFLICTS")).toBe(true)
+ })
+ })
+})
diff --git a/packages/code-link-cli/src/controller.ts b/packages/code-link-cli/src/controller.ts
new file mode 100644
index 000000000..03eac1002
--- /dev/null
+++ b/packages/code-link-cli/src/controller.ts
@@ -0,0 +1,1239 @@
+/**
+ * CLI Controller
+ *
+ * All runtime state and orchestration of the sync lifecycle.
+ * Helpers should provide data and never hold control.
+ */
+
+import type { CliToPluginMessage, PluginToCliMessage } from "@code-link/shared"
+import { pluralize, shortProjectHash } from "@code-link/shared"
+import fs from "fs/promises"
+import type { WebSocket } from "ws"
+import { initConnection, sendMessage } from "./helpers/connection.ts"
+import {
+ autoResolveConflicts,
+ deleteLocalFile,
+ detectConflicts,
+ filterEchoedFiles,
+ listFiles,
+ readFileSafe,
+ writeRemoteFiles,
+} from "./helpers/files.ts"
+import { Installer } from "./helpers/installer.ts"
+import { PluginUserPromptCoordinator } from "./helpers/plugin-prompts.ts"
+import { validateIncomingChange } from "./helpers/sync-validator.ts"
+import { initWatcher } from "./helpers/watcher.ts"
+import type { Config, Conflict, ConflictVersionData, FileInfo, WatcherEvent } from "./types.ts"
+import { FileMetadataCache, type FileSyncMetadata } from "./utils/file-metadata-cache.ts"
+import { createHashTracker } from "./utils/hash-tracker.ts"
+import {
+ cancelDisconnectMessage,
+ debug,
+ didShowDisconnect,
+ error,
+ fileDelete,
+ fileDown,
+ fileUp,
+ info,
+ resetDisconnectState,
+ scheduleDisconnectMessage,
+ status,
+ success,
+ warn,
+ wasRecentlyDisconnected,
+} from "./utils/logging.ts"
+import { findOrCreateProjectDir } from "./utils/project.ts"
+import { hashFileContent } from "./utils/state-persistence.ts"
+
+/**
+ * Explicit sync lifecycle modes
+ *
+ * Normal flow: disconnected → handshaking → snapshot_processing →
+ * (conflict_resolution, only if conflicts were detected) → watching.
+ */
+export type SyncMode = "disconnected" | "handshaking" | "snapshot_processing" | "conflict_resolution" | "watching"
+
+/**
+ * Shared state that persists across all lifecycle modes
+ */
+interface SyncStateBase {
+  // Remote snapshot received during handshake; cleared once the machine
+  // settles into watching mode.
+  pendingRemoteChanges: FileInfo[]
+}
+
+// No active socket. The only mode in which HANDSHAKE is accepted.
+type DisconnectedState = SyncStateBase & {
+  mode: "disconnected"
+  socket: null
+}
+
+// Socket open; waiting for the plugin's initial file list (REMOTE_FILE_LIST).
+type HandshakingState = SyncStateBase & {
+  mode: "handshaking"
+  socket: WebSocket
+}
+
+// File list received; conflict detection against local files is in flight.
+type SnapshotProcessingState = SyncStateBase & {
+  mode: "snapshot_processing"
+  socket: WebSocket
+}
+
+// Conflicts found during snapshot processing; waiting for version data
+// and/or a user decision before continuing.
+type ConflictResolutionState = SyncStateBase & {
+  mode: "conflict_resolution"
+  socket: WebSocket
+  pendingConflicts: Conflict[]
+}
+
+// Steady state: local watcher events and remote changes flow both ways.
+type WatchingState = SyncStateBase & {
+  mode: "watching"
+  socket: WebSocket
+}
+
+// Discriminated union over `mode`; narrows socket/pendingConflicts per mode.
+export type SyncState =
+  | DisconnectedState
+  | HandshakingState
+  | SnapshotProcessingState
+  | ConflictResolutionState
+  | WatchingState
+
+/**
+ * Events that drive state transitions
+ *
+ * Produced by the WebSocket message handler, the file watcher, and by
+ * effect executors that emit follow-up events (e.g. CONFLICTS_DETECTED).
+ */
+type SyncEvent =
+  // Plugin connected and identified its project.
+  | {
+      type: "HANDSHAKE"
+      socket: WebSocket
+      projectInfo: { projectId: string; projectName: string }
+    }
+  // Plugin asked for our local file list.
+  | { type: "REQUEST_FILES" }
+  // Plugin sent its full file snapshot.
+  | { type: "REMOTE_FILE_LIST"; files: FileInfo[] }
+  // Emitted by the DETECT_CONFLICTS executor after comparing snapshots.
+  | {
+      type: "CONFLICTS_DETECTED"
+      conflicts: Conflict[]
+      safeWrites: FileInfo[]
+      localOnly: FileInfo[]
+    }
+  // Single remote file changed; fileMeta is our cached sync metadata, if any.
+  | { type: "REMOTE_FILE_CHANGE"; file: FileInfo; fileMeta?: FileSyncMetadata }
+  // Remote deleted a file (already gone on the Framer side).
+  | { type: "REMOTE_FILE_DELETE"; fileName: string }
+  // User confirmed / rejected a locally-initiated delete prompt.
+  | { type: "LOCAL_DELETE_APPROVED"; fileName: string }
+  | { type: "LOCAL_DELETE_REJECTED"; fileName: string; content: string }
+  // User chose one resolution for all outstanding conflicts.
+  | {
+      type: "CONFLICTS_RESOLVED"
+      resolution: "local" | "remote"
+    }
+  // Remote acknowledged that our local change was applied.
+  | {
+      type: "FILE_SYNCED_CONFIRMATION"
+      fileName: string
+      remoteModifiedAt: number
+    }
+  | { type: "DISCONNECT" }
+  // Local filesystem change reported by the watcher.
+  | { type: "WATCHER_EVENT"; event: WatcherEvent }
+  // Remote version data used to auto-resolve conflicts where possible.
+  | {
+      type: "CONFLICT_VERSION_RESPONSE"
+      versions: ConflictVersionData[]
+    }
+
+/**
+ * Side effects emitted by transitions
+ *
+ * Effects are plain data; `executeEffect` interprets them, so `transition`
+ * itself stays pure and unit-testable.
+ */
+type Effect =
+  // Create/locate the project directory and files dir.
+  | {
+      type: "INIT_WORKSPACE"
+      projectInfo: { projectId: string; projectName: string }
+    }
+  // Load the on-disk file metadata cache.
+  | { type: "LOAD_PERSISTED_STATE" }
+  // Send a message to the plugin over the active socket.
+  | { type: "SEND_MESSAGE"; payload: CliToPluginMessage }
+  // List local files and send them to the plugin as a file-list message.
+  | { type: "LIST_LOCAL_FILES" }
+  // Compare the remote snapshot to local files; yields CONFLICTS_DETECTED.
+  | { type: "DETECT_CONFLICTS"; remoteFiles: FileInfo[] }
+  // Write remote files to disk. silent suppresses per-file indicators;
+  // skipEcho drops files whose content matches the echo hash tracker.
+  | {
+      type: "WRITE_FILES"
+      files: FileInfo[]
+      silent?: boolean
+      skipEcho?: boolean
+    }
+  // Delete local files (remote-initiated or confirmed deletes).
+  | { type: "DELETE_LOCAL_FILES"; names: string[] }
+  // Ask the user to resolve conflicts / fetch remote version data first.
+  | { type: "REQUEST_CONFLICT_DECISIONS"; conflicts: Conflict[] }
+  | { type: "REQUEST_CONFLICT_VERSIONS"; conflicts: Conflict[] }
+  // Record the synced snapshot (hash + timestamp) for a file.
+  | {
+      type: "UPDATE_FILE_METADATA"
+      fileName: string
+      remoteModifiedAt: number
+    }
+  // Push a local file change to the plugin (with echo/no-op suppression).
+  | {
+      type: "SEND_LOCAL_CHANGE"
+      fileName: string
+      content: string
+    }
+  // A delete that originated locally; may require user confirmation.
+  | {
+      type: "LOCAL_INITIATED_FILE_DELETE"
+      fileNames: string[]
+    }
+  // Flush the metadata cache to disk.
+  | { type: "PERSIST_STATE" }
+  // Notify the plugin and print the sync summary.
+  | {
+      type: "SYNC_COMPLETE"
+      totalCount: number
+      updatedCount: number
+      unchangedCount: number
+    }
+  // Emit a log line at the given level.
+  | {
+      type: "LOG"
+      level: "info" | "debug" | "warn" | "success"
+      message: string
+    }
+
+/** Builds a LOG effect carrying the given severity and message text. */
+function log(level: "info" | "debug" | "warn" | "success", message: string): Effect {
+  const entry: Effect = { type: "LOG", level, message }
+  return entry
+}
+
+/**
+ * Pure state transition function
+ * Takes current state + event, returns new state + effects to execute
+ *
+ * Performs no I/O itself: every side effect is described as an Effect value
+ * and later interpreted by the effect executor. Events that are invalid for
+ * the current mode are logged (warn) and ignored rather than thrown.
+ */
+function transition(state: SyncState, event: SyncEvent): { state: SyncState; effects: Effect[] } {
+  const effects: Effect[] = []
+
+  switch (event.type) {
+    case "HANDSHAKE": {
+      // Handshake is only meaningful from a cold (disconnected) state.
+      if (state.mode !== "disconnected") {
+        effects.push(log("warn", `Received HANDSHAKE in mode ${state.mode}, ignoring`))
+        return { state, effects }
+      }
+
+      // Effect order matters: INIT_WORKSPACE sets projectDir, which
+      // LOAD_PERSISTED_STATE reads before we request the remote file list.
+      effects.push(
+        { type: "INIT_WORKSPACE", projectInfo: event.projectInfo },
+        { type: "LOAD_PERSISTED_STATE" },
+        { type: "SEND_MESSAGE", payload: { type: "request-files" } }
+      )
+
+      return {
+        state: {
+          ...state,
+          mode: "handshaking",
+          socket: event.socket,
+        },
+        effects,
+      }
+    }
+
+    case "FILE_SYNCED_CONFIRMATION": {
+      // Remote confirms they received our local change; record the synced
+      // snapshot so future identical local changes are skipped.
+      effects.push(log("debug", `Remote confirmed sync: ${event.fileName}`), {
+        type: "UPDATE_FILE_METADATA",
+        fileName: event.fileName,
+        remoteModifiedAt: event.remoteModifiedAt,
+      })
+
+      return { state, effects }
+    }
+
+    case "DISCONNECT": {
+      effects.push({ type: "PERSIST_STATE" }, log("debug", "Disconnected, persisting state"))
+
+      // Pending conflicts are discarded; on reconnect the snapshot flow
+      // (REMOTE_FILE_LIST → DETECT_CONFLICTS) will re-detect them.
+      if (state.mode === "conflict_resolution") {
+        const { pendingConflicts: _discarded, ...rest } = state
+        return {
+          state: {
+            ...rest,
+            mode: "disconnected",
+            socket: null,
+          },
+          effects,
+        }
+      }
+
+      return {
+        state: {
+          ...state,
+          mode: "disconnected",
+          socket: null,
+        },
+        effects,
+      }
+    }
+
+    case "REQUEST_FILES": {
+      // Plugin is asking for our local file list
+      // Valid in any mode except disconnected
+      if (state.mode === "disconnected") {
+        effects.push(log("warn", "Received REQUEST_FILES while disconnected, ignoring"))
+        return { state, effects }
+      }
+
+      effects.push(log("debug", "Plugin requested file list"), {
+        type: "LIST_LOCAL_FILES",
+      })
+
+      return { state, effects }
+    }
+
+    case "REMOTE_FILE_LIST": {
+      // Only expected once, right after handshake.
+      if (state.mode !== "handshaking") {
+        effects.push(log("warn", `Received REMOTE_FILE_LIST in mode ${state.mode}, ignoring`))
+        return { state, effects }
+      }
+
+      effects.push(log("debug", `Received file list: ${event.files.length} files`))
+
+      // During initial file list, detect conflicts between remote snapshot and local files
+      effects.push({
+        type: "DETECT_CONFLICTS",
+        remoteFiles: event.files,
+      })
+
+      // Transition to snapshot_processing - conflict detection effect will determine next mode
+      return {
+        state: {
+          ...state,
+          mode: "snapshot_processing",
+          pendingRemoteChanges: event.files,
+        },
+        effects,
+      }
+    }
+
+    case "CONFLICTS_DETECTED": {
+      if (state.mode !== "snapshot_processing") {
+        effects.push(log("warn", `Received CONFLICTS_DETECTED in mode ${state.mode}, ignoring`))
+        return { state, effects }
+      }
+
+      const { conflicts, safeWrites, localOnly } = event
+
+      // detectConflicts returns:
+      // - safeWrites = files we can apply (remote-only or local unchanged)
+      // - conflicts = files that need manual resolution (content or deletion conflicts)
+      // - localOnly = files to upload
+      // (unchanged files have metadata recorded in DETECT_CONFLICTS executor)
+
+      // Apply safe writes
+      if (safeWrites.length > 0) {
+        effects.push(log("debug", `Applying ${safeWrites.length} safe writes`), {
+          type: "WRITE_FILES",
+          files: safeWrites,
+          silent: true,
+        })
+      }
+
+      // Upload local-only files
+      if (localOnly.length > 0) {
+        effects.push(log("debug", `Uploading ${localOnly.length} local-only files`))
+        for (const file of localOnly) {
+          effects.push({
+            type: "SEND_MESSAGE",
+            payload: {
+              type: "file-change",
+              fileName: file.name,
+              content: file.content,
+            },
+          })
+        }
+      }
+
+      // If conflicts remain, request remote version data before surfacing to user
+      if (conflicts.length > 0) {
+        effects.push(log("debug", `${pluralize(conflicts.length, "conflict")} require version check`), {
+          type: "REQUEST_CONFLICT_VERSIONS",
+          conflicts,
+        })
+
+        return {
+          state: {
+            ...state,
+            mode: "conflict_resolution",
+            pendingConflicts: conflicts,
+          },
+          effects,
+        }
+      }
+
+      // No conflicts - transition to watching
+      // Summary counts: total covers the remote snapshot plus local uploads;
+      // unchanged is whatever part of the snapshot needed no write.
+      const remoteTotal = state.pendingRemoteChanges.length
+      const totalCount = remoteTotal + localOnly.length
+      const updatedCount = safeWrites.length + localOnly.length
+      const unchangedCount = Math.max(0, remoteTotal - safeWrites.length)
+      effects.push(
+        { type: "PERSIST_STATE" },
+        {
+          type: "SYNC_COMPLETE",
+          totalCount,
+          updatedCount,
+          unchangedCount,
+        }
+      )
+
+      return {
+        state: {
+          ...state,
+          mode: "watching",
+          pendingRemoteChanges: [],
+        },
+        effects,
+      }
+    }
+
+    case "REMOTE_FILE_CHANGE": {
+      // Use helper to validate the incoming change
+      const validation = validateIncomingChange(event.fileMeta, state.mode)
+
+      if (validation.action === "queue") {
+        // Changes during initial sync are ignored - the snapshot handles reconciliation
+        effects.push(log("debug", `Ignoring file change during sync: ${event.file.name}`))
+        return { state, effects }
+      }
+
+      if (validation.action === "reject") {
+        effects.push(log("warn", `Rejected file change: ${event.file.name} (${validation.reason})`))
+        return { state, effects }
+      }
+
+      // Apply the change; skipEcho filters out writes that merely echo
+      // content we just sent ourselves.
+      effects.push(log("debug", `Applying remote change: ${event.file.name}`), {
+        type: "WRITE_FILES",
+        files: [event.file],
+        skipEcho: true,
+      })
+
+      return { state, effects }
+    }
+
+    case "REMOTE_FILE_DELETE": {
+      // Reject if not connected
+      if (state.mode === "disconnected") {
+        effects.push(log("warn", `Rejected delete while disconnected: ${event.fileName}`))
+        return { state, effects }
+      }
+
+      // Remote deletes should always be applied immediately
+      // (the file is already gone from Framer)
+      effects.push(
+        log("debug", `Remote delete applied: ${event.fileName}`),
+        { type: "DELETE_LOCAL_FILES", names: [event.fileName] },
+        { type: "PERSIST_STATE" }
+      )
+
+      return { state, effects }
+    }
+
+    case "LOCAL_DELETE_APPROVED": {
+      // User confirmed the delete - apply it
+      effects.push(
+        log("debug", `Delete confirmed: ${event.fileName}`),
+        { type: "DELETE_LOCAL_FILES", names: [event.fileName] },
+        { type: "PERSIST_STATE" }
+      )
+
+      return { state, effects }
+    }
+
+    case "LOCAL_DELETE_REJECTED": {
+      // User cancelled - restore the file
+      effects.push(log("debug", `Delete cancelled: ${event.fileName}`))
+      effects.push({
+        type: "WRITE_FILES",
+        files: [
+          {
+            name: event.fileName,
+            content: event.content,
+            modifiedAt: Date.now(),
+          },
+        ],
+      })
+
+      return { state, effects }
+    }
+
+    case "CONFLICTS_RESOLVED": {
+      // Only valid in conflict_resolution mode
+      if (state.mode !== "conflict_resolution") {
+        effects.push(log("warn", `Received CONFLICTS_RESOLVED in mode ${state.mode}, ignoring`))
+        return { state, effects }
+      }
+
+      // User picked one resolution for ALL conflicts
+      if (event.resolution === "remote") {
+        // Apply all remote versions (or delete locally if remote is null)
+        for (const conflict of state.pendingConflicts) {
+          if (conflict.remoteContent === null) {
+            // Remote deleted this file - delete locally
+            effects.push({
+              type: "DELETE_LOCAL_FILES",
+              names: [conflict.fileName],
+            })
+          } else {
+            effects.push({
+              type: "WRITE_FILES",
+              files: [
+                {
+                  name: conflict.fileName,
+                  content: conflict.remoteContent,
+                  modifiedAt: conflict.remoteModifiedAt,
+                },
+              ],
+              silent: true,
+            })
+          }
+        }
+        effects.push(log("success", "Keeping Framer changes"))
+      } else {
+        // Send all local versions (or request delete confirmation if local is null)
+        const localDeletes: string[] = []
+        for (const conflict of state.pendingConflicts) {
+          if (conflict.localContent === null) {
+            localDeletes.push(conflict.fileName)
+          } else {
+            effects.push({
+              type: "SEND_MESSAGE",
+              payload: {
+                type: "file-change",
+                fileName: conflict.fileName,
+                content: conflict.localContent,
+              },
+            })
+          }
+        }
+        // Batch local deletes into single confirmation prompt
+        if (localDeletes.length > 0) {
+          effects.push({
+            type: "LOCAL_INITIATED_FILE_DELETE",
+            fileNames: localDeletes,
+          })
+        }
+        effects.push(log("success", "Keeping local changes"))
+      }
+
+      // All conflicts resolved - transition to watching
+      effects.push(
+        { type: "PERSIST_STATE" },
+        {
+          type: "SYNC_COMPLETE",
+          totalCount: state.pendingConflicts.length,
+          updatedCount: state.pendingConflicts.length,
+          unchangedCount: 0,
+        }
+      )
+
+      const { pendingConflicts: _discarded, ...rest } = state
+      return {
+        state: {
+          ...rest,
+          mode: "watching",
+        },
+        effects,
+      }
+    }
+
+    case "WATCHER_EVENT": {
+      // Local file system change detected
+      const { kind, relativePath, content } = event.event
+
+      // Only process changes in watching mode
+      if (state.mode !== "watching") {
+        effects.push(log("debug", `Ignoring watcher event in ${state.mode} mode: ${kind} ${relativePath}`))
+        return { state, effects }
+      }
+
+      switch (kind) {
+        case "add":
+        case "change": {
+          // Watcher should supply content for add/change; bail out if missing.
+          if (content === undefined) {
+            effects.push(log("warn", `Watcher event missing content: ${relativePath}`))
+            return { state, effects }
+          }
+
+          effects.push({
+            type: "SEND_LOCAL_CHANGE",
+            fileName: relativePath,
+            content,
+          })
+          break
+        }
+
+        case "delete": {
+          effects.push(log("debug", `Local delete detected: ${relativePath}`), {
+            type: "LOCAL_INITIATED_FILE_DELETE",
+            fileNames: [relativePath],
+          })
+          break
+        }
+      }
+
+      return { state, effects }
+    }
+
+    case "CONFLICT_VERSION_RESPONSE": {
+      if (state.mode !== "conflict_resolution") {
+        effects.push(log("warn", `Received CONFLICT_VERSION_RESPONSE in mode ${state.mode}, ignoring`))
+        return { state, effects }
+      }
+
+      // Split pending conflicts into auto-resolvable and user-facing sets.
+      const { autoResolvedLocal, autoResolvedRemote, remainingConflicts } = autoResolveConflicts(
+        state.pendingConflicts,
+        event.versions
+      )
+
+      if (autoResolvedLocal.length > 0) {
+        effects.push(log("debug", `Auto-resolved ${autoResolvedLocal.length} local changes`))
+        const localDeletes: string[] = []
+        for (const conflict of autoResolvedLocal) {
+          if (conflict.localContent === null) {
+            localDeletes.push(conflict.fileName)
+          } else {
+            effects.push({
+              type: "SEND_LOCAL_CHANGE",
+              fileName: conflict.fileName,
+              content: conflict.localContent,
+            })
+          }
+        }
+        // Batch local deletes into single confirmation prompt
+        if (localDeletes.length > 0) {
+          effects.push({
+            type: "LOCAL_INITIATED_FILE_DELETE",
+            fileNames: localDeletes,
+          })
+        }
+      }
+
+      if (autoResolvedRemote.length > 0) {
+        effects.push(log("debug", `Auto-resolved ${autoResolvedRemote.length} remote changes`))
+        for (const conflict of autoResolvedRemote) {
+          if (conflict.remoteContent === null) {
+            // Remote deleted - delete locally
+            effects.push({
+              type: "DELETE_LOCAL_FILES",
+              names: [conflict.fileName],
+            })
+          } else {
+            effects.push({
+              type: "WRITE_FILES",
+              files: [
+                {
+                  name: conflict.fileName,
+                  content: conflict.remoteContent,
+                  modifiedAt: conflict.remoteModifiedAt ?? Date.now(),
+                },
+              ],
+              silent: true, // Auto-resolved during initial sync - no individual indicators
+            })
+          }
+        }
+      }
+
+      // Anything not auto-resolvable goes to the user; stay in
+      // conflict_resolution with the narrowed conflict list.
+      if (remainingConflicts.length > 0) {
+        effects.push(log("warn", `${pluralize(remainingConflicts.length, "conflict")} require resolution`), {
+          type: "REQUEST_CONFLICT_DECISIONS",
+          conflicts: remainingConflicts,
+        })
+
+        return {
+          state: {
+            ...state,
+            pendingConflicts: remainingConflicts,
+          },
+          effects,
+        }
+      }
+
+      const resolvedCount = autoResolvedLocal.length + autoResolvedRemote.length
+      effects.push(
+        { type: "PERSIST_STATE" },
+        {
+          type: "SYNC_COMPLETE",
+          totalCount: resolvedCount,
+          updatedCount: resolvedCount,
+          unchangedCount: 0,
+        }
+      )
+
+      const { pendingConflicts: _discarded, ...rest } = state
+      return {
+        state: {
+          ...rest,
+          mode: "watching",
+          pendingRemoteChanges: [],
+        },
+        effects,
+      }
+    }
+
+    default: {
+      // Defensive: unreachable if SyncEvent stays exhaustive — kept for
+      // forward compatibility when new event types are added.
+      effects.push(log("warn", `Unhandled event type in transition`))
+      return { state, effects }
+    }
+  }
+}
+
+/**
+ * Effect executor - interprets effects and calls helpers
+ * Returns additional events that should be processed (e.g., CONFLICTS_DETECTED after DETECT_CONFLICTS)
+ */
+async function executeEffect(
+ effect: Effect,
+ context: {
+ config: Config
+ hashTracker: ReturnType
+ installer: Installer | null
+ fileMetadataCache: FileMetadataCache
+ userActions: PluginUserPromptCoordinator
+ syncState: SyncState
+ }
+): Promise {
+ const { config, hashTracker, installer, fileMetadataCache, userActions, syncState } = context
+
+ switch (effect.type) {
+ case "INIT_WORKSPACE": {
+ // Initialize project directory if not already set
+ if (!config.projectDir) {
+ const projectName = config.explicitName ?? effect.projectInfo.projectName
+
+ config.projectDir = await findOrCreateProjectDir(config.projectHash, projectName, config.explicitDir)
+
+ // May allow customization of file directory in the future
+ config.filesDir = `${config.projectDir}/files`
+ debug(`Files directory: ${config.filesDir}`)
+ await fs.mkdir(config.filesDir, { recursive: true })
+ }
+ return []
+ }
+
+ case "LOAD_PERSISTED_STATE": {
+ if (config.projectDir) {
+ await fileMetadataCache.initialize(config.projectDir)
+ debug(`Loaded persisted metadata for ${fileMetadataCache.size()} files`)
+ }
+ return []
+ }
+
+ case "LIST_LOCAL_FILES": {
+ if (!config.filesDir) {
+ return []
+ }
+
+ // List all local files and send to plugin
+ const files = await listFiles(config.filesDir)
+
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-list",
+ files,
+ })
+ }
+
+ return []
+ }
+
+ case "DETECT_CONFLICTS": {
+ if (!config.filesDir) {
+ return []
+ }
+
+ // Use existing helper to detect conflicts
+ const { conflicts, writes, localOnly, unchanged } = await detectConflicts(
+ effect.remoteFiles,
+ config.filesDir,
+ { persistedState: fileMetadataCache.getPersistedState() }
+ )
+
+ // Record metadata for unchanged files so watcher add events get skipped
+ // (chokidar ignoreInitial=false fires late adds that would otherwise re-upload)
+ for (const file of unchanged) {
+ fileMetadataCache.recordRemoteWrite(file.name, file.content, file.modifiedAt ?? Date.now())
+ }
+
+ // Return CONFLICTS_DETECTED event to continue the flow
+ return [
+ {
+ type: "CONFLICTS_DETECTED",
+ conflicts,
+ safeWrites: writes,
+ localOnly,
+ },
+ ]
+ }
+
+ case "SEND_MESSAGE": {
+ if (syncState.socket) {
+ const sent = await sendMessage(syncState.socket, effect.payload)
+ if (!sent) {
+ warn(`Failed to send message: ${effect.payload.type}`)
+ }
+ } else {
+ warn(`No socket available to send: ${effect.payload.type}`)
+ }
+ return []
+ }
+
+ case "WRITE_FILES": {
+ if (config.filesDir) {
+ // skipEcho skip writes that match hashTracker (inbound echo)
+ // it is opt-in: some callers still need side-effects (metadata/logs)
+ // even when content matches the last hash tracked in-memory.
+ const filesToWrite =
+ effect.skipEcho === true ? filterEchoedFiles(effect.files, hashTracker) : effect.files
+
+ if (effect.skipEcho && filesToWrite.length !== effect.files.length) {
+ const skipped = effect.files.length - filesToWrite.length
+ debug(`Skipped ${pluralize(skipped, "echoed change")}`)
+ }
+
+ if (filesToWrite.length === 0) {
+ return []
+ }
+
+ await writeRemoteFiles(filesToWrite, config.filesDir, hashTracker, installer ?? undefined)
+ for (const file of filesToWrite) {
+ if (!effect.silent) {
+ fileDown(file.name)
+ }
+ const remoteTimestamp = file.modifiedAt ?? Date.now()
+ fileMetadataCache.recordRemoteWrite(file.name, file.content, remoteTimestamp)
+ }
+ }
+ return []
+ }
+
+ case "DELETE_LOCAL_FILES": {
+ if (config.filesDir) {
+ for (const fileName of effect.names) {
+ await deleteLocalFile(fileName, config.filesDir, hashTracker)
+ fileDelete(fileName)
+ fileMetadataCache.recordDelete(fileName)
+ }
+ }
+ return []
+ }
+
+ case "REQUEST_CONFLICT_DECISIONS": {
+ await userActions.requestConflictDecisions(syncState.socket, effect.conflicts)
+
+ return []
+ }
+
+ case "REQUEST_CONFLICT_VERSIONS": {
+ if (!syncState.socket) {
+ warn("Cannot request conflict versions without active socket")
+ return []
+ }
+
+ const persistedState = fileMetadataCache.getPersistedState()
+ const versionRequests = effect.conflicts.map(conflict => {
+ const persisted = persistedState.get(conflict.fileName)
+ return {
+ fileName: conflict.fileName,
+ lastSyncedAt: conflict.lastSyncedAt ?? persisted?.timestamp,
+ }
+ })
+
+ debug(`Requesting remote version data for ${pluralize(versionRequests.length, "file")}`)
+
+ await sendMessage(syncState.socket, {
+ type: "conflict-version-request",
+ conflicts: versionRequests,
+ })
+
+ return []
+ }
+
+ case "UPDATE_FILE_METADATA": {
+ if (!config.filesDir || !config.projectDir) {
+ return []
+ }
+
+ // Read current file content to compute hash
+ const currentContent = await readFileSafe(effect.fileName, config.filesDir)
+
+ if (currentContent !== null) {
+ const contentHash = hashFileContent(currentContent)
+ fileMetadataCache.recordSyncedSnapshot(effect.fileName, contentHash, effect.remoteModifiedAt)
+ }
+
+ return []
+ }
+
+ case "SEND_LOCAL_CHANGE": {
+ const contentHash = hashFileContent(effect.content)
+ const metadata = fileMetadataCache.get(effect.fileName)
+
+ // Skip if file matches last confirmed remote content
+ if (metadata?.lastSyncedHash === contentHash) {
+ debug(`Skipping local change for ${effect.fileName}: matches last synced content`)
+ return []
+ }
+
+ // Echo prevention: skip if we just wrote this exact content
+ if (hashTracker.shouldSkip(effect.fileName, effect.content)) {
+ return []
+ }
+
+ debug(`Local change detected: ${effect.fileName}`)
+
+ try {
+ // Send change to plugin
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-change",
+ fileName: effect.fileName,
+ content: effect.content,
+ })
+ fileUp(effect.fileName)
+ }
+
+ // Only remember hash after successful send (prevents re-sending on failure)
+ hashTracker.remember(effect.fileName, effect.content)
+
+ // Trigger type installer
+ if (installer) {
+ installer.process(effect.fileName, effect.content)
+ }
+ } catch (err) {
+ warn(`Failed to push ${effect.fileName}`)
+ }
+
+ return []
+ }
+
+ case "LOCAL_INITIATED_FILE_DELETE": {
+ // Echo prevention: filter out remote-initiated deletes
+ const filesToDelete = effect.fileNames.filter(fileName => {
+ const shouldSkip = hashTracker.shouldSkipDelete(fileName)
+ if (shouldSkip) {
+ hashTracker.clearDelete(fileName)
+ }
+ return !shouldSkip
+ })
+
+ if (filesToDelete.length === 0) {
+ return []
+ }
+
+ try {
+ const confirmedFiles = await userActions.requestDeleteDecision(syncState.socket, {
+ fileNames: filesToDelete,
+ requireConfirmation: !config.dangerouslyAutoDelete,
+ })
+
+ for (const fileName of confirmedFiles) {
+ hashTracker.forget(fileName)
+ fileMetadataCache.recordDelete(fileName)
+ }
+
+ if (confirmedFiles.length > 0 && syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-delete",
+ fileNames: confirmedFiles,
+ })
+ }
+ } catch (err) {
+ console.warn(`Failed to handle deletion for ${filesToDelete.join(", ")}:`, err)
+ }
+
+ return []
+ }
+
+ case "PERSIST_STATE": {
+ await fileMetadataCache.flush()
+ return []
+ }
+
+ case "SYNC_COMPLETE": {
+ const wasDisconnected = wasRecentlyDisconnected()
+
+ // Notify plugin that sync is complete
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, { type: "sync-complete" })
+ }
+
+ if (wasDisconnected) {
+ // Only show reconnect message if we actually showed the disconnect notice
+ if (didShowDisconnect()) {
+ success(
+ `Reconnected, synced ${effect.totalCount} files (${effect.updatedCount} updated, ${effect.unchangedCount} unchanged)`
+ )
+ status("Watching for changes...")
+ }
+ resetDisconnectState()
+ return []
+ }
+
+ success(
+ `Synced ${effect.totalCount} files (${effect.updatedCount} updated, ${effect.unchangedCount} unchanged)`
+ )
+ status("Watching for changes...")
+ return []
+ }
+
+ case "LOG": {
+ const logFns = { info, warn, success, debug }
+ const logFn = logFns[effect.level]
+ logFn(effect.message)
+ return []
+ }
+ }
+}
+
+/**
+ * Starts the sync controller with the given configuration
+ */
+export async function start(config: Config): Promise {
+ status("Waiting for Plugin connection...")
+
+ const hashTracker = createHashTracker()
+ const fileMetadataCache = new FileMetadataCache()
+ let installer: Installer | null = null
+
+ // State machine state
+ let syncState: SyncState = {
+ mode: "disconnected",
+ socket: null,
+ pendingRemoteChanges: [],
+ }
+
+ const userActions = new PluginUserPromptCoordinator()
+
+ // State Machine Helper
+ // Process events through state machine and execute effects recursively
+ async function processEvent(event: SyncEvent) {
+ const socketState = syncState.socket?.readyState
+ debug(`[STATE] Processing event: ${event.type} (mode: ${syncState.mode}, socket: ${socketState ?? "none"})`)
+
+ const result = transition(syncState, event)
+ syncState = result.state
+
+ if (result.effects.length > 0) {
+ debug(
+ `[STATE] Event produced ${result.effects.length} effects: ${result.effects.map(e => e.type).join(", ")}`
+ )
+ }
+
+ // Execute all effects and process any follow-up events
+ for (const effect of result.effects) {
+ // Check socket state before each effect
+ const currentSocketState = syncState.socket?.readyState
+ if (currentSocketState !== undefined && currentSocketState !== 1) {
+ debug(`[STATE] Socket not open (state: ${currentSocketState}) before executing ${effect.type}`)
+ }
+
+ const followUpEvents = await executeEffect(effect, {
+ config,
+ hashTracker,
+ installer,
+ fileMetadataCache,
+ userActions,
+ syncState,
+ })
+
+ // Recursively process follow-up events
+ for (const followUpEvent of followUpEvents) {
+ await processEvent(followUpEvent)
+ }
+ }
+ }
+
+ // WebSocket Connection
+ const connection = await initConnection(config.port)
+
+ // Handle initial handshake
+ connection.on("handshake", (client: WebSocket, message) => {
+ debug(`Received handshake: ${message.projectName} (${message.projectId})`)
+
+ // Validate project hash (normalize both to short hash for comparison)
+ const expectedShort = shortProjectHash(config.projectHash)
+ const receivedShort = shortProjectHash(message.projectId)
+ if (receivedShort !== expectedShort) {
+ warn(`Project ID mismatch: expected ${expectedShort}, got ${receivedShort}`)
+ client.close()
+ return
+ }
+
+ void (async () => {
+ // Process handshake through state machine
+ await processEvent({
+ type: "HANDSHAKE",
+ socket: client,
+ projectInfo: {
+ projectId: message.projectId,
+ projectName: message.projectName,
+ },
+ })
+
+ // Initialize installer if needed
+ if (config.projectDir && !installer) {
+ installer = new Installer({
+ projectDir: config.projectDir,
+ allowUnsupportedNpm: config.allowUnsupportedNpm,
+ })
+ await installer.initialize()
+ // Start file watcher now that we have a directory
+ startWatcher()
+ }
+
+ // Cancel any pending disconnect message (fast reconnect)
+ cancelDisconnectMessage()
+
+ // Only show "Connected" on initial connection, not reconnects
+ // Reconnect confirmation happens in SYNC_COMPLETE
+ const wasDisconnected = wasRecentlyDisconnected()
+ if (!wasDisconnected && !didShowDisconnect()) {
+ success(`Connected to ${message.projectName}`)
+ }
+ })()
+ })
+
+ // Message Handler
+ async function handleMessage(message: PluginToCliMessage) {
+ // Ensure project is initialized before handling messages
+ if (!config.projectDir || !installer) {
+ warn("Received message before handshake completed - ignoring")
+ return
+ }
+
+ let event: SyncEvent | null = null
+
+ // Map incoming messages to state machine events
+ switch (message.type) {
+ case "request-files":
+ event = { type: "REQUEST_FILES" }
+ break
+
+ case "file-list": {
+ debug(`Received file list: ${message.files.length} files`)
+ event = { type: "REMOTE_FILE_LIST", files: message.files }
+ break
+ }
+
+ case "file-change":
+ event = {
+ type: "REMOTE_FILE_CHANGE",
+ file: {
+ name: message.fileName,
+ content: message.content,
+ // Remote modifiedAt is expensive to compute (requires getVerions API call), so we
+ // use local receipt time. Conflict detection uses content hashes, not timestamps.
+ modifiedAt: Date.now(),
+ },
+ fileMeta: fileMetadataCache.get(message.fileName),
+ }
+ break
+
+ case "file-delete": {
+ // Remote deletes are always applied immediately (file is already gone from Framer)
+ for (const fileName of message.fileNames) {
+ await processEvent({
+ type: "REMOTE_FILE_DELETE",
+ fileName,
+ })
+ }
+ return
+ }
+
+ case "delete-confirmed": {
+ const unmatched: string[] = []
+
+ for (const fileName of message.fileNames) {
+ const handled = userActions.handleConfirmation(`delete:${fileName}`, true)
+
+ if (!handled) {
+ unmatched.push(fileName)
+ }
+ }
+
+ for (const fileName of unmatched) {
+ await processEvent({ type: "LOCAL_DELETE_APPROVED", fileName })
+ }
+
+ return
+ }
+
+ case "delete-cancelled": {
+ for (const file of message.files) {
+ userActions.handleConfirmation(`delete:${file.fileName}`, false)
+
+ await processEvent({
+ type: "LOCAL_DELETE_REJECTED",
+ fileName: file.fileName,
+ content: file.content,
+ })
+ }
+
+ return
+ }
+
+ case "file-synced":
+ event = {
+ type: "FILE_SYNCED_CONFIRMATION",
+ fileName: message.fileName,
+ remoteModifiedAt: message.remoteModifiedAt,
+ }
+ break
+
+ case "conflicts-resolved":
+ event = {
+ type: "CONFLICTS_RESOLVED",
+ resolution: message.resolution,
+ }
+ break
+
+ case "conflict-version-response":
+ event = {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: message.versions,
+ }
+ break
+
+ default:
+ warn(`Unhandled message type: ${message.type}`)
+ return
+ }
+
+ await processEvent(event)
+ }
+
+ connection.on("message", (message: PluginToCliMessage) => {
+ void (async () => {
+ try {
+ await handleMessage(message)
+ } catch (err) {
+ error("Error handling message:", err)
+ }
+ })()
+ })
+
+ connection.on("disconnect", () => {
+ // Schedule disconnect message with delay - if reconnect happens quickly, we skip it
+ scheduleDisconnectMessage(() => {
+ status("Disconnected, waiting to reconnect...")
+ })
+ void (async () => {
+ await processEvent({ type: "DISCONNECT" })
+ userActions.cleanup()
+ })()
+ })
+
+ connection.on("error", err => {
+ error("Error on WebSocket connection:", err)
+ })
+
+ // File Watcher Setup
+ // Watcher will be initialized after handshake when filesDir is set
+ let watcher: ReturnType | null = null
+
+ const startWatcher = () => {
+ if (!config.filesDir || watcher) return
+ watcher = initWatcher(config.filesDir)
+
+ watcher.on("change", event => {
+ void processEvent({ type: "WATCHER_EVENT", event })
+ })
+ }
+
+ // Graceful shutdown
+ process.on("SIGINT", () => {
+ console.log() // newline after ^C
+ status("Shutting down...")
+ void (async () => {
+ if (watcher) {
+ await watcher.close()
+ }
+ connection.close()
+ process.exit(0)
+ })()
+ })
+}
+
+// Exported for the state-machine unit tests; runtime callers use start().
+export { transition }
diff --git a/packages/code-link-cli/src/helpers/connection.ts b/packages/code-link-cli/src/helpers/connection.ts
new file mode 100644
index 000000000..e10720ba7
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/connection.ts
@@ -0,0 +1,174 @@
+/**
+ * WebSocket connection helper
+ *
+ * Wrapper around ws.Server that normalizes handshake and surfaces callbacks.
+ */
+
+import type { CliToPluginMessage, PluginToCliMessage } from "@code-link/shared"
+import { WebSocket, WebSocketServer } from "ws"
+import { debug, error } from "../utils/logging.ts"
+
+export interface ConnectionCallbacks {
+ onHandshake: (client: WebSocket, message: { projectId: string; projectName: string }) => void
+ onMessage: (message: PluginToCliMessage) => void
+ onDisconnect: () => void
+ onError: (error: Error) => void
+}
+
+export interface Connection {
+ on(event: "handshake", handler: ConnectionCallbacks["onHandshake"]): void
+ on(event: "message", handler: ConnectionCallbacks["onMessage"]): void
+ on(event: "disconnect", handler: ConnectionCallbacks["onDisconnect"]): void
+ on(event: "error", handler: ConnectionCallbacks["onError"]): void
+ close(): void
+}
+
+/**
+ * Initializes a WebSocket server and returns a connection interface
+ * Returns a Promise that resolves when the server is ready, or rejects on startup errors
+ */
+export function initConnection(port: number): Promise<Connection> {
+ return new Promise((resolve, reject) => {
+ const wss = new WebSocketServer({ port })
+ const handlers: Partial<ConnectionCallbacks> = {}
+ let connectionId = 0
+ let isReady = false
+
+ // Handle server-level errors (e.g., EADDRINUSE)
+ wss.on("error", (err: NodeJS.ErrnoException) => {
+ if (!isReady) {
+ // Startup error - reject the promise with a helpful message
+ if (err.code === "EADDRINUSE") {
+ error(`Port ${port} is already in use.`)
+ error(`This usually means another instance of Code Link is already running.`)
+ error(``)
+ error(`To fix this:`)
+ error(` 1. Close any other terminal running Code Link for this project`)
+ error(` 2. Or run: lsof -i :${port} | grep LISTEN`)
+ error(` Then kill the process: kill -9 `)
+ reject(new Error(`Port ${port} is already in use`))
+ } else {
+ error(`Failed to start WebSocket server: ${err.message}`)
+ reject(err)
+ }
+ return
+ }
+ // Runtime error - log but don't crash
+ error(`WebSocket server error: ${err.message}`)
+ })
+
+ // Server is ready when it starts listening
+ wss.on("listening", () => {
+ isReady = true
+ debug(`WebSocket server listening on port ${port}`)
+
+ wss.on("connection", (ws: WebSocket) => {
+ const connId = ++connectionId
+ let handshakeReceived = false
+ debug(`Client connected (conn ${connId})`)
+
+ ws.on("message", (data: Buffer) => {
+ try {
+ const message = JSON.parse(data.toString()) as PluginToCliMessage
+
+ // Special handling for handshake
+ if (message.type === "handshake") {
+ debug(`Received handshake (conn ${connId})`)
+ handshakeReceived = true
+ handlers.onHandshake?.(ws, message)
+ } else if (handshakeReceived) {
+ handlers.onMessage?.(message)
+ } else {
+ // Ignore messages before handshake - plugin will send full snapshot after
+ debug(`Ignoring ${message.type} before handshake (conn ${connId})`)
+ }
+ } catch (err) {
+ error(`Failed to parse message:`, err)
+ }
+ })
+
+ ws.on("close", (code, reason) => {
+ debug(`Client disconnected (code: ${code}, reason: ${reason.toString()})`)
+ handlers.onDisconnect?.()
+ })
+
+ ws.on("error", err => {
+ error(`WebSocket error:`, err)
+ })
+ })
+
+ resolve({
+ on(event, handler) {
+ switch (event) {
+ case "handshake":
+ handlers.onHandshake = handler as ConnectionCallbacks["onHandshake"]
+ break
+ case "message":
+ handlers.onMessage = handler as ConnectionCallbacks["onMessage"]
+ break
+ case "disconnect":
+ handlers.onDisconnect = handler as ConnectionCallbacks["onDisconnect"]
+ break
+ case "error":
+ handlers.onError = handler as ConnectionCallbacks["onError"]
+ break
+ }
+ },
+
+ close(): void {
+ wss.close()
+ },
+ } satisfies Connection)
+ })
+ })
+}
+
+/**
+ * WebSocket readyState constants for reference
+ */
+const READY_STATE = {
+ CONNECTING: 0,
+ OPEN: 1,
+ CLOSING: 2,
+ CLOSED: 3,
+} as const
+
+function readyStateToString(state: number): string {
+ switch (state) {
+ case 0:
+ return "CONNECTING"
+ case 1:
+ return "OPEN"
+ case 2:
+ return "CLOSING"
+ case 3:
+ return "CLOSED"
+ default:
+ return `UNKNOWN(${state})`
+ }
+}
+
+/**
+ * Sends a message to a connected socket
+ * Returns false if the socket is not open (instead of throwing)
+ */
+export function sendMessage(socket: WebSocket, message: CliToPluginMessage): Promise<boolean> {
+ return new Promise(resolve => {
+ // Check socket state before attempting to send
+ if (socket.readyState !== READY_STATE.OPEN) {
+ const stateStr = readyStateToString(socket.readyState)
+ debug(`Cannot send ${message.type}: socket is ${stateStr}`)
+ resolve(false)
+ return
+ }
+
+ socket.send(JSON.stringify(message), err => {
+ if (err) {
+ debug(`Send error for ${message.type}: ${err.message}`)
+ resolve(false)
+ } else {
+ resolve(true)
+ }
+ })
+ })
+}
diff --git a/packages/code-link-cli/src/helpers/files.test.ts b/packages/code-link-cli/src/helpers/files.test.ts
new file mode 100644
index 000000000..169c08c36
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/files.test.ts
@@ -0,0 +1,398 @@
+import fs from "fs/promises"
+import os from "os"
+import path from "path"
+import { describe, expect, it } from "vitest"
+import type { Conflict } from "../types.ts"
+import { hashFileContent } from "../utils/state-persistence.ts"
+import { autoResolveConflicts, DEFAULT_REMOTE_DRIFT_MS, detectConflicts } from "./files.ts"
+
+function makeConflict(overrides: Partial<Conflict> = {}): Conflict {
+ return {
+ fileName: overrides.fileName ?? "Test.tsx",
+ localContent: "localContent" in overrides ? overrides.localContent : "local",
+ remoteContent: "remoteContent" in overrides ? overrides.remoteContent : "remote",
+ localModifiedAt: overrides.localModifiedAt ?? Date.now(),
+ remoteModifiedAt: overrides.remoteModifiedAt ?? Date.now(),
+ lastSyncedAt: "lastSyncedAt" in overrides ? overrides.lastSyncedAt : Date.now(),
+ localClean: overrides.localClean,
+ }
+}
+
+// Auto-Resolve Conflicts Tests
+describe("autoResolveConflicts", () => {
+ it("classifies conflicts as local when remote unchanged and local changed", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: false,
+ })
+
+ const result = autoResolveConflicts([conflict], [{ fileName: conflict.fileName, latestRemoteVersionMs: 5_000 }])
+
+ expect(result.autoResolvedLocal).toHaveLength(1)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("classifies conflicts as remote when local is clean and remote changed", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: 10_000 }]
+ )
+
+ expect(result.autoResolvedRemote).toHaveLength(1)
+ expect(result.autoResolvedLocal).toHaveLength(0)
+ })
+
+ it("keeps conflicts that have both sides changed", () => {
+ const syncTime = 5_000
+ const conflict = makeConflict({
+ lastSyncedAt: syncTime,
+ localClean: false,
+ })
+
+ // Remote changed well after sync (beyond drift threshold)
+ const remoteTime = syncTime + DEFAULT_REMOTE_DRIFT_MS + 1000
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: remoteTime }]
+ )
+
+ expect(result.remainingConflicts).toHaveLength(1)
+ expect(result.autoResolvedLocal).toHaveLength(0)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ })
+
+ it("auto-resolves to remote when local is clean, even without version data", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // Local is clean (unchanged), so safe to take remote without needing version data
+ expect(result.autoResolvedRemote).toHaveLength(1)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("keeps conflicts when version data is missing and local was modified", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: false, // Local was modified
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // Can't determine if remote also changed, so keep as conflict
+ expect(result.remainingConflicts).toHaveLength(1)
+ expect(result.autoResolvedLocal).toHaveLength(0)
+ })
+
+ it("auto-resolves remote deletion when local is clean", () => {
+ const conflict = makeConflict({
+ remoteContent: null, // Deleted in Framer
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // Remote deletion with clean local -> auto-resolve to remote (delete locally)
+ expect(result.autoResolvedRemote).toHaveLength(1)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("keeps conflict when remote deleted but local modified", () => {
+ const conflict = makeConflict({
+ remoteContent: null, // Deleted in Framer
+ localClean: false, // But local was modified
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // User must decide: keep local changes or accept deletion
+ expect(result.remainingConflicts).toHaveLength(1)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ })
+
+ it("keeps conflict when localClean is undefined (no persisted state)", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: undefined,
+ localClean: undefined, // No persisted state to compare against
+ })
+
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: 10_000 }]
+ )
+
+ // Can't determine if local changed, so keep as conflict
+ expect(result.remainingConflicts).toHaveLength(1)
+ })
+
+ it("auto-resolves local deletion when remote unchanged", () => {
+ const syncTime = 5_000
+ const conflict = makeConflict({
+ localContent: null, // Deleted locally
+ lastSyncedAt: syncTime,
+ localClean: undefined, // N/A for deletions
+ })
+
+ // Remote version is same as last sync (within drift tolerance)
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: syncTime }]
+ )
+
+ // Remote unchanged since sync, local deleted -> keep deletion (resolve to local)
+ expect(result.autoResolvedLocal).toHaveLength(1)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("keeps conflict when local deleted but remote changed", () => {
+ const syncTime = 5_000
+ const conflict = makeConflict({
+ localContent: null, // Deleted locally
+ lastSyncedAt: syncTime,
+ localClean: undefined,
+ })
+
+ // Remote was modified well after last sync (beyond drift threshold)
+ const remoteTime = syncTime + DEFAULT_REMOTE_DRIFT_MS + 1000
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: remoteTime }]
+ )
+
+ // Both sides changed: local deleted, remote modified -> conflict
+ expect(result.remainingConflicts).toHaveLength(1)
+ })
+})
+
+// Detect Conflicts Tests
+describe("detectConflicts", () => {
+ it("marks conflicts as localClean when local matches persisted state", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ const localContent = "local content"
+ await fs.writeFile(path.join(filesDir, "Test.tsx"), localContent, "utf-8")
+
+ // Keys are normalized to lowercase for case-insensitive lookup
+ const persistedState = new Map([
+ ["test.tsx", { contentHash: hashFileContent(localContent), timestamp: 1_000 }],
+ ])
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "Test.tsx",
+ content: "remote content",
+ modifiedAt: 2_000,
+ },
+ ],
+ filesDir,
+ { persistedState }
+ )
+
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localClean).toBe(true)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects remote-only files as safe writes (new files to download)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // No local files, one remote file
+ const result = await detectConflicts(
+ [
+ {
+ name: "NewFromFramer.tsx",
+ content: "export const New = () => <div>New</div>",
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Remote-only file should be a safe write
+ expect(result.writes).toHaveLength(1)
+ expect(result.writes[0]?.name).toBe("NewFromFramer.tsx")
+ expect(result.conflicts).toHaveLength(0)
+ expect(result.localOnly).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects local-only files (new files to upload)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Create a local file that doesn't exist in remote
+ await fs.writeFile(
+ path.join(filesDir, "LocalOnly.tsx"),
+ "export const Local = () => <div>Local</div>",
+ "utf-8"
+ )
+
+ const result = await detectConflicts(
+ [], // No remote files
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Local-only file should be detected
+ expect(result.localOnly).toHaveLength(1)
+ expect(result.localOnly[0]?.name).toBe("LocalOnly.tsx")
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("handles case-insensitive file matching (macOS/Windows compat)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Local file with different casing than remote
+ await fs.writeFile(path.join(filesDir, "mycomponent.tsx"), "local content", "utf-8")
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "MyComponent.tsx", // Different casing
+ content: "remote content",
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Should detect as conflict, not as two separate files
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.localOnly).toHaveLength(0)
+ expect(result.writes).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects local deletion while offline as conflict", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // File was previously synced but now missing locally
+ // Keys are normalized to lowercase for case-insensitive lookup
+ const persistedState = new Map([
+ ["deletedlocally.tsx", { contentHash: hashFileContent("old content"), timestamp: 1_000 }],
+ ])
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "DeletedLocally.tsx",
+ content: "remote content still exists",
+ modifiedAt: 2_000,
+ },
+ ],
+ filesDir,
+ { persistedState }
+ )
+
+ // Should be a conflict: local=null (deleted), remote=content
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localContent).toBe(null)
+ expect(result.conflicts[0]?.remoteContent).toBe("remote content still exists")
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects remote deletion while offline as conflict", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Local file still exists
+ await fs.writeFile(path.join(filesDir, "DeletedRemotely.tsx"), "local content still exists", "utf-8")
+
+ // File was previously synced
+ // Keys are normalized to lowercase for case-insensitive lookup
+ const persistedState = new Map([
+ [
+ "deletedremotely.tsx",
+ {
+ contentHash: hashFileContent("local content still exists"),
+ timestamp: 1_000,
+ },
+ ],
+ ])
+
+ const result = await detectConflicts(
+ [], // File no longer in remote
+ filesDir,
+ { persistedState }
+ )
+
+ // Should be a conflict: local=content, remote=null (deleted)
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localContent).toBe("local content still exists")
+ expect(result.conflicts[0]?.remoteContent).toBe(null)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("treats identical content as unchanged (no write needed)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ const content = "export const Same = () => <div>Same</div>"
+ await fs.writeFile(path.join(filesDir, "Same.tsx"), content, "utf-8")
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "Same.tsx",
+ content, // Same content
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // No write needed, no conflict
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(0)
+ expect(result.unchanged).toHaveLength(1)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+})
diff --git a/packages/code-link-cli/src/helpers/files.ts b/packages/code-link-cli/src/helpers/files.ts
new file mode 100644
index 000000000..5fe2a692e
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/files.ts
@@ -0,0 +1,431 @@
+/**
+ * File operations helper
+ *
+ * Single place that understands disk + conflicts. Provides:
+ * - listFiles: returns current filesystem state
+ * - detectConflicts: compares remote vs local and returns conflicts + safe writes
+ * - writeRemoteFiles: applies writes/deletes from remote
+ * - deleteLocalFile: removes a file from disk
+ *
+ * Controller decides WHEN to call these, but never computes conflicts itself.
+ */
+
+import { fileKeyForLookup, normalizePath, sanitizeFilePath } from "@code-link/shared"
+import fs from "fs/promises"
+import path from "path"
+import type { Conflict, ConflictResolution, ConflictVersionData, FileInfo } from "../types.ts"
+import type { createHashTracker, HashTracker } from "../utils/hash-tracker.ts"
+import { debug, warn } from "../utils/logging.ts"
+import { hashFileContent, type PersistedFileState } from "../utils/state-persistence.ts"
+
+const SUPPORTED_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".json"]
+const DEFAULT_EXTENSION = ".tsx"
+// Allow for clock drift + network latency when comparing timestamps
+export const DEFAULT_REMOTE_DRIFT_MS = 2000
+
+/**
+ * Lists all supported files in the files directory
+ */
+export async function listFiles(filesDir: string): Promise<FileInfo[]> {
+ const files: FileInfo[] = []
+
+ async function walk(currentDir: string): Promise<void> {
+ const entries = await fs.readdir(currentDir, { withFileTypes: true })
+
+ for (const entry of entries) {
+ const entryPath = path.join(currentDir, entry.name)
+
+ if (entry.isDirectory()) {
+ await walk(entryPath)
+ continue
+ }
+
+ if (!isSupportedExtension(entry.name)) continue
+
+ const relativePath = path.relative(filesDir, entryPath)
+ const normalizedPath = normalizePath(relativePath)
+ // Don't capitalize when listing existing files - preserve their actual names
+ const sanitizedPath = sanitizeFilePath(normalizedPath, false).path
+
+ try {
+ const [content, stats] = await Promise.all([fs.readFile(entryPath, "utf-8"), fs.stat(entryPath)])
+
+ files.push({
+ name: sanitizedPath,
+ content,
+ modifiedAt: stats.mtimeMs,
+ })
+ } catch (err) {
+ warn(`Failed to read ${entryPath}:`, err)
+ }
+ }
+ }
+
+ try {
+ await walk(filesDir)
+ } catch (err) {
+ warn("Failed to list files:", err)
+ }
+
+ return files
+}
+
+/**
+ * Detects conflicts between remote files and local filesystem
+ * Returns conflicts that need user resolution and safe writes that can be applied
+ */
+export interface ConflictDetectionOptions {
+ preferRemote?: boolean
+ detectConflicts?: boolean
+ persistedState?: Map<string, PersistedFileState>
+}
+
+export async function detectConflicts(
+ remoteFiles: FileInfo[],
+ filesDir: string,
+ options: ConflictDetectionOptions = {}
+): Promise<{ conflicts: Conflict[]; writes: FileInfo[]; localOnly: FileInfo[]; unchanged: FileInfo[] }> {
+ const conflicts: Conflict[] = []
+ const writes: FileInfo[] = []
+ const localOnly: FileInfo[] = []
+ const unchanged: FileInfo[] = []
+ const detect = options.detectConflicts ?? true
+ const preferRemote = options.preferRemote ?? false
+ const persistedState = options.persistedState
+
+ // Persisted state keys are normalized to lowercase for case-insensitive lookup
+ const getPersistedState = (fileName: string) => persistedState?.get(fileKeyForLookup(fileName))
+
+ debug(`Detecting conflicts for ${String(remoteFiles.length)} remote files`)
+
+ // Build a snapshot of all local files (keyed by lowercase for case-insensitive matching)
+ const localFiles = await listFiles(filesDir)
+ const localFileMap = new Map(localFiles.map(f => [fileKeyForLookup(f.name), f]))
+
+ // Build a set of remote file names for quick lookup (lowercase keys)
+ const remoteFileMap = new Map(
+ remoteFiles.map(f => {
+ const normalized = resolveRemoteReference(filesDir, f.name)
+ return [fileKeyForLookup(normalized.relativePath), f]
+ })
+ )
+
+ // Track which files we've processed (lowercase for case-insensitive matching)
+ const processedFiles = new Set<string>()
+
+ // Process remote files (remote-only or both sides)
+ for (const remote of remoteFiles) {
+ const normalized = resolveRemoteReference(filesDir, remote.name)
+ const normalizedKey = fileKeyForLookup(normalized.relativePath)
+ const local = localFileMap.get(normalizedKey)
+ processedFiles.add(normalizedKey)
+
+ const persisted = getPersistedState(normalized.relativePath)
+ const localHash = local ? hashFileContent(local.content) : null
+ const localMatchesPersisted = !!persisted && !!local && localHash === persisted.contentHash
+
+ if (!local) {
+ // File exists in remote but not locally
+ if (persisted) {
+ // File was previously synced but now missing locally → deleted locally while offline
+ // This is a conflict: local=null (deleted), remote=content
+ debug(`Conflict: ${normalized.relativePath} deleted locally while offline`)
+ conflicts.push({
+ fileName: normalized.relativePath,
+ localContent: null,
+ remoteContent: remote.content,
+ remoteModifiedAt: remote.modifiedAt,
+ lastSyncedAt: persisted.timestamp,
+ })
+ } else {
+ // New file from remote (never synced before): download
+ writes.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ }
+ continue
+ }
+
+ if (local.content === remote.content) {
+ // Content matches - no disk write needed but track for metadata
+ unchanged.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ continue
+ }
+
+ if (!detect || preferRemote) {
+ writes.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ continue
+ }
+
+ // Check if local file is "clean" (matches last persisted state)
+ // If so, we can safely overwrite it with remote changes
+ // Both sides have the file with different content -> conflict
+ const localClean = persisted ? localMatchesPersisted : undefined
+ conflicts.push({
+ fileName: normalized.relativePath,
+ localContent: local.content,
+ remoteContent: remote.content,
+ localModifiedAt: local.modifiedAt,
+ remoteModifiedAt: remote.modifiedAt,
+ lastSyncedAt: persisted?.timestamp,
+ localClean,
+ })
+ }
+
+ // Process local-only files (not present in remote)
+ for (const local of localFiles) {
+ const localKey = fileKeyForLookup(local.name)
+ if (!processedFiles.has(localKey)) {
+ const persisted = getPersistedState(local.name)
+ if (persisted) {
+ // File was previously synced but now missing from remote → deleted in Framer
+ const localHash = hashFileContent(local.content)
+ const localClean = localHash === persisted.contentHash
+ debug(`Conflict: ${local.name} deleted in Framer (localClean=${String(localClean)})`)
+ conflicts.push({
+ fileName: local.name,
+ localContent: local.content,
+ remoteContent: null,
+ localModifiedAt: local.modifiedAt,
+ lastSyncedAt: persisted.timestamp,
+ localClean,
+ })
+ } else {
+ // New local file (never synced before): upload later
+ localOnly.push({
+ name: local.name,
+ content: local.content,
+ modifiedAt: local.modifiedAt,
+ })
+ }
+ }
+ }
+
+ // Check for files in persisted state that are missing from BOTH sides
+ // These were deleted on both sides while offline - auto-clean them (no conflict)
+ if (persistedState) {
+ for (const fileName of persistedState.keys()) {
+ const normalizedKey = fileKeyForLookup(fileName)
+ const inLocal = localFileMap.has(normalizedKey)
+ const inRemote = remoteFileMap.has(normalizedKey)
+ if (!inLocal && !inRemote) {
+ debug(`[AUTO-RESOLVE] ${fileName}: deleted on both sides, no conflict`)
+ // No action needed - the file is gone from both sides
+ // The persisted state will be cleaned up when we persist
+ }
+ }
+ }
+
+ return { conflicts, writes, localOnly, unchanged }
+}
+
+export interface AutoResolveResult {
+ autoResolvedLocal: Conflict[]
+ autoResolvedRemote: Conflict[]
+ remainingConflicts: Conflict[]
+}
+
+export function autoResolveConflicts(
+ conflicts: Conflict[],
+ versions: ConflictVersionData[],
+ options: { remoteDriftMs?: number } = {}
+): AutoResolveResult {
+ const versionMap = new Map(versions.map(version => [version.fileName, version.latestRemoteVersionMs]))
+ const remoteDriftMs = options.remoteDriftMs ?? DEFAULT_REMOTE_DRIFT_MS
+
+ const autoResolvedLocal: Conflict[] = []
+ const autoResolvedRemote: Conflict[] = []
+ const remainingConflicts: Conflict[] = []
+
+ for (const conflict of conflicts) {
+ const latestRemoteVersionMs = versionMap.get(conflict.fileName)
+ const lastSyncedAt = conflict.lastSyncedAt
+ const localClean = conflict.localClean === true
+
+ debug(`Auto-resolve checking ${conflict.fileName}`)
+
+ // Remote deletion: file deleted in Framer
+ if (conflict.remoteContent === null) {
+ if (localClean) {
+ debug(` Remote deleted, local clean -> REMOTE (delete locally)`)
+ autoResolvedRemote.push(conflict)
+ } else {
+ debug(` Remote deleted, local modified -> conflict`)
+ remainingConflicts.push(conflict)
+ }
+ continue
+ }
+
+ // If local is clean (unchanged since last sync), we can safely take remote
+ // regardless of version data availability - local hasn't changed
+ if (localClean) {
+ debug(` Local clean -> REMOTE (safe to overwrite)`)
+ autoResolvedRemote.push(conflict)
+ continue
+ }
+
+ // From here, local has been modified. We need version data to determine
+ // if remote also changed (to avoid overwriting remote changes).
+ if (!latestRemoteVersionMs) {
+ debug(` Local modified, no remote version data -> conflict`)
+ remainingConflicts.push(conflict)
+ continue
+ }
+
+ if (!lastSyncedAt) {
+ debug(` Local modified, no sync timestamp -> conflict`)
+ remainingConflicts.push(conflict)
+ continue
+ }
+
+ debug(` Remote: ${new Date(latestRemoteVersionMs).toISOString()}`)
+ debug(` Synced: ${new Date(lastSyncedAt).toISOString()}`)
+
+ const remoteUnchanged = latestRemoteVersionMs <= lastSyncedAt + remoteDriftMs
+ const driftMargin = latestRemoteVersionMs - lastSyncedAt
+
+ if (remoteUnchanged) {
+ debug(` Remote unchanged, local changed -> LOCAL`)
+ if (driftMargin > 0) {
+ debug(` (within drift tolerance: ${driftMargin}ms < ${remoteDriftMs}ms threshold)`)
+ }
+ autoResolvedLocal.push(conflict)
+ } else {
+ debug(` Both changed -> conflict (remote ahead by ${driftMargin}ms, threshold: ${remoteDriftMs}ms)`)
+ remainingConflicts.push(conflict)
+ }
+ }
+
+ return {
+ autoResolvedLocal,
+ autoResolvedRemote,
+ remainingConflicts,
+ }
+}
+
+/**
+ * Writes remote files to disk and updates hash tracker to prevent echoes
+ * CRITICAL: Update hashTracker BEFORE writing to disk
+ */
+export async function writeRemoteFiles(
+ files: FileInfo[],
+ filesDir: string,
+ hashTracker: HashTracker,
+ installer?: { process: (fileName: string, content: string) => void }
+): Promise<void> {
+ debug(`Writing ${files.length} remote files`)
+
+ for (const file of files) {
+ try {
+ const normalized = resolveRemoteReference(filesDir, file.name)
+ const fullPath = normalized.absolutePath
+
+ // Ensure directory exists
+ await fs.mkdir(path.dirname(fullPath), { recursive: true })
+
+ // CRITICAL ORDER: Update hash tracker FIRST (in memory)
+ hashTracker.remember(normalized.relativePath, file.content)
+
+ // THEN write to disk
+ await fs.writeFile(fullPath, file.content, "utf-8")
+
+ debug(`Wrote file: ${normalized.relativePath}`)
+
+ // Trigger type installer if available
+ installer?.process(normalized.relativePath, file.content)
+ } catch (err) {
+ warn(`Failed to write file ${file.name}:`, err)
+ }
+ }
+}
+
+/**
+ * Deletes a local file from disk
+ */
+export async function deleteLocalFile(fileName: string, filesDir: string, hashTracker: HashTracker): Promise<void> {
+ const normalized = resolveRemoteReference(filesDir, fileName)
+
+ try {
+ // CRITICAL ORDER: Mark delete FIRST (in memory) to prevent echo
+ hashTracker.markDelete(normalized.relativePath)
+
+ // THEN delete from disk
+ await fs.unlink(normalized.absolutePath)
+
+ // Clear the hash immediately
+ hashTracker.forget(normalized.relativePath)
+
+ debug(`Deleted file: ${normalized.relativePath}`)
+ } catch (err) {
+ const nodeError = err as NodeJS.ErrnoException
+
+ if (nodeError.code === "ENOENT") {
+ // Treat missing files as already deleted to keep hash tracker in sync
+ hashTracker.forget(normalized.relativePath)
+ debug(`File already deleted: ${normalized.relativePath}`)
+ return
+ }
+
+ // Clear pending delete marker immediately on failure
+ hashTracker.clearDelete(normalized.relativePath)
+ warn(`Failed to delete file ${fileName}:`, err)
+ }
+}
+
+/**
+ * Reads a single file from disk (safe, returns null on error)
+ */
+export async function readFileSafe(fileName: string, filesDir: string): Promise<string | null> {
+ const normalized = resolveRemoteReference(filesDir, fileName)
+
+ try {
+ return await fs.readFile(normalized.absolutePath, "utf-8")
+ } catch {
+ return null
+ }
+}
+
+/**
+ * Filter out files whose content matches the last remembered hash.
+ * Used to skip inbound echoes of our own local sends.
+ */
+export function filterEchoedFiles(files: FileInfo[], hashTracker: ReturnType<typeof createHashTracker>): FileInfo[] {
+ return files.filter(file => {
+ return !hashTracker.shouldSkip(file.name, file.content)
+ })
+}
+
+function resolveRemoteReference(filesDir: string, rawName: string) {
+ const normalized = sanitizeRelativePath(rawName)
+ const absolutePath = path.join(filesDir, normalized.relativePath)
+ return { ...normalized, absolutePath }
+}
+
+function sanitizeRelativePath(relativePath: string) {
+ const trimmed = normalizePath(relativePath.trim())
+ const hasExtension = SUPPORTED_EXTENSIONS.some(ext => trimmed.toLowerCase().endsWith(ext))
+ const candidate = hasExtension ? trimmed : `${trimmed}${DEFAULT_EXTENSION}`
+ // Don't capitalize when processing remote files - preserve exact casing from Framer
+ const sanitized = sanitizeFilePath(candidate, false)
+ const normalized = normalizePath(sanitized.path)
+
+ return {
+ relativePath: normalized,
+ extension: sanitized.extension || path.extname(normalized) || DEFAULT_EXTENSION,
+ }
+}
+
+function isSupportedExtension(fileName: string) {
+ const lower = fileName.toLowerCase()
+ return SUPPORTED_EXTENSIONS.some(ext => lower.endsWith(ext))
+}
diff --git a/packages/code-link-cli/src/helpers/installer.ts b/packages/code-link-cli/src/helpers/installer.ts
new file mode 100644
index 000000000..259e04010
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/installer.ts
@@ -0,0 +1,565 @@
+/**
+ * Type installer helper using @typescript/ata
+ */
+
+import { setupTypeAcquisition } from "@typescript/ata"
+import fs from "fs/promises"
+import path from "path"
+import ts from "typescript"
+import { extractImports } from "../utils/imports.ts"
+import { debug, error, warn } from "../utils/logging.ts"
+
+export interface InstallerConfig {
+ projectDir: string
+ allowUnsupportedNpm?: boolean
+}
+
+/** npm registry package.json exports field value */
+interface NpmExportValue {
+ import?: string
+ require?: string
+ types?: string
+}
+
+/** npm registry API response for a single package version */
+interface NpmPackageVersion {
+ exports?: Record
+}
+
+/** npm registry API response */
+interface NpmRegistryResponse {
+ "dist-tags"?: { latest?: string }
+ versions?: Record
+}
+
+const FETCH_TIMEOUT_MS = 60_000
+const MAX_FETCH_RETRIES = 3
+const MAX_CONSECUTIVE_FAILURES = 10
+const REACT_TYPES_VERSION = "18.3.12"
+const REACT_DOM_TYPES_VERSION = "18.3.1"
+const CORE_LIBRARIES = ["framer-motion", "framer"]
+const JSON_EXTENSION_REGEX = /\.json$/i
+
+/**
+ * Packages that are officially supported for type acquisition.
+ * Use --unsupported-npm flag to allow other packages.
+ */
+const SUPPORTED_PACKAGES = new Set([
+ "framer",
+ "framer-motion",
+ "react",
+ "@types/react",
+ "eventemitter3",
+ "csstype",
+ "motion-dom",
+ "motion-utils",
+])
+
+/**
+ * Installer class for managing automatic type acquisition.
+ */
+export class Installer {
+ private projectDir: string
+ private allowUnsupportedNpm: boolean
+ private ata: ReturnType
+ private processedImports = new Set()
+ private initializationPromise: Promise | null = null
+
+ constructor(config: InstallerConfig) {
+ this.projectDir = config.projectDir
+ this.allowUnsupportedNpm = config.allowUnsupportedNpm ?? false
+
+ const seenPackages = new Set()
+
+ this.ata = setupTypeAcquisition({
+ projectName: "framer-code-link",
+ typescript: ts,
+ logger: console,
+ fetcher: fetchWithRetry,
+ delegate: {
+ started: () => {
+ seenPackages.clear()
+ debug("ATA: fetching type definitions...")
+ },
+ progress: () => {
+ // intentionally noop – progress noise is not helpful in CLI output
+ },
+ finished: files => {
+ if (files.size > 0) {
+ debug("ATA: type acquisition complete")
+ }
+ },
+ errorMessage: (message: string, error: Error) => {
+ warn(`ATA warning: ${message}`, error)
+ },
+ receivedFile: (code: string, receivedPath: string) => {
+ void (async () => {
+ const normalized = receivedPath.replace(/^\//, "")
+ const destination = path.join(this.projectDir, normalized)
+
+ const pkgMatch = /\/node_modules\/(@?[^/]+(?:\/[^/]+)?)\//.exec(receivedPath)
+
+ // Check if file already exists with same content
+ try {
+ const existing = await fs.readFile(destination, "utf-8")
+ if (existing === code) {
+ if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
+ seenPackages.add(pkgMatch[1])
+ debug(`📦 Types: ${pkgMatch[1]} (from disk cache)`)
+ }
+ return // Skip write if identical
+ }
+ } catch {
+ // File doesn't exist or can't be read, proceed with write
+ }
+
+ if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
+ seenPackages.add(pkgMatch[1])
+ debug(`📦 Types: ${pkgMatch[1]}`)
+ }
+
+ await this.writeTypeFile(receivedPath, code)
+ })()
+ },
+ },
+ })
+
+ debug("Type installer initialized")
+ }
+
+ /**
+ * Ensure the project scaffolding exists (tsconfig, declarations, etc.)
+ */
+ async initialize(): Promise {
+ if (this.initializationPromise) {
+ return this.initializationPromise
+ }
+
+ this.initializationPromise = this.initializeProject()
+ .then(() => {
+ debug("Type installer initialization complete")
+ })
+ .catch((err: unknown) => {
+ this.initializationPromise = null
+ throw err
+ })
+
+ return this.initializationPromise
+ }
+
+ /**
+ * Fire-and-forget processing of a component file to fetch missing types.
+ * JSON files are ignored.
+ */
+ process(fileName: string, content: string): void {
+ if (!content || JSON_EXTENSION_REGEX.test(fileName)) {
+ return
+ }
+
+ Promise.resolve()
+ .then(async () => {
+ await this.processImports(fileName, content)
+ })
+ .catch((err: unknown) => {
+ debug(`Type installer failed for ${fileName}`, err)
+ })
+ }
+
+ // Internal helpers
+
+ private async initializeProject(): Promise {
+ await Promise.all([
+ this.ensureTsConfig(),
+ this.ensurePrettierConfig(),
+ this.ensureFramerDeclarations(),
+ this.ensurePackageJson(),
+ ])
+
+ // Fire-and-forget type installation - don't block initialization
+ Promise.resolve()
+ .then(async () => {
+ await this.ensureReact18Types()
+
+ const coreImports = CORE_LIBRARIES.map(lib => `import "${lib}";`).join("\n")
+ await this.ata(coreImports)
+ })
+ .catch((err: unknown) => {
+ debug("Type installation failed", err)
+ })
+ }
+
+ private async processImports(fileName: string, content: string): Promise {
+ const allImports = extractImports(content).filter(i => i.type === "npm")
+
+ if (allImports.length === 0) return
+
+ // Filter to supported packages unless --unsupported-npm flag is set
+ const imports = this.allowUnsupportedNpm ? allImports : allImports.filter(i => this.isSupportedPackage(i.name))
+
+ const unsupportedCount = allImports.length - imports.length
+ if (unsupportedCount > 0 && !this.allowUnsupportedNpm) {
+ const unsupported = allImports.filter(i => !this.isSupportedPackage(i.name)).map(i => i.name)
+ debug(`Skipping unsupported packages: ${unsupported.join(", ")} (use --unsupported-npm to enable)`)
+ }
+
+ if (imports.length === 0) {
+ return
+ }
+
+ const hash = imports
+ .map(imp => imp.name)
+ .sort()
+ .join(",")
+
+ if (this.processedImports.has(hash)) {
+ return
+ }
+
+ this.processedImports.add(hash)
+ debug(`Processing imports for ${fileName} (${imports.length} packages)`)
+
+ // Build filtered content with only supported imports for ATA
+ const filteredContent = this.allowUnsupportedNpm ? content : this.buildFilteredImports(imports)
+
+ try {
+ await this.ata(filteredContent)
+ } catch (err) {
+ warn(`ATA failed for ${fileName}`, err as Error)
+ }
+ }
+
+ /**
+ * Check if a package is in the supported list.
+ * Also checks for subpath imports (e.g., "framer/build" -> "framer")
+ */
+ private isSupportedPackage(pkgName: string): boolean {
+ // Direct match
+ if (SUPPORTED_PACKAGES.has(pkgName)) {
+ return true
+ }
+
+ // Check if base package is supported (e.g., "framer-motion/dist" -> "framer-motion")
+ const basePkg = pkgName.startsWith("@") ? pkgName.split("/").slice(0, 2).join("/") : pkgName.split("/")[0]
+
+ return SUPPORTED_PACKAGES.has(basePkg)
+ }
+
+ /**
+ * Build synthetic import statements for ATA from filtered imports
+ */
+ private buildFilteredImports(imports: { name: string }[]): string {
+ return imports.map(imp => `import "${imp.name}";`).join("\n")
+ }
+
+ private async writeTypeFile(receivedPath: string, code: string): Promise {
+ const normalized = receivedPath.replace(/^\//, "")
+ const destination = path.join(this.projectDir, normalized)
+
+ try {
+ await fs.mkdir(path.dirname(destination), { recursive: true })
+ await fs.writeFile(destination, code, "utf-8")
+ } catch (err) {
+ warn(`Failed to write type file ${destination}`, err)
+ return
+ }
+
+ if (/node_modules\/@types\/[^/]+\/index\.d\.ts$/.exec(normalized)) {
+ await this.ensureTypesPackageJson(normalized)
+ }
+ }
+
+ private async ensureTypesPackageJson(normalizedPath: string): Promise {
+ const pkgMatch = /node_modules\/(@types\/[^/]+)\//.exec(normalizedPath)
+ if (!pkgMatch) return
+
+ const pkgName = pkgMatch[1]
+ const pkgDir = path.join(this.projectDir, "node_modules", pkgName)
+ const pkgJsonPath = path.join(pkgDir, "package.json")
+
+ try {
+ const response = await fetch(`https://registry.npmjs.org/${pkgName}`)
+ if (!response.ok) return
+
+ const npmData = (await response.json()) as NpmRegistryResponse
+ const version = npmData["dist-tags"]?.latest
+ if (!version || !npmData.versions?.[version]) return
+
+ const pkg = npmData.versions[version]
+ if (pkg.exports) {
+ for (const key of Object.keys(pkg.exports)) {
+ pkg.exports[key] = fixExportTypes(pkg.exports[key])
+ }
+ }
+
+ await fs.mkdir(pkgDir, { recursive: true })
+ await fs.writeFile(pkgJsonPath, JSON.stringify(pkg, null, 2))
+ } catch {
+ // best-effort
+ }
+ }
+
+ private async ensureTsConfig(): Promise {
+ const tsconfigPath = path.join(this.projectDir, "tsconfig.json")
+ try {
+ await fs.access(tsconfigPath)
+ debug("tsconfig.json already exists")
+ } catch {
+ const config = {
+ compilerOptions: {
+ noEmit: true,
+ target: "ES2021",
+ lib: ["ES2021", "DOM", "DOM.Iterable"],
+ module: "ESNext",
+ moduleResolution: "bundler",
+ customConditions: ["source"],
+ jsx: "react-jsx",
+ allowJs: true,
+ allowSyntheticDefaultImports: true,
+ strict: false,
+ allowImportingTsExtensions: true,
+ resolveJsonModule: true,
+ esModuleInterop: true,
+ skipLibCheck: true,
+ typeRoots: ["./node_modules/@types"],
+ },
+ include: ["files/**/*", "framer-modules.d.ts"],
+ }
+ await fs.writeFile(tsconfigPath, JSON.stringify(config, null, 2))
+ debug("Created tsconfig.json")
+ }
+ }
+
+ private async ensurePrettierConfig(): Promise {
+ const prettierPath = path.join(this.projectDir, ".prettierrc")
+ try {
+ await fs.access(prettierPath)
+ debug(".prettierrc already exists")
+ } catch {
+ const config = {
+ tabWidth: 4,
+ semi: false,
+ trailingComma: "es5",
+ }
+ await fs.writeFile(prettierPath, JSON.stringify(config, null, 2))
+ debug("Created .prettierrc")
+ }
+ }
+
+ private async ensureFramerDeclarations(): Promise {
+ const declarationsPath = path.join(this.projectDir, "framer-modules.d.ts")
+ try {
+ await fs.access(declarationsPath)
+ debug("framer-modules.d.ts already exists")
+ } catch {
+ const declarations = `// Type declarations for Framer URL imports
+declare module "https://framer.com/m/*"
+
+declare module "https://framerusercontent.com/*"
+
+declare module "*.json"
+`
+ await fs.writeFile(declarationsPath, declarations)
+ debug("Created framer-modules.d.ts")
+ }
+ }
+
+ private async ensurePackageJson(): Promise {
+ const packagePath = path.join(this.projectDir, "package.json")
+ try {
+ await fs.access(packagePath)
+ debug("package.json already exists")
+ } catch {
+ const pkg = {
+ name: path.basename(this.projectDir),
+ version: "1.0.0",
+ private: true,
+ description: "Framer files synced with framer-code-link",
+ }
+ await fs.writeFile(packagePath, JSON.stringify(pkg, null, 2))
+ debug("Created package.json")
+ }
+ }
+
+ // Code components in Framer use React 18
+ private async ensureReact18Types(): Promise {
+ const reactTypesDir = path.join(this.projectDir, "node_modules/@types/react")
+
+ const reactFiles = ["package.json", "index.d.ts", "global.d.ts", "jsx-runtime.d.ts", "jsx-dev-runtime.d.ts"]
+
+ if (await this.hasTypePackage(reactTypesDir, REACT_TYPES_VERSION, reactFiles)) {
+ debug("📦 React types (from cache)")
+ } else {
+ debug("Downloading React 18 types...")
+ await this.downloadTypePackage("@types/react", REACT_TYPES_VERSION, reactTypesDir, reactFiles)
+ }
+
+ const reactDomDir = path.join(this.projectDir, "node_modules/@types/react-dom")
+
+ const reactDomFiles = ["package.json", "index.d.ts", "client.d.ts"]
+
+ if (await this.hasTypePackage(reactDomDir, REACT_DOM_TYPES_VERSION, reactDomFiles)) {
+ debug("📦 React DOM types (from cache)")
+ } else {
+ await this.downloadTypePackage("@types/react-dom", REACT_DOM_TYPES_VERSION, reactDomDir, reactDomFiles)
+ }
+ }
+
+ private async hasTypePackage(destinationDir: string, version: string, files: string[]): Promise {
+ try {
+ const pkgJsonPath = path.join(destinationDir, "package.json")
+ const pkgJson = await fs.readFile(pkgJsonPath, "utf-8")
+ const parsed = JSON.parse(pkgJson) as { version?: string }
+
+ if (parsed.version !== version) {
+ return false
+ }
+
+ for (const file of files) {
+ if (file === "package.json") continue
+ await fs.access(path.join(destinationDir, file))
+ }
+
+ return true
+ } catch {
+ return false
+ }
+ }
+
+ private async downloadTypePackage(
+ pkgName: string,
+ version: string,
+ destinationDir: string,
+ files: string[]
+ ): Promise {
+ const baseUrl = `https://unpkg.com/${pkgName}@${version}`
+ await fs.mkdir(destinationDir, { recursive: true })
+
+ await Promise.all(
+ files.map(async file => {
+ const destination = path.join(destinationDir, file)
+
+ // Check if file already exists
+ try {
+ await fs.access(destination)
+ return // Skip if exists
+ } catch {
+ // File doesn't exist, download it
+ }
+
+ try {
+ const response = await fetch(`${baseUrl}/${file}`)
+ if (!response.ok) return
+ const content = await response.text()
+ await fs.writeFile(destination, content)
+ } catch {
+ // ignore per-file failures
+ }
+ })
+ )
+ }
+}
+
+// Helpers
+
+/**
+ * Transform package.json exports to include .d.ts type paths
+ */
+function fixExportTypes(value: string | NpmExportValue): string | NpmExportValue {
+ if (typeof value === "string") {
+ return {
+ types: value.replace(/\.js$/, ".d.ts").replace(/\.cjs$/, ".d.cts"),
+ }
+ }
+
+ if ((value.import ?? value.require) && !value.types) {
+ const base = value.import ?? value.require
+ value.types = base?.replace(/\.js$/, ".d.ts").replace(/\.cjs$/, ".d.cts")
+ }
+
+ return value
+}
+
+interface FetchError extends Error {
+ cause?: { code?: string }
+}
+
+/** Tracks consecutive network failures across all fetches */
+let consecutiveFailures = 0
+
+/** Reset failure counter on successful fetch */
+function resetFailureCounter(): void {
+ consecutiveFailures = 0
+}
+
+/** Check if we should give up due to persistent network issues */
+function checkFatalFailure(url: string): void {
+ consecutiveFailures++
+ if (consecutiveFailures >= MAX_CONSECUTIVE_FAILURES) {
+ error(
+ `Network unavailable - ${MAX_CONSECUTIVE_FAILURES} fetch failures.\n` +
+ ` Check your internet connection and try again.\n` +
+ ` Last failed URL: ${url}`
+ )
+ process.exit(1)
+ }
+}
+
+// ATA occasionally has some issues with larger packages e.g. framer-motion
+// We use a custom fetch handler to allow us to keep trying
+async function fetchWithRetry(
+ url: string | URL | Request,
+ init?: RequestInit,
+ retries = MAX_FETCH_RETRIES
+): Promise {
+ let urlString: string
+ if (typeof url === "string") {
+ urlString = url
+ } else if (url instanceof URL) {
+ urlString = url.href
+ } else {
+ urlString = url.url
+ }
+
+ for (let attempt = 1; attempt <= retries; attempt++) {
+ const controller = new AbortController()
+ const timeout = setTimeout(() => {
+ controller.abort()
+ }, FETCH_TIMEOUT_MS)
+
+ try {
+ const response = await fetch(url, {
+ ...init,
+ signal: controller.signal,
+ })
+ clearTimeout(timeout)
+ resetFailureCounter()
+ return response
+ } catch (err: unknown) {
+ clearTimeout(timeout)
+ const error = err as FetchError
+
+ const isRetryable =
+ error.cause?.code === "ECONNRESET" ||
+ error.cause?.code === "ETIMEDOUT" ||
+ error.cause?.code === "UND_ERR_CONNECT_TIMEOUT" ||
+ error.message.includes("timeout")
+
+ // Count every timeout, not just final failures - exits if too many across all fetches
+ if (isRetryable) {
+ checkFatalFailure(urlString)
+ }
+
+ if (attempt < retries && isRetryable) {
+ const delay = attempt * 1_000
+ warn(`Fetch failed (${error.cause?.code ?? error.message}) for ${urlString}, retrying in ${delay}ms...`)
+ await new Promise(resolve => setTimeout(resolve, delay))
+ continue
+ }
+
+ warn(`Fetch failed for ${urlString}`, error)
+ throw error
+ }
+ }
+
+ throw new Error(`Max retries exceeded for ${urlString}`)
+}
diff --git a/packages/code-link-cli/src/helpers/plugin-prompts.ts b/packages/code-link-cli/src/helpers/plugin-prompts.ts
new file mode 100644
index 000000000..dd440a5af
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/plugin-prompts.ts
@@ -0,0 +1,156 @@
+/**
+ * Plugin User Prompt Coordinator
+ *
+ * Provides a clean awaitable API for user confirmations via the Plugin UI.
+ * Manages pending promises that resolve when the user responds in Framer.
+ */
+
+import type { WebSocket } from "ws"
+import type { Conflict } from "../types.ts"
+import { debug } from "../utils/logging.ts"
+import { sendMessage } from "./connection.ts"
+
+class PluginDisconnectedError extends Error {
+ constructor() {
+ super("Plugin disconnected")
+ this.name = "PluginDisconnectedError"
+ }
+}
+
+interface PendingAction {
+ resolve: (value: unknown) => void
+ reject: (error: Error) => void
+}
+
+export class PluginUserPromptCoordinator {
+ private pendingActions = new Map()
+
+ /**
+ * Register a pending action and return a typed promise
+ */
+ private awaitAction(actionId: string, description: string): Promise {
+ return new Promise((resolve, reject) => {
+ this.pendingActions.set(actionId, {
+ resolve: resolve as (value: unknown) => void,
+ reject,
+ })
+ debug(`Awaiting ${description}: ${actionId}`)
+ })
+ }
+
+ /**
+ * Sends the delete request to the plugin and awaits the user's decision.
+ * Returns the list of fileNames that were confirmed for deletion.
+ */
+ async requestDeleteDecision(
+ socket: WebSocket | null,
+ { fileNames, requireConfirmation }: { fileNames: string[]; requireConfirmation: boolean }
+ ): Promise {
+ if (!socket) {
+ throw new Error("Cannot request delete decision: plugin not connected")
+ }
+
+ if (fileNames.length === 0) {
+ return []
+ }
+
+ if (requireConfirmation) {
+ // Create a promise for each file's confirmation
+ const confirmationPromises = fileNames.map(fileName =>
+ this.awaitAction(`delete:${fileName}`, "delete confirmation")
+ .then(confirmed => (confirmed ? fileName : null))
+ .catch(err => {
+ if (err instanceof PluginDisconnectedError) {
+ debug(`Plugin disconnected while waiting for delete confirmation: ${fileName}`)
+ return null
+ }
+ throw err
+ })
+ )
+
+ await sendMessage(socket, {
+ type: "file-delete",
+ fileNames,
+ requireConfirmation: true,
+ })
+
+ const results = await Promise.all(confirmationPromises)
+ return results.filter((name): name is string => name !== null)
+ }
+
+ await sendMessage(socket, {
+ type: "file-delete",
+ fileNames,
+ requireConfirmation: false,
+ })
+
+ return fileNames
+ }
+
+ /**
+ * Sends conflicts to the plugin and awaits user resolutions
+ */
+ async requestConflictDecisions(
+ socket: WebSocket | null,
+ conflicts: Conflict[]
+ ): Promise