# .github/workflows/pr-package.yml
name: Publish Preview Packages
on:
  push:
    branches: [main]
  pull_request:
    # 'closed' was previously listed here, but every job either filters closed
    # events out (the tag job's `if`) or depends on a job that does, so closed
    # events only produced all-skipped no-op runs. Omit it.
    types: [opened, synchronize, reopened]

permissions:
  contents: read
  pull-requests: write

env:
  PR_PACKAGE_HOST: pkg.ing
  NODE_VERSION: "24"
  PKG_DIR: node-utils

jobs:
  # ── Compute tags once so the publish + comment jobs use the same set. ─────
  tag:
    # Defensive guard retained even though 'closed' is no longer a trigger type.
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
    runs-on: ubuntu-latest
    outputs:
      tags: ${{ steps.tags.outputs.tags }}
      short: ${{ steps.tags.outputs.short }}
    steps:
      - id: tags
        env:
          EVENT: ${{ github.event_name }}
          BRANCH: ${{ github.event_name == 'pull_request' && github.head_ref || github.ref_name }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
          # PR builds run on the merge commit by default; tag the actual head sha
          # so consumers can pin to a specific PR commit.
          SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
        run: |
          set -euo pipefail
          short="${SHA:0:7}"
          long="$SHA"
          # Every build is addressable by short sha, full sha, and branch name;
          # PR builds additionally get a pr-<N> tag.
          tags=("$short" "$long" "$BRANCH")
          if [ "$EVENT" = "pull_request" ]; then
            tags+=("pr-${PR_NUMBER}")
          fi
          json=$(printf '%s\n' "${tags[@]}" | jq -R . | jq -s -c .)
          echo "tags=$json" >> "$GITHUB_OUTPUT"
          echo "short=$short" >> "$GITHUB_OUTPUT"
          echo "Tags: $json"

  # ── Build + publish the package.
# ──────────────────────────────────────────
  # Build the workspace, pack the target package, and PUT the tarball to the
  # preview registry with the tag set computed by the `tag` job.
  publish:
    needs: tag
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - run: bun install --frozen-lockfile

      - run: bun run build

      - name: Publish
        env:
          TOKEN: ${{ secrets.PR_PACKAGE_TOKEN }}
          TAGS: ${{ needs.tag.outputs.tags }}
          SHORT_SHA: ${{ needs.tag.outputs.short }}
        working-directory: packages/${{ env.PKG_DIR }}
        run: |
          set -euo pipefail
          PKG_NAME=$(node -p "require('./package.json').name")
          # Clean slate so the glob below resolves to exactly one tarball.
          rm -f *.tgz
          bun pm pack --destination .
          tarball=$(ls *.tgz)
          echo "Publishing ${PKG_NAME} (${tarball}) with tags ${TAGS}"
          curl -fsSL --show-error -X PUT \
            "https://${PR_PACKAGE_HOST}/projects/${PKG_NAME}/packages" \
            -H "Authorization: Bearer ${TOKEN}" \
            -H "X-Tags: ${TAGS}" \
            -H "Content-Type: application/gzip" \
            --data-binary "@${tarball}"

          INSTALL_URL="https://${PR_PACKAGE_HOST}/${PKG_NAME}/${SHORT_SHA}"
          echo "::notice title=${PKG_NAME}::Install: bun add ${INSTALL_URL}"
          {
            echo "### ${PKG_NAME}"
            echo
            echo '```sh'
            echo "bun add ${INSTALL_URL}"
            echo '```'
            echo
          } >> "$GITHUB_STEP_SUMMARY"

  # ── PR-only: sticky comment with install URL pinned to this commit.
# ───────
  comment:
    needs: [tag, publish]
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Comment on PR
        env:
          GH_TOKEN: ${{ github.token }}
          REPO: ${{ github.repository }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
          SHORT_SHA: ${{ needs.tag.outputs.short }}
        run: |
          set -euo pipefail
          # Hidden sentinel identifying *our* sticky comment. It must be
          # non-empty: with MARKER="" the jq `startswith("")` below matches
          # EVERY comment on the PR, so `head -1` would pick an arbitrary
          # comment and the job would overwrite someone else's comment.
          MARKER="<!-- pr-package-preview-comment -->"
          PKG_NAME=$(node -p "require('./packages/${PKG_DIR}/package.json').name")

          {
            echo "$MARKER"
            echo
            echo "Install the package built from this commit:"
            echo
            echo "**${PKG_NAME}**"
            echo '```sh'
            echo "bun add https://${PR_PACKAGE_HOST}/${PKG_NAME}/${SHORT_SHA}"
            echo '```'
          } > /tmp/body.md

          BODY=$(cat /tmp/body.md)

          # Find a previous comment carrying the marker; update it in place if
          # present, otherwise create a new one (the "sticky comment" pattern).
          existing=$(gh api --paginate \
            "repos/${REPO}/issues/${PR_NUMBER}/comments" \
            --jq ".[] | select(.body | startswith(\"${MARKER}\")) | .id" \
            | head -1)

          if [ -n "$existing" ]; then
            gh api -X PATCH "repos/${REPO}/issues/comments/${existing}" -f body="$BODY"
          else
            gh api -X POST "repos/${REPO}/issues/${PR_NUMBER}/comments" -f body="$BODY"
          fi

  # ── main-only: post a commit comment with install URL pinned to this sha.
# ─
  commit-comment:
    needs: [tag, publish]
    if: github.event_name == 'push'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Comment on commit
        env:
          GH_TOKEN: ${{ github.token }}
          REPO: ${{ github.repository }}
          SHA: ${{ github.sha }}
          SHORT_SHA: ${{ needs.tag.outputs.short }}
        run: |
          set -euo pipefail
          PKG_NAME=$(node -p "require('./packages/${PKG_DIR}/package.json').name")
          {
            echo "Package built from this commit (\`${SHORT_SHA}\`):"
            echo
            echo "**${PKG_NAME}**"
            echo '```sh'
            echo "bun add https://${PR_PACKAGE_HOST}/${PKG_NAME}/${SHORT_SHA}"
            echo '```'
          } > /tmp/body.md

          BODY=$(cat /tmp/body.md)
          gh api -X POST "repos/${REPO}/commits/${SHA}/comments" -f body="$BODY"

# .github/workflows/release.yml
name: Release NPM Packages

on:
  workflow_dispatch:
    inputs:
      bump:
        description: "Bump type"
        required: true
        type: choice
        options:
          - patch
          - minor
          - major
      version-override:
        description: "Exact version override (e.g. 0.5.0-alpha.0). Leave empty to use bump type."
        required: false
        type: string

permissions:
  contents: write
  id-token: write

env:
  NODE_VERSION: "24"
  # Files produced by the bump job and consumed by the commit-and-tag job.
  # bump.ts rewrites every workspace package.json (this repo publishes only
  # @alchemy.run/node-utils — see bun.lock workspaces), plus bun.lock and
  # CHANGELOG.md.
  # NOTE(review): the previous list named three packages/cloudflare-* files
  # that do not exist in this workspace, which would make upload-artifact fail
  # under `if-no-files-found: error` — confirm no cloudflare packages are
  # expected to be added here.
  BUMP_ARTIFACT_PATHS: |
    packages/node-utils/package.json
    bun.lock
    CHANGELOG.md

jobs:
  # ── Step 1: Compute the next version and stage the bump in-memory. ──
  # No git commit: a failed publish leaves no orphan commit behind. The staged
  # files ride to commit-and-tag as an artifact.
# Runs bump.ts, captures the resolved version, uploads the rewritten files.
  bump:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.bump.outputs.version }}
      sha: ${{ steps.sha.outputs.sha }}
    steps:
      - uses: actions/checkout@v6
        with:
          ref: ${{ github.ref }}
          fetch-depth: 0

      - id: sha
        run: echo "sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"

      - uses: actions/setup-node@v6
        with:
          registry-url: https://registry.npmjs.org/
          node-version: ${{ env.NODE_VERSION }}

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - uses: actions/cache@v4
        with:
          path: ~/.bun/install/cache
          key: bun-${{ runner.os }}-${{ hashFiles('bun.lock') }}
          restore-keys: |
            bun-${{ runner.os }}-

      - run: bun install

      - id: bump
        # Route inputs through env instead of interpolating ${{ }} directly
        # into the run script: workflow-dispatch inputs are free text, and
        # inline interpolation is a shell-injection vector (see GitHub's
        # security-hardening guidance).
        env:
          VERSION_OVERRIDE: ${{ inputs.version-override }}
          BUMP_TYPE: ${{ inputs.bump }}
        run: |
          set -euo pipefail
          if [ -n "${VERSION_OVERRIDE}" ]; then
            bun ./scripts/bump.ts "${VERSION_OVERRIDE}"
            VERSION="${VERSION_OVERRIDE}"
          else
            bun ./scripts/bump.ts "${BUMP_TYPE}"
            # bump.ts treats packages/node-utils as the root package; read the
            # resolved version back from it. (Previously read a nonexistent
            # packages/cloudflare-rolldown-plugin/package.json.)
            VERSION=$(node -p "require('./packages/node-utils/package.json').version")
          fi
          echo "Resolved version: $VERSION"
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"

      - uses: actions/upload-artifact@v4
        with:
          name: bump-files
          path: ${{ env.BUMP_ARTIFACT_PATHS }}
          if-no-files-found: error
          retention-days: 7

  # ── Step 2: Commit + tag BEFORE publishing. ──
  # Committing first means the publishable source of truth is the tagged commit;
  # publish jobs check it out by SHA. Durability: if a previous attempt already
  # committed and tagged but failed during npm publish, this job detects HEAD is
  # already at the tag and skips the commit/push.
# Commits the staged bump with a bot identity, tags, and pushes — each step
  # idempotent so a re-run after a partial failure resumes cleanly.
  commit-and-tag:
    needs: bump
    runs-on: ubuntu-latest
    outputs:
      sha: ${{ steps.commit.outputs.sha }}
    steps:
      - name: Generate bot token
        id: bot-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.ALCHEMY_VERSION_BOT_ID }}
          private-key: ${{ secrets.ALCHEMY_VERSION_BOT_PRIVATE_KEY }}

      - uses: actions/checkout@v6
        with:
          ref: ${{ github.ref }}
          fetch-depth: 0
          token: ${{ steps.bot-token.outputs.token }}

      # Overlays the rewritten package.json / bun.lock / CHANGELOG.md from
      # the bump job onto the working tree.
      - uses: actions/download-artifact@v4
        with:
          name: bump-files

      - name: Configure git
        run: |
          git config user.email "alchemy-version-bot[bot]@users.noreply.github.com"
          git config user.name "alchemy-version-bot[bot]"

      - id: commit
        name: Commit bump + changelog, tag, push
        env:
          VERSION: ${{ needs.bump.outputs.version }}
        run: |
          set -euo pipefail
          TAG="v${VERSION}"

          # Durability: if HEAD already points at the release tag (resumed run
          # after a previous attempt committed+tagged but publish failed), there
          # is nothing to do. Use HEAD as the publish SHA.
          HEAD_TAG=$(git describe --exact-match --tags HEAD 2>/dev/null || true)
          if [ "$HEAD_TAG" = "$TAG" ]; then
            echo "HEAD is already at ${TAG}; skipping commit/tag/push"
            SHA=$(git rev-parse HEAD)
            echo "sha=${SHA}" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          git add -A

          if git diff --cached --quiet; then
            echo "No changes to commit (already on a release commit?)"
          else
            git commit -m "chore(release): ${VERSION}"
          fi

          if ! git rev-parse --verify "refs/tags/${TAG}" >/dev/null 2>&1; then
            git tag -a "${TAG}" -m "Release ${TAG}"
          fi

          git push origin HEAD
          if git ls-remote --exit-code --tags origin "refs/tags/${TAG}" >/dev/null 2>&1; then
            echo "Tag ${TAG} already on remote, skipping tag push"
          else
            git push origin "refs/tags/${TAG}"
          fi

          SHA=$(git rev-parse HEAD)
          echo "sha=${SHA}" >> "$GITHUB_OUTPUT"

  # ── Step 3: Publish to npm from the tagged commit. ──
  publish-node-utils:
    needs: [bump, commit-and-tag]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          ref: ${{ needs.commit-and-tag.outputs.sha }}

      - uses: actions/setup-node@v6
        with:
          registry-url: https://registry.npmjs.org/
          node-version: ${{ env.NODE_VERSION }}

      - name: Upgrade npm for OIDC trusted publishing
        run: |
          npm install -g npm@latest && npm --version
          sed -i '/always-auth/d' ~/.npmrc 2>/dev/null || true

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - run: bun install

      - run: bun run build

      - name: Publish
        env:
          VERSION: ${{ needs.bump.outputs.version }}
          # This workspace contains only packages/node-utils (see bun.lock);
          # the previous value, cloudflare-rolldown-plugin, pointed at a
          # directory that does not exist here and the require() below would
          # have failed.
          PKG_DIR: node-utils
        run: |
          set -euo pipefail
          PKG_NAME=$(node -p "require('./packages/${PKG_DIR}/package.json').name")
          echo "--- Publishing ${PKG_NAME}@${VERSION} ---"
          # Idempotency: a resumed run skips versions that already made it out.
          if npm view "${PKG_NAME}@${VERSION}" version >/dev/null 2>&1; then
            echo "${PKG_NAME}@${VERSION} already published, skipping"
            exit 0
          fi
          cd "packages/${PKG_DIR}"
          # Clean slate so the glob resolves to the tarball we just packed.
          rm -f *.tgz
          bun pm pack --destination .
          TARBALL=$(ls *.tgz | head -1)
          # Prereleases (x.y.z-foo) go to the 'next' dist-tag, not 'latest'.
          TAG="latest"
          if echo "${VERSION}" | grep -q '-'; then TAG="next"; fi
          npm publish "${TARBALL}" --access public --tag "${TAG}"
          rm -f *.tgz
          echo "--- Published ${PKG_NAME}@${VERSION} ---"

  # ── Step 4: Create GitHub Release. ──
  # Runs only after every publish job succeeded. Skipped for prerelease versions
  # (those containing '-').
# Publishes a GitHub Release with notes generated by changelogithub.
  finalize:
    needs: [bump, commit-and-tag, publish-node-utils]
    if: ${{ !contains(needs.bump.outputs.version, '-') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          ref: ${{ needs.commit-and-tag.outputs.sha }}
          fetch-depth: 0

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Create GitHub Release
        env:
          VERSION: ${{ needs.bump.outputs.version }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          TAG="v${VERSION}"
          # Idempotency: a resumed run must not recreate an existing release.
          if gh release view "${TAG}" >/dev/null 2>&1; then
            echo "Release ${TAG} already exists, skipping"
            exit 0
          fi
          # Notes span from the previous tag (if any) up to this release tag.
          PREV_TAG=$(git describe --tags --abbrev=0 "${TAG}^" 2>/dev/null || echo "")
          if [ -n "$PREV_TAG" ]; then
            bunx changelogithub --from "$PREV_TAG" --to "${TAG}"
          else
            bunx changelogithub --to "${TAG}"
          fi

# README.md
# node-utils

Modern ESM + TypeScript ports of a few classic Node CommonJS modules, vendored into a single zero-dependency package.
-[npm-url]:https://npmjs.org/package/proper-lockfile -[downloads-image]:https://img.shields.io/npm/dm/proper-lockfile.svg -[npm-image]:https://img.shields.io/npm/v/proper-lockfile.svg -[travis-url]:https://travis-ci.org/moxystudio/node-proper-lockfile -[travis-image]:https://img.shields.io/travis/moxystudio/node-proper-lockfile/master.svg -[codecov-url]:https://codecov.io/gh/moxystudio/node-proper-lockfile -[codecov-image]:https://img.shields.io/codecov/c/github/moxystudio/node-proper-lockfile/master.svg -[david-dm-url]:https://david-dm.org/moxystudio/node-proper-lockfile -[david-dm-image]:https://img.shields.io/david/moxystudio/node-proper-lockfile.svg -[david-dm-dev-url]:https://david-dm.org/moxystudio/node-proper-lockfile?type=dev -[david-dm-dev-image]:https://img.shields.io/david/dev/moxystudio/node-proper-lockfile.svg +## Why? -An inter-process and inter-machine lockfile utility that works on a local or network file system. +The originals are stable but unmaintained, CJS-only, and pull in `@types/*` and transitive deps for code that's now smaller than its dependency tree. Vendoring + rewriting gives us: +- Pure ESM, native to Node 22+ and Bun. +- Real TypeScript types co-located with the implementation. +- No runtime npm dependencies — only Node built-ins. 
-## Installation +## What's vendored -`$ npm install proper-lockfile` +All of these live in `packages/node-utils/src/`: +| Original (CJS) | Rewritten as | +| ------------------------------------------------------------------------------------- | ------------------------------------- | +| [`proper-lockfile`](https://github.com/moxystudio/node-proper-lockfile) | `lockfile.ts` (+ `index.ts`) | +| [`retry`](https://github.com/tim-kos/node-retry) | `retry.ts` | +| [`signal-exit`](https://github.com/tapjs/signal-exit) | `exit-hook.ts` | +| [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) sync/async shims | `adapter.ts` | -## Design +## `@alchemy.run/node-utils` -There are various ways to achieve [file locking](http://en.wikipedia.org/wiki/File_locking). +A `proper-lockfile`-equivalent file lock, with `retry`, `signal-exit`, and the `graceful-fs` adapter all bundled in. -This library utilizes the `mkdir` strategy which works atomically on any kind of file system, even network based ones. -The lockfile path is based on the file path you are trying to lock by suffixing it with `.lock`. - -When a lock is successfully acquired, the lockfile's `mtime` (modified time) is periodically updated to prevent staleness. This allows to effectively check if a lock is stale by checking its `mtime` against a stale threshold. If the update of the mtime fails several times, the lock might be compromised. The `mtime` is [supported](http://en.wikipedia.org/wiki/Comparison_of_file_systems) in almost every `filesystem`. - - -### Comparison - -This library is similar to [lockfile](https://github.com/isaacs/lockfile) but the latter has some drawbacks: - -- It relies on `open` with `O_EXCL` flag which has problems in network file systems. `proper-lockfile` uses `mkdir` which doesn't have this issue. - -> O_EXCL is broken on NFS file systems; programs which rely on it for performing locking tasks will contain a race condition. 
- -- The lockfile staleness check is done via `ctime` (creation time) which is unsuitable for long running processes. `proper-lockfile` constantly updates lockfiles `mtime` to do proper staleness check. - -- It does not check if the lockfile was compromised which can lead to undesirable situations. `proper-lockfile` checks the lockfile when updating the `mtime`. - -- It has a default value of `0` for the stale option which isn't good because any crash or process kill that the package can't handle gracefully will leave the lock active forever. - - -### Compromised - -`proper-lockfile` does not detect cases in which: - -- A `lockfile` is manually removed and someone else acquires the lock right after -- Different `stale`/`update` values are being used for the same file, possibly causing two locks to be acquired on the same file - -`proper-lockfile` detects cases in which: - -- Updates to the `lockfile` fail -- Updates take longer than expected, possibly causing the lock to become stale for a certain amount of time - - -As you see, the first two are a consequence of bad usage. Technically, it was possible to detect the first two but it would introduce complexity and eventual race conditions. - - -## Usage - -### .lock(file, [options]) - -Tries to acquire a lock on `file` or rejects the promise on error. - -If the lock succeeds, a `release` function is provided that should be called when you want to release the lock. The `release` function also rejects the promise on error (e.g. when the lock was already compromised). 
- -Available options: - -- `stale`: Duration in milliseconds in which the lock is considered stale, defaults to `10000` (minimum value is `5000`) -- `update`: The interval in milliseconds in which the lockfile's `mtime` will be updated, defaults to `stale/2` (minimum value is `1000`, maximum value is `stale/2`) -- `retries`: The number of retries or a [retry](https://www.npmjs.org/package/retry) options object, defaults to `0` -- `realpath`: Resolve symlinks using realpath, defaults to `true` (note that if `true`, the `file` must exist previously) -- `fs`: A custom fs to use, defaults to `graceful-fs` -- `onCompromised`: Called if the lock gets compromised, defaults to a function that simply throws the error which will probably cause the process to die -- `lockfilePath`: Custom lockfile path. e.g.: If you want to lock a directory and create the lock file inside it, you can pass `file` as `` and `options.lockfilePath` as `/dir.lock` - - -```js -const lockfile = require('proper-lockfile'); - -lockfile.lock('some/file') -.then((release) => { - // Do something while the file is locked - - // Call the provided release function when you're done, - // which will also return a promise - return release(); -}) -.catch((e) => { - // either lock could not be acquired - // or releasing it failed - console.error(e) -}); - -// Alternatively, you may use lockfile('some/file') directly. -``` - - -### .unlock(file, [options]) - -Releases a previously acquired lock on `file` or rejects the promise on error. - -Whenever possible you should use the `release` function instead (as exemplified above). Still there are cases in which it's hard to keep a reference to it around code. In those cases `unlock()` might be handy. - -Available options: - -- `realpath`: Resolve symlinks using realpath, defaults to `true` (note that if `true`, the `file` must exist previously) -- `fs`: A custom fs to use, defaults to `graceful-fs` -- `lockfilePath`: Custom lockfile path. 
e.g.: If you want to lock a directory and create the lock file inside it, you can pass `file` as `` and `options.lockfilePath` as `/dir.lock` - - -```js -const lockfile = require('proper-lockfile'); - -lockfile.lock('some/file') -.then(() => { - // Do something while the file is locked - - // Later.. - return lockfile.unlock('some/file'); -}); +```bash +bun add @alchemy.run/node-utils ``` -### .check(file, [options]) - -Check if the file is locked and its lockfile is not stale, rejects the promise on error. - -Available options: - -- `stale`: Duration in milliseconds in which the lock is considered stale, defaults to `10000` (minimum value is `5000`) -- `realpath`: Resolve symlinks using realpath, defaults to `true` (note that if `true`, the `file` must exist previously) -- `fs`: A custom fs to use, defaults to `graceful-fs` -- `lockfilePath`: Custom lockfile path. e.g.: If you want to lock a directory and create the lock file inside it, you can pass `file` as `` and `options.lockfilePath` as `/dir.lock` +```ts +import { lock, check } from '@alchemy.run/node-utils'; - -```js -const lockfile = require('proper-lockfile'); - -lockfile.check('some/file') -.then((isLocked) => { - // isLocked will be true if 'some/file' is locked, false otherwise -}); +const release = await lock('some/file'); +try { + // ...do work +} finally { + await release(); +} ``` -### .lockSync(file, [options]) - -Sync version of `.lock()`. -Returns the `release` function or throws on error. +Also exported: `unlock`, `lockSync`, `unlockSync`, `checkSync`. -### .unlockSync(file, [options]) +### How it works -Sync version of `.unlock()`. -Throws on error. +Locks are acquired with `mkdir` (atomic on every filesystem, including NFS), and the lockfile's `mtime` is refreshed on an interval so staleness can be detected. If the refresh fails repeatedly, `onCompromised` fires. -### .checkSync(file, [options]) +### Options (`lock`) -Sync version of `.check()`. -Returns a boolean or throws on error. 
+- `stale` — ms before a lock is considered stale. Default `10000`, min `5000`. +- `update` — ms between `mtime` refreshes. Default `stale / 2`. Min `1000`, max `stale / 2`. +- `retries` — number, or a [retry options object](packages/node-utils/src/retry.ts). Default `0`. +- `realpath` — resolve symlinks. Default `true` (file must exist). +- `fs` — custom fs. Defaults to a `graceful-fs`-style retrying wrapper. +- `onCompromised` — callback. Defaults to throwing. +- `lockfilePath` — override the `.lock` path. +`check` and `unlock` accept the relevant subset. -## Graceful exit +Sync variants (`lockSync` / `unlockSync` / `checkSync`) don't accept `retries`. -`proper-lockfile` automatically removes locks if the process exits, except if the process is killed with SIGKILL or it crashes due to a VM fatal error (e.g.: out of memory). +### Graceful exit +Locks are removed automatically on process exit, except on `SIGKILL` or fatal VM errors. Wired up via the vendored `signal-exit` port. -## Tests - -`$ npm test` -`$ npm test -- --watch` during development - -The test suite is very extensive. There's even a stress test to guarantee exclusiveness of locks. +## Development +```bash +bun install +bun test +bun run build # tsc -b +bun run format # oxfmt . +``` ## License -Released under the [MIT License](https://www.opensource.org/licenses/mit-license.php). +MIT. Vendored sources retain their original MIT licenses; see file headers. 
diff --git a/bun.lock b/bun.lock index 40fae03..ca7d0c0 100644 --- a/bun.lock +++ b/bun.lock @@ -9,22 +9,13 @@ "typescript": "^6.0.3", }, }, - "packages/exit-hook": { - "name": "@alchemy.run/exit-hook", + "packages/node-utils": { + "name": "@alchemy.run/node-utils", "version": "0.0.0", }, - "packages/lockfile": { - "name": "@alchemy.run/lockfile", - "version": "0.0.0", - "dependencies": { - "@alchemy.run/exit-hook": "workspace:*", - }, - }, }, "packages": { - "@alchemy.run/exit-hook": ["@alchemy.run/exit-hook@workspace:packages/exit-hook"], - - "@alchemy.run/lockfile": ["@alchemy.run/lockfile@workspace:packages/lockfile"], + "@alchemy.run/node-utils": ["@alchemy.run/node-utils@workspace:packages/node-utils"], "@types/bun": ["@types/bun@1.3.14", "", { "dependencies": { "bun-types": "1.3.14" } }, "sha512-h1hFqFVcvAvD9j9K7ZW7vd82aSA+rTdznZa+5bwvCwqSB1jmmfLcbIWhOLx1/+boy/xmjgCs/OMUL8hRJSmnPw=="], diff --git a/package.json b/package.json index 062fff3..7c0dcf5 100644 --- a/package.json +++ b/package.json @@ -18,5 +18,8 @@ "devDependencies": { "@types/bun": "^1.3.14", "typescript": "^6.0.3" + }, + "publishConfig": { + "access": "public" } } \ No newline at end of file diff --git a/packages/node-utils/package.json b/packages/node-utils/package.json index e0a7118..e665eb2 100644 --- a/packages/node-utils/package.json +++ b/packages/node-utils/package.json @@ -1,5 +1,5 @@ { - "name": "@alchemy.run/lockfile", + "name": "@alchemy.run/node-utils", "version": "0.0.0", "author": "Sam Goodwin (sam@alchemy.run)", "repository": { diff --git a/scripts/bump.ts b/scripts/bump.ts new file mode 100644 index 0000000..770ffcc --- /dev/null +++ b/scripts/bump.ts @@ -0,0 +1,334 @@ +import { execSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { readFile, readdir, writeFile } from "node:fs/promises"; +import { join } from "node:path"; + +const REPO = "alchemy-run/node-utils"; +const PACKAGES_DIR = join(process.cwd(), "packages"); + +async function 
checkNpmVersion( + packageName: string, + version: string, +): Promise { + try { + const response = await fetch( + `https://registry.npmjs.org/${packageName}/${version}`, + ); + return response.ok; + } catch { + return false; + } +} + +async function checkGithubTag(version: string): Promise { + try { + const response = await fetch( + `https://api.github.com/repos/${REPO}/git/refs/tags/v${version}`, + ); + return response.ok; + } catch { + return false; + } +} + +async function getPackageDirs(): Promise> { + const entries = await readdir(PACKAGES_DIR, { withFileTypes: true }); + const dirs: Array = []; + for (const entry of entries) { + if (entry.isDirectory()) { + const pkgJsonPath = join(PACKAGES_DIR, entry.name, "package.json"); + try { + await readFile(pkgJsonPath, "utf-8"); + dirs.push(entry.name); + } catch { + // not a package directory, skip + } + } + } + return dirs; +} + +// --- Main --- + +const versionInput = process.argv[2]; + +if (!versionInput) { + console.error( + "Please provide a version number or bump type (major, minor, patch)", + ); + process.exit(1); +} + +const rootPackageJsonPath = join(PACKAGES_DIR, "node-utils", "package.json"); +const rootPackageJson = JSON.parse( + await readFile(rootPackageJsonPath, "utf-8"), +); + +let newVersion = ""; + +if (["major", "minor", "patch"].includes(versionInput)) { + const currentVersion = rootPackageJson.version; + const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/); + + if (!versionMatch) { + console.error(`Invalid current version format: ${currentVersion}`); + process.exit(1); + } + + const [, major, minor, patch] = versionMatch.map(Number); + + switch (versionInput) { + case "major": + newVersion = `${major + 1}.0.0`; + break; + case "minor": + newVersion = `${major}.${minor + 1}.0`; + break; + case "patch": + newVersion = `${major}.${minor}.${patch + 1}`; + break; + default: + throw new Error(`Invalid bump type: ${versionInput}`); + } + + console.log( + `Bumping ${versionInput} version: 
${currentVersion} -> ${newVersion}`, + ); +} else { + if (!/^\d+\.\d+\.\d+(-[\w.]+)?$/.test(versionInput)) { + console.error( + "Version must be in format x.y.z or x.y.z-pre or use 'major', 'minor', 'patch'", + ); + process.exit(1); + } + + newVersion = versionInput; + console.log(`Setting specific version: ${newVersion}`); +} + +// Check if version already exists on npm (check rolldown plugin as representative) +const npmExists = await checkNpmVersion(rootPackageJson.name, newVersion); +if (npmExists) { + console.error(`Version ${newVersion} already exists on npm`); + process.exit(1); +} + +const githubTagExists = await checkGithubTag(newVersion); +if (githubTagExists) { + console.error(`Tag v${newVersion} already exists on GitHub`); + process.exit(1); +} + +// Update version in ALL packages and collect their npm names +const packageDirs = await getPackageDirs(); +const packageNames: Set = new Set(); + +for (const dir of packageDirs) { + const pkgJsonPath = join(PACKAGES_DIR, dir, "package.json"); + const pkgJson = JSON.parse(await readFile(pkgJsonPath, "utf-8")); + if (pkgJson.private) { + console.log(` Skipped ${pkgJson.name} (private)`); + continue; + } + pkgJson.version = newVersion; + await writeFile(pkgJsonPath, `${JSON.stringify(pkgJson, null, 2)}\n`); + packageNames.add(pkgJson.name); + console.log(` Updated ${pkgJson.name} to ${newVersion}`); +} + +// Update workspace versions in bun.lock so bun pm pack resolves workspace:* correctly. +// Only target entries whose "name" matches one of our workspace packages. +// The lockfile format has "name": "..." immediately before "version": "..." for each +// workspace entry, so we match that pair specifically. 
+const lockfilePath = join(process.cwd(), "bun.lock"); +try { + const lockfile = await readFile(lockfilePath, "utf-8"); + const lines = lockfile.split("\n"); + let updated = false; + + for (let i = 1; i < lines.length; i++) { + const versionMatch = lines[i].match(/^(\s*"version": ")[^"]*(".*)/); + if (!versionMatch) continue; + + // Look at the previous line for a "name" field matching one of our packages + const nameMatch = lines[i - 1].match(/"name": "([^"]*)"/); + if (!nameMatch || !packageNames.has(nameMatch[1])) continue; + + lines[i] = `${versionMatch[1]}${newVersion}${versionMatch[2]}`; + updated = true; + console.log(` Updated ${nameMatch[1]} version in bun.lock`); + } + + if (updated) { + await writeFile(lockfilePath, lines.join("\n")); + } +} catch { + // bun.lock may not exist (e.g. fresh clone before install) + console.log(` Skipped bun.lock update (file not found)`); +} + +// --- Update CHANGELOG.md --- + +/** + * Parse conventional commits since the last git tag and generate a changelog + * entry matching the format used in CHANGELOG.md (mirroring changelogithub output). + */ +function generateChangelogEntry(version: string): string { + // Find the previous tag + let prevTag = ""; + try { + prevTag = execSync("git describe --tags --abbrev=0 HEAD", { + encoding: "utf-8", + }).trim(); + } catch { + // No previous tag — include all commits + } + + // Get commits since last tag (or all commits if no tag) + const range = prevTag ? 
`${prevTag}...HEAD` : "HEAD"; + let logOutput = ""; + try { + logOutput = execSync(`git log ${range} --no-merges --format="%H %s" -- .`, { + encoding: "utf-8", + }).trim(); + } catch { + // git log failed — empty changelog + } + + if (!logOutput) { + const date = new Date().toISOString().slice(0, 10); + return `## [v${version}](https://github.com/${REPO}/releases/tag/v${version}) (${date})\n\n*No significant changes*\n`; + } + + const features: Array = []; + const fixes: Array = []; + + for (const line of logOutput.split("\n")) { + if (!line.trim()) continue; + + const spaceIdx = line.indexOf(" "); + const hash = line.slice(0, spaceIdx); + const shortHash = hash.slice(0, 7); + const subject = line.slice(spaceIdx + 1); + + // Skip release commits + if (subject.startsWith("chore(release)")) continue; + // Skip non-feat/fix commits (chore, ci, docs, etc.) + if ( + !subject.startsWith("feat") && + !subject.startsWith("fix") && + !subject.startsWith("perf") + ) + continue; + + // Parse conventional commit: type(scope): description (#PR) + const match = subject.match(/^(feat|fix|perf)(?:\(([^)]*)\))?\s*:\s*(.+)$/); + if (!match) continue; + + const [, type, scope, description] = match; + + // Extract PR number if present + const prMatch = description.match(/\(#(\d+)\)\s*$/); + const prNum = prMatch ? prMatch[1] : null; + const descClean = prMatch + ? description.slice(0, prMatch.index).trim() + : description.trim(); + + // Extract author from git log + let author = ""; + try { + author = execSync(`git log -1 --format="%an" ${hash}`, { + encoding: "utf-8", + }).trim(); + } catch { + // skip author + } + + // Build the line + let entry = scope ? `- **${scope}**: ${descClean}` : `- ${descClean}`; + + if (author) { + // Try to get GitHub username from commit + let ghUser = ""; + try { + const email = execSync(`git log -1 --format="%ae" ${hash}`, { + encoding: "utf-8", + }).trim(); + // GitHub noreply emails have the format: user@users.noreply.github.com or ID+user@... 
+ const noreplyMatch = email.match( + /(?:\d+\+)?([^@]+)@users\.noreply\.github\.com/, + ); + if (noreplyMatch) { + ghUser = noreplyMatch[1]; + } + } catch { + // skip + } + + if (ghUser) { + entry += ` - by @${ghUser}`; + } else { + entry += ` - by ${author}`; + } + } + + if (prNum) { + entry += ` in [#${prNum}](https://github.com/${REPO}/pull/${prNum})`; + } + + entry += ` [(${shortHash})](https://github.com/${REPO}/commit/${hash})`; + + if (type === "feat") { + features.push(entry); + } else { + fixes.push(entry); + } + } + + const date = new Date().toISOString().slice(0, 10); + const parts: Array = [ + `## [v${version}](https://github.com/${REPO}/releases/tag/v${version}) (${date})`, + ]; + + if (features.length === 0 && fixes.length === 0) { + parts.push("", "*No significant changes*"); + } else { + if (features.length > 0) { + parts.push("", "### Features", "", ...features); + } + if (fixes.length > 0) { + parts.push("", "### Bug Fixes", "", ...fixes); + } + } + + return parts.join("\n") + "\n"; +} + +const changelogPath = join(process.cwd(), "CHANGELOG.md"); +const changelogEntry = generateChangelogEntry(newVersion); + +try { + if (existsSync(changelogPath)) { + const existing = await readFile(changelogPath, "utf-8"); + // Insert new entry after the "# Changelog" header + const headerEnd = existing.indexOf("\n\n"); + if (headerEnd !== -1) { + const header = existing.slice(0, headerEnd); + const rest = existing.slice(headerEnd + 2); + await writeFile(changelogPath, `${header}\n\n${changelogEntry}\n${rest}`); + } else { + // No double newline found — just prepend after first line + const firstNewline = existing.indexOf("\n"); + const header = existing.slice(0, firstNewline); + const rest = existing.slice(firstNewline + 1); + await writeFile(changelogPath, `${header}\n\n${changelogEntry}\n${rest}`); + } + } else { + await writeFile(changelogPath, `# Changelog\n\n${changelogEntry}`); + } + console.log(` Updated CHANGELOG.md`); +} catch (err) { + console.error(` 
Failed to update CHANGELOG.md: ${err}`); +} + +console.log(`\nUpdated all packages to version ${newVersion}`);