diff --git a/.github/workflows/publish.reusable.yml b/.github/workflows/publish.reusable.yml index 3ebf9216d..e93848579 100644 --- a/.github/workflows/publish.reusable.yml +++ b/.github/workflows/publish.reusable.yml @@ -33,15 +33,23 @@ jobs: exit 1 fi + - name: Generate Packages (deprecated) + id: generate-packages-deprecated + run: node packages/@postgrestools/postgrestools/scripts/generate-packages.mjs + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + RELEASE_TAG: ${{ inputs.release-tag }} + PRERELEASE: ${{ inputs.is-prerelease }} + - name: Generate Packages id: generate-packages - run: node packages/@postgrestools/postgrestools/scripts/generate-packages.mjs + run: node packages/@postgres-language-server/cli/scripts/generate-packages.mjs env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} RELEASE_TAG: ${{ inputs.release-tag }} PRERELEASE: ${{ inputs.is-prerelease }} - - name: Publish npm packages as nightly + - name: Publish npm packages as nightly (deprecated) if: inputs.is-prerelease == 'true' run: | for package in packages/@postgrestools/*; do @@ -50,7 +58,16 @@ jobs: env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # - - name: Publish npm packages as latest + - name: Publish npm packages as nightly + if: inputs.is-prerelease == 'true' + run: | + for package in packages/@postgres-language-server/*; do + npm publish "$package" --tag nightly --access public --provenance + done + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # + + - name: Publish npm packages as latest (deprecated) if: inputs.is-prerelease != 'true' run: | for package in packages/@postgrestools/*; do @@ -66,3 +83,20 @@ jobs: done env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + - name: Publish npm packages as latest + if: inputs.is-prerelease != 'true' + run: | + for package in packages/@postgres-language-server/*; do + version="${{ inputs.release-tag }}" + npm_package_name=$(jq -r ".name" "$package/package.json") + + if npm view "$npm_package_name@$version" version 2>/dev/null; then + echo 
"Package $npm_package_name@$version already exists, skipping..." + else + echo "Publishing $npm_package_name@$version..." + npm publish "$package" --tag latest --access public --provenance + fi + done + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7ccc53055..f113c6ae2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -47,9 +47,6 @@ jobs: runs-on: ${{ matrix.config.os }} - outputs: - artifact_url: ${{ steps.upload-artifacts.outputs.artifact-url }} - steps: - uses: actions/checkout@v4 with: @@ -96,17 +93,22 @@ jobs: if: matrix.config.os == 'windows-2022' run: | mkdir dist + # deprecated cp target/${{ matrix.config.target }}/release/postgrestools.exe ./dist/postgrestools_${{ matrix.config.target }} + cp target/${{ matrix.config.target }}/release/postgres-language-server.exe ./dist/postgres-language-server_${{ matrix.config.target }} + - name: 👦 Name the Binary if: matrix.config.os != 'windows-2022' run: | mkdir dist + # deprecated cp target/${{ matrix.config.target }}/release/postgrestools ./dist/postgrestools_${{ matrix.config.target }} + cp target/${{ matrix.config.target }}/release/postgres-language-server ./dist/postgres-language-server_${{ matrix.config.target }} # It is not possible to return the artifacts from the matrix jobs individually: Matrix outputs overwrite each other. # A common workaround is to upload and download the resulting artifacts. - - name: 👆 Upload Artifacts - id: upload-artifacts + - name: 👆 Upload Artifacts (deprecated) + id: upload-artifacts-deprecated uses: actions/upload-artifact@v4 with: name: postgrestools_${{ matrix.config.target }} @@ -117,6 +119,20 @@ jobs: compression-level: 2 if-no-files-found: error + # It is not possible to return the artifacts from the matrix jobs individually: Matrix outputs overwrite each other. + # A common workaround is to upload and download the resulting artifacts. 
+ - name: 👆 Upload Artifacts + id: upload-artifacts + uses: actions/upload-artifact@v4 + with: + name: postgres-language-server_${{ matrix.config.target }} + path: ./dist/postgres-language-server_* + # The default compression level is 6; this took the binary down from 350 to 330MB. + # It is recommended to use a lower level for binaries, since the compressed result is not much smaller, + # and the higher levels of compression take much longer. + compression-level: 2 + if-no-files-found: error + create_changelog_and_release: runs-on: ubuntu-latest needs: [extract_version, build_and_test] # make sure that tests & build work correctly @@ -142,12 +158,19 @@ jobs: echo "Tag does not match: ${{ steps.create_changelog.outputs.version }} vs ${{ needs.extract_version.outputs.version }}" exit 1 + - name: 👇 Download Artifacts (deprecated) + uses: actions/download-artifact@v4 + id: download-deprecated + with: + merge-multiple: true + pattern: postgrestools_* + - name: 👇 Download Artifacts uses: actions/download-artifact@v4 id: download with: merge-multiple: true - pattern: postgrestools_* + pattern: postgres-language-server_* - name: 📂 Create Release uses: softprops/action-gh-release@v2 diff --git a/crates/pgt_cli/Cargo.toml b/crates/pgt_cli/Cargo.toml index 6c39107d5..fb20036fe 100644 --- a/crates/pgt_cli/Cargo.toml +++ b/crates/pgt_cli/Cargo.toml @@ -61,3 +61,7 @@ doctest = false [[bin]] name = "postgrestools" path = "src/main.rs" + +[[bin]] +name = "postgres-language-server" +path = "src/main.rs" diff --git a/packages/@postgres-language-server/backend-jsonrpc/package.json b/packages/@postgres-language-server/backend-jsonrpc/package.json new file mode 100644 index 000000000..293e8430f --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/package.json @@ -0,0 +1,32 @@ +{ + "name": "@postgres-language-server/backend-jsonrpc", + "version": "", + "main": "dist/index.js", + "scripts": { + "test": "bun test", + "test:ci": "bun build && bun test", + "build": "bun 
build ./src/index.ts --outdir ./dist --target node" + }, + "files": ["dist/", "README.md"], + "repository": { + "type": "git", + "url": "git+https://github.com/supabase-community/postgres-language-server.git", + "directory": "packages/@postgres-language-server/backend-jsonrpc" + }, + "author": "Supabase Community", + "bugs": "https://github.com/supabase-community/postgres-language-server/issues", + "description": "Bindings to the JSON-RPC Workspace API of the Postgres Language Tools daemon", + "keywords": ["TypeScript", "Postgres"], + "license": "MIT", + "publishConfig": { + "provenance": true + }, + "optionalDependencies": { + "@postgres-language-server/cli-win32-x64": "", + "@postgres-language-server/cli-win32-arm64": "", + "@postgres-language-server/cli-darwin-x64": "", + "@postgres-language-server/cli-darwin-arm64": "", + "@postgres-language-server/cli-linux-x64": "", + "@postgres-language-server/cli-linux-arm64": "" + } +} diff --git a/packages/@postgres-language-server/backend-jsonrpc/src/command.ts b/packages/@postgres-language-server/backend-jsonrpc/src/command.ts new file mode 100644 index 000000000..ed5f9a155 --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/src/command.ts @@ -0,0 +1,74 @@ +import { execSync } from "node:child_process"; + +/** + * Gets the path of the binary for the current platform + * + * @returns Filesystem path to the binary, or null if no prebuilt distribution exists for the current platform + */ +export function getCommand(): string | null { + const { platform, arch } = process; + + const PLATFORMS: Partial< + Record< + NodeJS.Platform | "linux-musl", + Partial> + > + > = { + win32: { + x64: "@postgres-language-server/cli-x86_64-windows-msvc/postgres-language-server.exe", + arm64: "@postgres-language-server/cli-aarch64-windows-msvc/postgres-language-server.exe", + }, + darwin: { + x64: "@postgres-language-server/cli-x86_64-apple-darwin/postgres-language-server", + arm64: 
"@postgres-language-server/cli-aarch64-apple-darwin/postgres-language-server", + }, + linux: { + x64: "@postgres-language-server/cli-x86_64-linux-gnu/postgres-language-server", + arm64: "@postgres-language-server/cli-aarch64-linux-gnu/postgres-language-server", + }, + "linux-musl": { + x64: "@postgres-language-server/cli-x86_64-linux-musl/postgres-language-server", + // no arm64 build for musl + }, + }; + + function isMusl() { + let stderr = ""; + try { + stderr = execSync("ldd --version", { + stdio: [ + "ignore", // stdin + "pipe", // stdout – glibc systems print here + "pipe", // stderr – musl systems print here + ], + }).toString(); + } catch (err: unknown) { + if (hasStdErr(err)) { + stderr = err.stderr; + } + } + if (stderr.indexOf("musl") > -1) { + return true; + } + return false; + } + + function getPlatform(): NodeJS.Platform | "linux-musl" { + if (platform === "linux") { + return isMusl() ? "linux-musl" : "linux"; + } + + return platform; + } + + const binPath = PLATFORMS?.[getPlatform()]?.[arch]; + if (!binPath) { + return null; + } + + return require.resolve(binPath); +} + +function hasStdErr(err: unknown): err is { stderr: string } { + return !!(err as any)?.stderr; +} diff --git a/packages/@postgres-language-server/backend-jsonrpc/src/index.ts b/packages/@postgres-language-server/backend-jsonrpc/src/index.ts new file mode 100644 index 000000000..f0f11151f --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/src/index.ts @@ -0,0 +1,46 @@ +import { getCommand } from "./command"; +import { createSocket } from "./socket"; +import { Transport } from "./transport"; +import { type Workspace, createWorkspace as wrapTransport } from "./workspace"; + +/** + * Create an instance of the Workspace client connected to a remote daemon + * instance through the JSON-RPC protocol + * + * @returns A Workspace client, or null if the underlying platform is not supported + */ +export async function createWorkspace(): Promise { + const command = 
getCommand(); + if (!command) { + return null; + } + + return createWorkspaceWithBinary(command); +} + +/** + * Create an instance of the Workspace client connected to a remote daemon + * instance through the JSON-RPC protocol, using the provided command to spawn + * the daemon if necessary + * + * @param command Path to the binary + * @returns A Workspace client, or null if the underlying platform is not supported + */ +export async function createWorkspaceWithBinary( + command: string, +): Promise { + const socket = await createSocket(command); + const transport = new Transport(socket); + + await transport.request("initialize", { + capabilities: {}, + client_info: { + name: "@postgres-language-server/backend-jsonrpc", + version: "0.0.0", + }, + }); + + return wrapTransport(transport); +} + +export * from "./workspace"; diff --git a/packages/@postgres-language-server/backend-jsonrpc/src/socket.ts b/packages/@postgres-language-server/backend-jsonrpc/src/socket.ts new file mode 100644 index 000000000..6fd2902f9 --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/src/socket.ts @@ -0,0 +1,47 @@ +import { spawn } from "node:child_process"; +import { type Socket, connect } from "node:net"; + +function getSocket(command: string): Promise { + return new Promise((resolve, reject) => { + const process = spawn(command, ["__print_socket"], { + stdio: "pipe", + }); + + process.on("error", reject); + + let pipeName = ""; + process.stdout.on("data", (data) => { + pipeName += data.toString("utf-8"); + }); + + process.on("exit", (code) => { + if (code === 0) { + resolve(pipeName.trimEnd()); + } else { + reject( + new Error( + `Command '${command} __print_socket' exited with code ${code}`, + ), + ); + } + }); + }); +} + +/** + * Ensure the daemon server is running and create a Socket connected to the RPC channel + * + * @param command Path to the daemon binary + * @returns Socket instance connected to the daemon + */ +export async function createSocket(command: 
string): Promise { + const path = await getSocket(command); + const socket = connect(path); + + await new Promise((resolve, reject) => { + socket.once("error", reject); + socket.once("ready", resolve); + }); + + return socket; +} diff --git a/packages/@postgres-language-server/backend-jsonrpc/src/transport.ts b/packages/@postgres-language-server/backend-jsonrpc/src/transport.ts new file mode 100644 index 000000000..b1cdad445 --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/src/transport.ts @@ -0,0 +1,293 @@ +interface Socket { + on(event: "data", fn: (data: Buffer) => void): void; + write(data: Buffer): void; + destroy(): void; +} + +enum ReaderStateKind { + Header = 0, + Body = 1, +} + +interface ReaderStateHeader { + readonly kind: ReaderStateKind.Header; + contentLength?: number; + contentType?: string; +} + +interface ReaderStateBody { + readonly kind: ReaderStateKind.Body; + readonly contentLength: number; + readonly contentType?: string; +} + +type ReaderState = ReaderStateHeader | ReaderStateBody; + +interface JsonRpcRequest { + jsonrpc: "2.0"; + id: number; + method: string; + params: unknown; +} + +function isJsonRpcRequest(message: JsonRpcMessage): message is JsonRpcRequest { + return ( + "id" in message && + typeof message.id === "number" && + "method" in message && + typeof message.method === "string" && + "params" in message + ); +} + +interface JsonRpcNotification { + jsonrpc: "2.0"; + method: string; + params: unknown; +} + +function isJsonRpcNotification( + message: JsonRpcMessage, +): message is JsonRpcNotification { + return ( + !("id" in message) && + "method" in message && + typeof message.method === "string" && + "params" in message + ); +} + +type JsonRpcResponse = + | { + jsonrpc: "2.0"; + id: number; + result: unknown; + } + | { + jsonrpc: "2.0"; + id: number; + error: unknown; + }; + +function isJsonRpcResponse( + message: JsonRpcMessage, +): message is JsonRpcResponse { + return ( + "id" in message && + typeof 
message.id === "number" && + !("method" in message) && + ("result" in message || "error" in message) + ); +} + +type JsonRpcMessage = JsonRpcRequest | JsonRpcNotification | JsonRpcResponse; + +function isJsonRpcMessage(message: unknown): message is JsonRpcMessage { + return ( + typeof message === "object" && + message !== null && + "jsonrpc" in message && + message.jsonrpc === "2.0" + ); +} + +interface PendingRequest { + resolve(result: unknown): void; + reject(error: unknown): void; +} + +const MIME_JSONRPC = "application/vscode-jsonrpc"; + +/** + * Implements the daemon server JSON-RPC protocol over a Socket instance + */ +export class Transport { + /** + * Counter incremented for each outgoing request to generate a unique ID + */ + private nextRequestId = 0; + + /** + * Storage for the promise resolver functions of pending requests, + * keyed by ID of the request + */ + private pendingRequests: Map = new Map(); + + constructor(private socket: Socket) { + socket.on("data", (data) => { + this.processIncoming(data); + }); + } + + /** + * Send a request to the remote server + * + * @param method Name of the remote method to call + * @param params Parameters object the remote method should be called with + * @return Promise resolving with the value returned by the remote method, or rejecting with an RPC error if the remote call failed + */ + // biome-ignore lint/suspicious/noExplicitAny: if i change it to Promise typescript breaks + request(method: string, params: unknown): Promise { + return new Promise((resolve, reject) => { + const id = this.nextRequestId++; + this.pendingRequests.set(id, { resolve, reject }); + this.sendMessage({ + jsonrpc: "2.0", + id, + method, + params, + }); + }); + } + + /** + * Send a notification message to the remote server + * + * @param method Name of the remote method to call + * @param params Parameters object the remote method should be called with + */ + notify(method: string, params: unknown) { + this.sendMessage({ + jsonrpc: 
"2.0", + method, + params, + }); + } + + /** + * Destroy the internal socket instance for this Transport + */ + destroy() { + this.socket.destroy(); + } + + private sendMessage(message: JsonRpcMessage) { + const body = Buffer.from(JSON.stringify(message)); + const headers = Buffer.from( + `Content-Length: ${body.length}\r\nContent-Type: ${MIME_JSONRPC};charset=utf-8\r\n\r\n`, + ); + this.socket.write(Buffer.concat([headers, body])); + } + + private pendingData = Buffer.from(""); + private readerState: ReaderState = { + kind: ReaderStateKind.Header, + }; + + private processIncoming(data: Buffer) { + this.pendingData = Buffer.concat([this.pendingData, data]); + + while (this.pendingData.length > 0) { + if (this.readerState.kind === ReaderStateKind.Header) { + const lineBreakIndex = this.pendingData.indexOf("\n"); + if (lineBreakIndex < 0) { + break; + } + + const header = this.pendingData.subarray(0, lineBreakIndex + 1); + this.pendingData = this.pendingData.subarray(lineBreakIndex + 1); + this.processIncomingHeader(this.readerState, header.toString("utf-8")); + } else if (this.pendingData.length >= this.readerState.contentLength) { + const body = this.pendingData.subarray( + 0, + this.readerState.contentLength, + ); + this.pendingData = this.pendingData.subarray( + this.readerState.contentLength, + ); + this.processIncomingBody(body); + + this.readerState = { + kind: ReaderStateKind.Header, + }; + } else { + break; + } + } + } + + private processIncomingHeader(readerState: ReaderStateHeader, line: string) { + if (line === "\r\n") { + const { contentLength, contentType } = readerState; + if (typeof contentLength !== "number") { + throw new Error( + "incoming message from the remote workspace is missing the Content-Length header", + ); + } + + this.readerState = { + kind: ReaderStateKind.Body, + contentLength, + contentType, + }; + return; + } + + const colonIndex = line.indexOf(":"); + if (colonIndex < 0) { + throw new Error(`could not find colon token in 
"${line}"`); + } + + const headerName = line.substring(0, colonIndex); + const headerValue = line.substring(colonIndex + 1).trim(); + + switch (headerName) { + case "Content-Length": { + const value = Number.parseInt(headerValue); + readerState.contentLength = value; + break; + } + case "Content-Type": { + if (!headerValue.startsWith(MIME_JSONRPC)) { + throw new Error( + `invalid value for Content-Type expected "${MIME_JSONRPC}", got "${headerValue}"`, + ); + } + + readerState.contentType = headerValue; + break; + } + default: + console.warn(`ignoring unknown header "${headerName}"`); + } + } + + private processIncomingBody(buffer: Buffer) { + const data = buffer.toString("utf-8"); + const body = JSON.parse(data); + + if (isJsonRpcMessage(body)) { + if (isJsonRpcRequest(body)) { + // TODO: Not implemented at the moment + return; + } + + if (isJsonRpcNotification(body)) { + // TODO: Not implemented at the moment + return; + } + + if (isJsonRpcResponse(body)) { + const pendingRequest = this.pendingRequests.get(body.id); + if (pendingRequest) { + this.pendingRequests.delete(body.id); + const { resolve, reject } = pendingRequest; + if ("result" in body) { + resolve(body.result); + } else { + reject(body.error); + } + } else { + throw new Error( + `could not find any pending request matching RPC response ID ${body.id}`, + ); + } + return; + } + } + + throw new Error( + `failed to deserialize incoming message from remote workspace, "${data}" is not a valid JSON-RPC message body`, + ); + } +} diff --git a/packages/@postgres-language-server/backend-jsonrpc/src/workspace.ts b/packages/@postgres-language-server/backend-jsonrpc/src/workspace.ts new file mode 100644 index 000000000..24ea5d503 --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/src/workspace.ts @@ -0,0 +1,636 @@ +// Generated file, do not edit by hand, see `xtask/codegen` +import type { Transport } from "./transport"; +export interface IsPathIgnoredParams { + pgt_path: PgTPath; +} +export 
interface PgTPath { + /** + * Determines the kind of the file inside Postgres Tools. Some files are considered as configuration files, others as manifest files, and others as files to handle + */ + kind: FileKind; + path: string; + /** + * Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` filag. + */ + was_written: boolean; +} +export type FileKind = FileKind2[]; +/** + * The priority of the file + */ +export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable"; +export interface RegisterProjectFolderParams { + path?: string; + setAsCurrentWorkspace: boolean; +} +export type ProjectKey = string; +export interface GetFileContentParams { + path: PgTPath; +} +export interface PullDiagnosticsParams { + categories: RuleCategories; + max_diagnostics: number; + only: RuleCode[]; + path: PgTPath; + skip: RuleCode[]; +} +export type RuleCategories = RuleCategory[]; +export type RuleCode = string; +export type RuleCategory = "Lint" | "Action" | "Transformation"; +export interface PullDiagnosticsResult { + diagnostics: Diagnostic[]; + errors: number; + skipped_diagnostics: number; +} +/** + * Serializable representation for a [Diagnostic](super::Diagnostic). + */ +export interface Diagnostic { + advices: Advices; + category?: Category; + description: string; + location: Location; + message: MarkupBuf; + severity: Severity; + source?: Diagnostic; + tags: DiagnosticTags; + verboseAdvices: Advices; +} +/** + * Implementation of [Visitor] collecting serializable [Advice] into a vector. 
+ */ +export interface Advices { + advices: Advice[]; +} +export type Category = + | "lint/safety/addingFieldWithDefault" + | "lint/safety/addingForeignKeyConstraint" + | "lint/safety/addingNotNullField" + | "lint/safety/addingPrimaryKeyConstraint" + | "lint/safety/addingRequiredField" + | "lint/safety/banCharField" + | "lint/safety/banConcurrentIndexCreationInTransaction" + | "lint/safety/banDropColumn" + | "lint/safety/banDropDatabase" + | "lint/safety/banDropNotNull" + | "lint/safety/banDropTable" + | "lint/safety/banTruncateCascade" + | "lint/safety/changingColumnType" + | "lint/safety/constraintMissingNotValid" + | "lint/safety/disallowUniqueConstraint" + | "lint/safety/preferBigInt" + | "lint/safety/preferBigintOverInt" + | "lint/safety/preferBigintOverSmallint" + | "lint/safety/preferIdentity" + | "lint/safety/preferJsonb" + | "lint/safety/preferRobustStmts" + | "lint/safety/preferTextField" + | "lint/safety/preferTimestamptz" + | "lint/safety/renamingColumn" + | "lint/safety/renamingTable" + | "lint/safety/requireConcurrentIndexCreation" + | "lint/safety/requireConcurrentIndexDeletion" + | "lint/safety/transactionNesting" + | "stdin" + | "check" + | "configuration" + | "database/connection" + | "internalError/io" + | "internalError/runtime" + | "internalError/fs" + | "flags/invalid" + | "project" + | "typecheck" + | "plpgsql_check" + | "internalError/panic" + | "syntax" + | "dummy" + | "lint" + | "lint/performance" + | "lint/safety"; +export interface Location { + path?: Resource_for_String; + sourceCode?: string; + span?: TextRange; +} +export type MarkupBuf = MarkupNodeBuf[]; +/** + * The severity to associate to a diagnostic. + */ +export type Severity = "hint" | "information" | "warning" | "error" | "fatal"; +export type DiagnosticTags = DiagnosticTag[]; +/** + * Serializable representation of a [Diagnostic](super::Diagnostic) advice + +See the [Visitor] trait for additional documentation on all the supported advice types. 
+ */ +export type Advice = + | { log: [LogCategory, MarkupBuf] } + | { list: MarkupBuf[] } + | { frame: Location } + | { diff: TextEdit } + | { backtrace: [MarkupBuf, Backtrace] } + | { command: string } + | { group: [MarkupBuf, Advices] }; +/** + * Represents the resource a diagnostic is associated with. + */ +export type Resource_for_String = "argv" | "memory" | { file: string }; +export type TextRange = [TextSize, TextSize]; +export interface MarkupNodeBuf { + content: string; + elements: MarkupElement[]; +} +/** + * Internal enum used to automatically generate bit offsets for [DiagnosticTags] and help with the implementation of `serde` and `schemars` for tags. + */ +export type DiagnosticTag = + | "fixable" + | "internal" + | "unnecessaryCode" + | "deprecatedCode" + | "verbose"; +/** + * The category for a log advice, defines how the message should be presented to the user. + */ +export type LogCategory = "none" | "info" | "warn" | "error"; +export interface TextEdit { + dictionary: string; + ops: CompressedOp[]; +} +export type Backtrace = BacktraceFrame[]; +export type TextSize = number; +/** + * Enumeration of all the supported markup elements + */ +export type MarkupElement = + | "Emphasis" + | "Dim" + | "Italic" + | "Underline" + | "Error" + | "Success" + | "Warn" + | "Info" + | "Debug" + | "Trace" + | "Inverse" + | { Hyperlink: { href: string } }; +export type CompressedOp = + | { diffOp: DiffOp } + | { equalLines: { line_count: number } }; +/** + * Serializable representation of a backtrace frame. + */ +export interface BacktraceFrame { + ip: number; + symbols: BacktraceSymbol[]; +} +export type DiffOp = + | { equal: { range: TextRange } } + | { insert: { range: TextRange } } + | { delete: { range: TextRange } }; +/** + * Serializable representation of a backtrace frame symbol. 
+ */ +export interface BacktraceSymbol { + colno?: number; + filename?: string; + lineno?: number; + name?: string; +} +export interface GetCompletionsParams { + /** + * The File for which a completion is requested. + */ + path: PgTPath; + /** + * The Cursor position in the file for which a completion is requested. + */ + position: TextSize; +} +export interface CompletionsResult { + items: CompletionItem[]; +} +export interface CompletionItem { + completion_text?: CompletionText; + description: string; + detail?: string; + kind: CompletionItemKind; + label: string; + preselected: boolean; + /** + * String used for sorting by LSP clients. + */ + sort_text: string; +} +/** + * The text that the editor should fill in. If `None`, the `label` should be used. Tables, for example, might have different completion_texts: + +label: "users", description: "Schema: auth", completion_text: "auth.users". + */ +export interface CompletionText { + is_snippet: boolean; + /** + * A `range` is required because some editors replace the current token, others naively insert the text. Having a range where start == end makes it an insertion. + */ + range: TextRange; + text: string; +} +export type CompletionItemKind = + | "table" + | "function" + | "column" + | "schema" + | "policy" + | "role"; +export interface UpdateSettingsParams { + configuration: PartialConfiguration; + gitignore_matches: string[]; + vcs_base_path?: string; + workspace_directory?: string; +} +/** + * The configuration that is contained inside the configuration file. + */ +export interface PartialConfiguration { + /** + * A field for the [JSON schema](https://json-schema.org/) specification + */ + $schema?: string; + /** + * The configuration of the database connection + */ + db?: PartialDatabaseConfiguration; + /** + * A list of paths to other JSON files, used to extends the current configuration. 
+ */ + extends?: StringSet; + /** + * The configuration of the filesystem + */ + files?: PartialFilesConfiguration; + /** + * The configuration for the linter + */ + linter?: PartialLinterConfiguration; + /** + * Configure migrations + */ + migrations?: PartialMigrationsConfiguration; + /** + * The configuration for type checking + */ + plpgsqlCheck?: PartialPlPgSqlCheckConfiguration; + /** + * The configuration for type checking + */ + typecheck?: PartialTypecheckConfiguration; + /** + * The configuration of the VCS integration + */ + vcs?: PartialVcsConfiguration; +} +/** + * The configuration of the database connection. + */ +export interface PartialDatabaseConfiguration { + allowStatementExecutionsAgainst?: StringSet; + /** + * The connection timeout in seconds. + */ + connTimeoutSecs?: number; + /** + * The name of the database. + */ + database?: string; + /** + * The host of the database. Required if you want database-related features. All else falls back to sensible defaults. + */ + host?: string; + /** + * The password to connect to the database. + */ + password?: string; + /** + * The port of the database. + */ + port?: number; + /** + * The username to connect to the database. + */ + username?: string; +} +export type StringSet = string[]; +/** + * The configuration of the filesystem + */ +export interface PartialFilesConfiguration { + /** + * A list of Unix shell style patterns. Will ignore files/folders that will match these patterns. + */ + ignore?: StringSet; + /** + * A list of Unix shell style patterns. Will handle only those files/folders that will match these patterns. + */ + include?: StringSet; + /** + * The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB + */ + maxSize?: number; +} +export interface PartialLinterConfiguration { + /** + * if `false`, it disables the feature and the linter won't be executed. 
`true` by default + */ + enabled?: boolean; + /** + * A list of Unix shell style patterns. The formatter will ignore files/folders that will match these patterns. + */ + ignore?: StringSet; + /** + * A list of Unix shell style patterns. The formatter will include files/folders that will match these patterns. + */ + include?: StringSet; + /** + * List of rules + */ + rules?: Rules; +} +/** + * The configuration of the filesystem + */ +export interface PartialMigrationsConfiguration { + /** + * Ignore any migrations before this timestamp + */ + after?: number; + /** + * The directory where the migration files are stored + */ + migrationsDir?: string; +} +/** + * The configuration for type checking. + */ +export interface PartialPlPgSqlCheckConfiguration { + /** + * if `false`, it disables the feature and pglpgsql_check won't be executed. `true` by default + */ + enabled?: boolean; +} +/** + * The configuration for type checking. + */ +export interface PartialTypecheckConfiguration { + /** + * if `false`, it disables the feature and the typechecker won't be executed. `true` by default + */ + enabled?: boolean; + /** + * Default search path schemas for type checking. Can be a list of schema names or glob patterns like ["public", "app_*"]. If not specified, defaults to ["public"]. + */ + searchPath?: StringSet; +} +/** + * Set of properties to integrate with a VCS software. + */ +export interface PartialVcsConfiguration { + /** + * The kind of client. + */ + clientKind?: VcsClientKind; + /** + * The main branch of the project + */ + defaultBranch?: string; + /** + * Whether we should integrate itself with the VCS client + */ + enabled?: boolean; + /** + * The folder where we should check for VCS files. By default, we will use the same folder where `postgres-language-server.jsonc` was found. + +If we can't find the configuration, it will attempt to use the current working directory. 
If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted + */ + root?: string; + /** + * Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. + */ + useIgnoreFile?: boolean; +} +export interface Rules { + /** + * It enables ALL rules. The rules that belong to `nursery` won't be enabled. + */ + all?: boolean; + /** + * It enables the lint rules recommended by Postgres Tools. `true` by default. + */ + recommended?: boolean; + safety?: Safety; +} +export type VcsClientKind = "git"; +/** + * A list of rules that belong to this group + */ +export interface Safety { + /** + * Adding a column with a DEFAULT value may lead to a table rewrite while holding an ACCESS EXCLUSIVE lock. + */ + addingFieldWithDefault?: RuleConfiguration_for_Null; + /** + * Adding a foreign key constraint requires a table scan and a SHARE ROW EXCLUSIVE lock on both tables, which blocks writes. + */ + addingForeignKeyConstraint?: RuleConfiguration_for_Null; + /** + * Setting a column NOT NULL blocks reads while the table is scanned. + */ + addingNotNullField?: RuleConfiguration_for_Null; + /** + * Adding a primary key constraint results in locks and table rewrites. + */ + addingPrimaryKeyConstraint?: RuleConfiguration_for_Null; + /** + * Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. + */ + addingRequiredField?: RuleConfiguration_for_Null; + /** + * It enables ALL rules for this group. + */ + all?: boolean; + /** + * Using CHAR(n) or CHARACTER(n) types is discouraged. + */ + banCharField?: RuleConfiguration_for_Null; + /** + * Concurrent index creation is not allowed within a transaction. + */ + banConcurrentIndexCreationInTransaction?: RuleConfiguration_for_Null; + /** + * Dropping a column may break existing clients. 
+ */ + banDropColumn?: RuleConfiguration_for_Null; + /** + * Dropping a database may break existing clients (and everything else, really). + */ + banDropDatabase?: RuleConfiguration_for_Null; + /** + * Dropping a NOT NULL constraint may break existing clients. + */ + banDropNotNull?: RuleConfiguration_for_Null; + /** + * Dropping a table may break existing clients. + */ + banDropTable?: RuleConfiguration_for_Null; + /** + * Using TRUNCATE's CASCADE option will truncate any tables that are also foreign-keyed to the specified tables. + */ + banTruncateCascade?: RuleConfiguration_for_Null; + /** + * Changing a column type may break existing clients. + */ + changingColumnType?: RuleConfiguration_for_Null; + /** + * Adding constraints without NOT VALID blocks all reads and writes. + */ + constraintMissingNotValid?: RuleConfiguration_for_Null; + /** + * Disallow adding a UNIQUE constraint without using an existing index. + */ + disallowUniqueConstraint?: RuleConfiguration_for_Null; + /** + * Prefer BIGINT over smaller integer types. + */ + preferBigInt?: RuleConfiguration_for_Null; + /** + * Prefer BIGINT over INT/INTEGER types. + */ + preferBigintOverInt?: RuleConfiguration_for_Null; + /** + * Prefer BIGINT over SMALLINT types. + */ + preferBigintOverSmallint?: RuleConfiguration_for_Null; + /** + * Prefer using IDENTITY columns over serial columns. + */ + preferIdentity?: RuleConfiguration_for_Null; + /** + * Prefer JSONB over JSON types. + */ + preferJsonb?: RuleConfiguration_for_Null; + /** + * Prefer statements with guards for robustness in migrations. + */ + preferRobustStmts?: RuleConfiguration_for_Null; + /** + * Prefer using TEXT over VARCHAR(n) types. + */ + preferTextField?: RuleConfiguration_for_Null; + /** + * Prefer TIMESTAMPTZ over TIMESTAMP types. 
+ */ + preferTimestamptz?: RuleConfiguration_for_Null; + /** + * It enables the recommended rules for this group + */ + recommended?: boolean; + /** + * Renaming columns may break existing queries and application code. + */ + renamingColumn?: RuleConfiguration_for_Null; + /** + * Renaming tables may break existing queries and application code. + */ + renamingTable?: RuleConfiguration_for_Null; + /** + * Creating indexes non-concurrently can lock the table for writes. + */ + requireConcurrentIndexCreation?: RuleConfiguration_for_Null; + /** + * Dropping indexes non-concurrently can lock the table for reads. + */ + requireConcurrentIndexDeletion?: RuleConfiguration_for_Null; + /** + * Detects problematic transaction nesting that could lead to unexpected behavior. + */ + transactionNesting?: RuleConfiguration_for_Null; +} +export type RuleConfiguration_for_Null = + | RulePlainConfiguration + | RuleWithOptions_for_Null; +export type RulePlainConfiguration = "warn" | "error" | "info" | "off"; +export interface RuleWithOptions_for_Null { + /** + * The severity of the emitted diagnostics by the rule + */ + level: RulePlainConfiguration; + /** + * Rule's options + */ + options: null; +} +export interface OpenFileParams { + content: string; + path: PgTPath; + version: number; +} +export interface ChangeFileParams { + content: string; + path: PgTPath; + version: number; +} +export interface CloseFileParams { + path: PgTPath; +} +export type Configuration = PartialConfiguration; +export interface Workspace { + isPathIgnored(params: IsPathIgnoredParams): Promise; + registerProjectFolder( + params: RegisterProjectFolderParams, + ): Promise; + getFileContent(params: GetFileContentParams): Promise; + pullDiagnostics( + params: PullDiagnosticsParams, + ): Promise; + getCompletions(params: GetCompletionsParams): Promise; + updateSettings(params: UpdateSettingsParams): Promise; + openFile(params: OpenFileParams): Promise; + changeFile(params: ChangeFileParams): Promise; + 
closeFile(params: CloseFileParams): Promise; + destroy(): void; +} +export function createWorkspace(transport: Transport): Workspace { + return { + isPathIgnored(params) { + return transport.request("pgt/is_path_ignored", params); + }, + registerProjectFolder(params) { + return transport.request("pgt/register_project_folder", params); + }, + getFileContent(params) { + return transport.request("pgt/get_file_content", params); + }, + pullDiagnostics(params) { + return transport.request("pgt/pull_diagnostics", params); + }, + getCompletions(params) { + return transport.request("pgt/get_completions", params); + }, + updateSettings(params) { + return transport.request("pgt/update_settings", params); + }, + openFile(params) { + return transport.request("pgt/open_file", params); + }, + changeFile(params) { + return transport.request("pgt/change_file", params); + }, + closeFile(params) { + return transport.request("pgt/close_file", params); + }, + destroy() { + transport.destroy(); + }, + }; +} diff --git a/packages/@postgres-language-server/backend-jsonrpc/tests/transport.test.mjs b/packages/@postgres-language-server/backend-jsonrpc/tests/transport.test.mjs new file mode 100644 index 000000000..32a103eea --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/tests/transport.test.mjs @@ -0,0 +1,160 @@ +import { describe, expect, it, mock } from "bun:test"; + +import { Transport } from "../src/transport"; + +function makeMessage(body) { + const content = JSON.stringify(body); + return Buffer.from( + `Content-Length: ${content.length}\r\nContent-Type: application/vscode-jsonrpc;charset=utf-8\r\n\r\n${content}`, + ); +} + +describe("Transport Layer", () => { + it("should encode requests into the socket", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + const result = transport.request("method", 
"params"); + + expect(socket.write).toHaveBeenCalledWith( + makeMessage({ + jsonrpc: "2.0", + id: 0, + method: "method", + params: "params", + }), + ); + + onData( + makeMessage({ + jsonrpc: "2.0", + id: 0, + result: "result", + }), + ); + + const response = await result; + expect(response).toBe("result"); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on missing Content-Length headers", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(Buffer.from("\r\n"))).toThrowError( + "incoming message from the remote workspace is missing the Content-Length header", + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on missing colon token", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(Buffer.from("Content-Length\r\n"))).toThrowError( + 'could not find colon token in "Content-Length\r\n"', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on invalid Content-Type", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => + onData(Buffer.from("Content-Type: text/plain\r\n")), + ).toThrowError( + 'invalid value for Content-Type expected "application/vscode-jsonrpc", got "text/plain"', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on unknown request ID", async () => { + let onData = null; + const socket = { + on(event, fn) { + 
expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => + onData(makeMessage({ jsonrpc: "2.0", id: 0, result: "result" })), + ).toThrowError( + "could not find any pending request matching RPC response ID 0", + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on invalid messages", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(makeMessage({}))).toThrowError( + 'failed to deserialize incoming message from remote workspace, "{}" is not a valid JSON-RPC message body', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); +}); diff --git a/packages/@postgres-language-server/backend-jsonrpc/tests/workspace.test.mjs b/packages/@postgres-language-server/backend-jsonrpc/tests/workspace.test.mjs new file mode 100644 index 000000000..6f586d8d9 --- /dev/null +++ b/packages/@postgres-language-server/backend-jsonrpc/tests/workspace.test.mjs @@ -0,0 +1,57 @@ +import { resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { describe, expect, it } from "vitest"; + +import { createWorkspaceWithBinary } from "../src"; + +describe("Workspace API", () => { + it("should process remote requests", async () => { + const extension = process.platform === "win32" ? 
".exe" : ""; + const command = resolve( + fileURLToPath(import.meta.url), + "../../../../..", + `target/release/postgres-language-server${extension}`, + ); + + const workspace = await createWorkspaceWithBinary(command); + workspace.registerProjectFolder({ + setAsCurrentWorkspace: true, + }); + await workspace.openFile({ + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + content: "select 1 from", + version: 0, + }); + + const { diagnostics } = await workspace.pullDiagnostics({ + only: [], + skip: [], + max_diagnostics: 100, + categories: [], + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + }); + + expect(diagnostics).toHaveLength(1); + expect(diagnostics[0].description).toBe( + "Invalid statement: syntax error at end of input", + ); + + await workspace.closeFile({ + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + }); + + workspace.destroy(); + }); +}); diff --git a/packages/@postgres-language-server/cli/bin/postgres-language-server b/packages/@postgres-language-server/cli/bin/postgres-language-server new file mode 100755 index 000000000..ed898fb64 --- /dev/null +++ b/packages/@postgres-language-server/cli/bin/postgres-language-server @@ -0,0 +1,73 @@ +#!/usr/bin/env node +const { platform, arch, env } = process; + +const PLATFORMS = { + win32: { + x64: "@postgres-language-server/cli-x86_64-windows-msvc/postgres-language-server.exe", + arm64: "@postgres-language-server/cli-aarch64-windows-msvc/postgres-language-server.exe", + }, + darwin: { + x64: "@postgres-language-server/cli-x86_64-apple-darwin/postgres-language-server", + arm64: "@postgres-language-server/cli-aarch64-apple-darwin/postgres-language-server", + }, + linux: { + x64: "@postgres-language-server/cli-x86_64-linux-gnu/postgres-language-server", + arm64: "@postgres-language-server/cli-aarch64-linux-gnu/postgres-language-server", + }, + "linux-musl": { + x64: 
"@postgres-language-server/cli-x86_64-linux-musl/postgres-language-server", + // no arm64 build for musl + }, +}; + +function isMusl() { + let stdout; + try { + stdout = execSync("ldd --version", { + stdio: [ + "ignore", // stdin + "pipe", // stdout – glibc systems print here + "pipe", // stderr – musl systems print here + ], + }); + } catch (err) { + stdout = err.stderr; + } + if (typeof stdout === 'string' && stdout.indexOf("musl") > -1) { + return true; + } + return false; +} + +function getPlatform() { + if (platform === "linux") { + return isMusl() ? "linux-musl" : "linux"; + } + + return platform; +} + +const binPath = env.PGLS_BINARY || PLATFORMS?.[getPlatform()]?.[arch]; + +if (binPath) { + const result = require("child_process").spawnSync( + require.resolve(binPath), + process.argv.slice(2), + { + shell: false, + stdio: "inherit", + env, + } + ); + + if (result.error) { + throw result.error; + } + + process.exitCode = result.status; +} else { + console.error( + "The Postgres Language Server CLI package doesn't ship with prebuilt binaries for your platform yet. Please file an issue in the main repository." 
+ ); + process.exitCode = 1; +} diff --git a/packages/@postgres-language-server/cli/package.json b/packages/@postgres-language-server/cli/package.json new file mode 100644 index 000000000..fe9b64535 --- /dev/null +++ b/packages/@postgres-language-server/cli/package.json @@ -0,0 +1,48 @@ +{ + "name": "@postgres-language-server/cli", + "version": "", + "bin": { + "postgres-language-server": "bin/postgres-language-server" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/supabase-community/postgres-language-server.git", + "directory": "packages/@postgres-language-server/cli" + }, + "author": "Supabase Community", + "contributors": [ + { + "name": "Philipp Steinrötter", + "url": "https://github.com/psteinroe" + }, + { + "name": "Julian Domke", + "url": "https://github.com/juleswritescode" + } + ], + "license": "MIT OR Apache-2.0", + "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.", + "files": [ + "bin/postgres-language-server", + "schema.json", + "README.md" + ], + "engines": { + "node": ">=20" + }, + "publishConfig": { + "provenance": true + }, + "optionalDependencies": { + "@postgres-language-server/cli-x86_64-windows-msvc": "", + "@postgres-language-server/cli-aarch64-windows-msvc": "", + "@postgres-language-server/cli-x86_64-apple-darwin": "", + "@postgres-language-server/cli-aarch64-apple-darwin": "", + "@postgres-language-server/cli-x86_64-linux-gnu": "", + "@postgres-language-server/cli-aarch64-linux-gnu": "", + "@postgres-language-server/cli-x86_64-linux-musl": "" + }, + "scripts": { + "test": "bun test" + } +} diff --git a/packages/@postgres-language-server/cli/scripts/generate-packages.mjs b/packages/@postgres-language-server/cli/scripts/generate-packages.mjs new file mode 100644 index 000000000..440304025 --- /dev/null +++ b/packages/@postgres-language-server/cli/scripts/generate-packages.mjs @@ -0,0 +1,293 @@
+import assert from "node:assert"; +import * as fs from "node:fs"; +import { pipeline } from "node:stream"; +import { resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { promisify } from "node:util"; +const streamPipeline = promisify(pipeline); + +const CLI_ROOT = resolve(fileURLToPath(import.meta.url), "../.."); +const PACKAGES_PGLS_ROOT = resolve(CLI_ROOT, ".."); +const PGLS_ROOT = resolve(PACKAGES_PGLS_ROOT, "../.."); +const MANIFEST_PATH = resolve(CLI_ROOT, "package.json"); + +function platformArchCombinations() { + const SUPPORTED_PLATFORMS = [ + "pc-windows-msvc", + "apple-darwin", + "unknown-linux-gnu", + "unknown-linux-musl", + ]; + + const SUPPORTED_ARCHITECTURES = ["x86_64", "aarch64"]; + + return SUPPORTED_PLATFORMS.flatMap((platform) => { + return SUPPORTED_ARCHITECTURES.flatMap((arch) => { + // we do not support MUSL builds on aarch64, as this would + // require difficult cross compilation and most aarch64 users should + // have sufficiently modern glibc versions + if (platform.endsWith("musl") && arch === "aarch64") { + return []; + } + + return { + platform, + arch, + }; + }); + }); +} + +async function downloadSchema(releaseTag, githubToken) { + const assetUrl = `https://github.com/supabase-community/postgres-language-server/releases/download/${releaseTag}/schema.json`; + + const response = await fetch(assetUrl.trim(), { + headers: { + Authorization: `token ${githubToken}`, + Accept: "application/octet-stream", + }, + }); + + if (!response.ok) { + throw new Error(`Failed to Fetch Asset from ${assetUrl}`); + } + + // download to root. 
+ const fileStream = fs.createWriteStream( + resolve(PGLS_ROOT, "schema.json") + ); + + await streamPipeline(response.body, fileStream); + + console.log(`Downloaded schema for ${releaseTag}`); +} + +async function downloadBinary(platform, arch, os, releaseTag, githubToken) { + const buildName = getBuildName(platform, arch); + + const assetUrl = `https://github.com/supabase-community/postgres-language-server/releases/download/${releaseTag}/${buildName}`; + + const response = await fetch(assetUrl.trim(), { + headers: { + Authorization: `token ${githubToken}`, + Accept: "application/octet-stream", + }, + }); + + if (!response.ok) { + const error = await response.text(); + throw new Error( + `Failed to Fetch Asset from ${assetUrl} (Reason: ${error})` + ); + } + + // just download to root. + const fileStream = fs.createWriteStream( + getBinarySource(platform, arch, os) + ); + + await streamPipeline(response.body, fileStream); + + console.log(`Downloaded asset for ${buildName} (v${releaseTag})`); +} + +async function writeManifest(packagePath, version) { + const manifestPath = resolve( + PACKAGES_PGLS_ROOT, + packagePath, + "package.json" + ); + + const manifestData = JSON.parse( + fs.readFileSync(manifestPath).toString("utf-8") + ); + + const nativePackages = platformArchCombinations().map( + ({ platform, arch }) => [getPackageName(platform, arch), version] + ); + + manifestData.version = version; + manifestData.optionalDependencies = Object.fromEntries(nativePackages); + + console.log(`Update manifest ${manifestPath}`); + const content = JSON.stringify(manifestData, null, 2); + + /** + * writeFileSync seemed to not work reliably? + */ + await new Promise((res, rej) => { + fs.writeFile(manifestPath, content, (e) => (e ? 
rej(e) : res())); + }); +} + +async function makePackageDir(platform, arch) { + const buildName = getBuildName(platform, arch); + const packageRoot = resolve(PACKAGES_PGLS_ROOT, buildName); + + await new Promise((res, rej) => { + fs.mkdir(packageRoot, {}, (e) => (e ? rej(e) : res())); + }); +} + +function copyBinaryToNativePackage(platform, arch, os) { + // Update the package.json manifest + const buildName = getBuildName(platform, arch); + const packageRoot = resolve(PACKAGES_PGLS_ROOT, buildName); + const packageName = getPackageName(platform, arch); + + const { version, license, repository, engines } = rootManifest(); + + /** + * We need to map rust triplets to NPM-known values. + * Otherwise, npm will abort the package installation. + */ + const npm_arch = arch === "aarch64" ? "arm64" : "x64"; + let libc = undefined; + let npm_os = undefined; + + switch (os) { + case "linux": { + libc = platform.endsWith("musl") ? "musl" : "gnu"; + npm_os = "linux"; + break; + } + case "windows": { + libc = "msvc"; + npm_os = "win32"; + break; + } + case "darwin": { + libc = undefined; + npm_os = "darwin"; + break; + } + default: { + throw new Error(`Unsupported os: ${os}`); + } + } + + const manifest = JSON.stringify( + { + name: packageName, + version, + license, + repository, + engines, + os: [npm_os], + cpu: [npm_arch], + libc, + }, + null, + 2 + ); + + const ext = getBinaryExt(os); + const manifestPath = resolve(packageRoot, "package.json"); + console.info(`Update manifest ${manifestPath}`); + fs.writeFileSync(manifestPath, manifest); + + // Copy the CLI binary + const binarySource = getBinarySource(platform, arch, os); + const binaryTarget = resolve(packageRoot, `postgres-language-server${ext}`); + + if (!fs.existsSync(binarySource)) { + console.error( + `Source for binary for ${buildName} not found at: ${binarySource}` + ); + process.exit(1); + } + + console.info(`Copy binary ${binaryTarget}`); + fs.copyFileSync(binarySource, binaryTarget); + fs.chmodSync(binaryTarget, 
0o755); +} + +function copySchemaToNativePackage(platform, arch) { + const buildName = getBuildName(platform, arch); + const packageRoot = resolve(PACKAGES_PGLS_ROOT, buildName); + + const schemaSrc = resolve(PGLS_ROOT, "schema.json"); + const schemaTarget = resolve(packageRoot, "schema.json"); + + if (!fs.existsSync(schemaSrc)) { + console.error(`schema.json not found at: ${schemaSrc}`); + process.exit(1); + } + + console.info("Copying schema.json"); + fs.copyFileSync(schemaSrc, schemaTarget); + fs.chmodSync(schemaTarget, 0o666); +} + +function copyReadmeToPackage(packagePath) { + const packageRoot = resolve(PACKAGES_PGLS_ROOT, packagePath); + const readmeSrc = resolve(PGLS_ROOT, "README.md"); + const readmeTarget = resolve(packageRoot, "README.md"); + + if (!fs.existsSync(readmeSrc)) { + console.error(`README.md not found at: ${readmeSrc}`); + process.exit(1); + } + + console.info(`Copying README.md to ${packagePath}`); + fs.copyFileSync(readmeSrc, readmeTarget); + fs.chmodSync(readmeTarget, 0o666); +} + +const rootManifest = () => + JSON.parse(fs.readFileSync(MANIFEST_PATH).toString("utf-8")); + +function getBinaryExt(os) { + return os === "windows" ? ".exe" : ""; +} + +function getBinarySource(platform, arch, os) { + const ext = getBinaryExt(os); + return resolve(PGLS_ROOT, `${getBuildName(platform, arch)}${ext}`); +} + +function getBuildName(platform, arch) { + return `postgres-language-server_${arch}-${platform}`; +} + +function getPackageName(platform, arch) { + // trim the "unknown" from linux and the "pc" from windows + const platformName = platform.split("-").slice(-2).join("-"); + return `@postgres-language-server/cli-${arch}-${platformName}`; +} + +function getOs(platform) { + return platform.split("-").find((_, idx) => idx === 1); +} + +function getVersion(releaseTag, isPrerelease) { + return releaseTag + (isPrerelease ? 
"-rc" : ""); +} + +(async function main() { + const githubToken = process.env.GITHUB_TOKEN; + const releaseTag = process.env.RELEASE_TAG; + assert(githubToken, "GITHUB_TOKEN not defined!"); + assert(releaseTag, "RELEASE_TAG not defined!"); + + const isPrerelease = process.env.PRERELEASE === "true"; + + await downloadSchema(releaseTag, githubToken); + const version = getVersion(releaseTag, isPrerelease); + await writeManifest("cli", version); + await writeManifest("backend-jsonrpc", version); + + // Copy README to main packages + copyReadmeToPackage("cli"); + copyReadmeToPackage("backend-jsonrpc"); + + for (const { platform, arch } of platformArchCombinations()) { + const os = getOs(platform); + await makePackageDir(platform, arch); + await downloadBinary(platform, arch, os, releaseTag, githubToken); + copyBinaryToNativePackage(platform, arch, os); + copySchemaToNativePackage(platform, arch); + } + + process.exit(0); +})(); diff --git a/packages/@postgres-language-server/cli/test/bin.test.js b/packages/@postgres-language-server/cli/test/bin.test.js new file mode 100644 index 000000000..921869e53 --- /dev/null +++ b/packages/@postgres-language-server/cli/test/bin.test.js @@ -0,0 +1,62 @@ +import { describe, it, expect } from "bun:test"; +import { spawn } from "child_process"; +import { join, dirname } from "path"; +import { fileURLToPath } from "url"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const binPath = join(__dirname, "../bin/postgres-language-server"); +const testSqlPath = join(__dirname, "test.sql"); + +describe("postgres-language-server bin", () => { + + it("should check a SQL file successfully", async () => { + const result = await new Promise((resolve) => { + const proc = spawn("node", [binPath, "check", testSqlPath], { + env: { ...process.env }, + }); + + let stdout = ""; + let stderr = ""; + + proc.stdout.on("data", (data) => { + stdout += data.toString(); + }); + + proc.stderr.on("data", (data) => { + stderr += data.toString(); + 
}); + + proc.on("close", (code) => { + resolve({ code, stdout, stderr }); + }); + }); + + expect(result.code).toBe(0); + expect(result.stderr).toBe(""); + }); + + it("should fail when file doesn't exist", async () => { + const result = await new Promise((resolve) => { + const proc = spawn("node", [binPath, "check", "nonexistent.sql"], { + env: { ...process.env }, + }); + + let stdout = ""; + let stderr = ""; + + proc.stdout.on("data", (data) => { + stdout += data.toString(); + }); + + proc.stderr.on("data", (data) => { + stderr += data.toString(); + }); + + proc.on("close", (code) => { + resolve({ code, stdout, stderr }); + }); + }); + + expect(result.code).not.toBe(0); + }); +}); diff --git a/packages/@postgres-language-server/cli/test/test.sql b/packages/@postgres-language-server/cli/test/test.sql new file mode 100644 index 000000000..9e13a3eff --- /dev/null +++ b/packages/@postgres-language-server/cli/test/test.sql @@ -0,0 +1 @@ +select 1; \ No newline at end of file diff --git a/packages/@postgrestools/postgrestools/package.json b/packages/@postgrestools/postgrestools/package.json index 247ba4157..22450371e 100644 --- a/packages/@postgrestools/postgrestools/package.json +++ b/packages/@postgrestools/postgrestools/package.json @@ -24,7 +24,8 @@ "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.", "files": [ "bin/postgrestools", - "schema.json" + "schema.json", + "README.md" ], "engines": { "node": ">=20" diff --git a/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs b/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs index 883ba6fd9..75865c291 100644 --- a/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs +++ b/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs @@ -219,6 +219,35 @@ function copySchemaToNativePackage(platform, arch) { fs.chmodSync(schemaTarget, 0o666); } 
+function copyReadmeToPackage(packagePath) { + const packageRoot = resolve(PACKAGES_POSTGRESTOOLS_ROOT, packagePath); + const readmeSrc = resolve(POSTGRESTOOLS_ROOT, "README.md"); + const readmeTarget = resolve(packageRoot, "README.md"); + + if (!fs.existsSync(readmeSrc)) { + console.error(`README.md not found at: ${readmeSrc}`); + process.exit(1); + } + + console.info(`Copying README.md to ${packagePath}`); + + // Read the original README content + const originalReadme = fs.readFileSync(readmeSrc, 'utf-8'); + + // Add deprecation notice for @postgrestools packages + const deprecationNotice = `> [!WARNING] +> **This package is deprecated.** Please use [\`@postgres-language-server/cli\`](https://www.npmjs.com/package/@postgres-language-server/cli) instead. +> +> The \`@postgrestools\` namespace is being phased out in favor of \`@postgres-language-server\`. All future updates and development will happen in the new package. + +`; + + const modifiedReadme = deprecationNotice + originalReadme; + + fs.writeFileSync(readmeTarget, modifiedReadme, 'utf-8'); + fs.chmodSync(readmeTarget, 0o666); +} + const rootManifest = () => JSON.parse(fs.readFileSync(MANIFEST_PATH).toString("utf-8")); @@ -262,6 +291,10 @@ function getVersion(releaseTag, isPrerelease) { await writeManifest("postgrestools", version); await writeManifest("backend-jsonrpc", version); + // Copy README to main packages + copyReadmeToPackage("postgrestools"); + copyReadmeToPackage("backend-jsonrpc"); + for (const { platform, arch } of platformArchCombinations()) { const os = getOs(platform); await makePackageDir(platform, arch);