From 654610ea099979de8028fc9e19a9dd17887ffa3b Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 17:56:47 +0300 Subject: [PATCH 01/36] chore(repo): configure TypeScript project references --- packages/core/postgrest-js/test/db/docker-compose.yml | 2 +- packages/core/postgrest-js/test/transforms.test.ts | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/core/postgrest-js/test/db/docker-compose.yml b/packages/core/postgrest-js/test/db/docker-compose.yml index c69a68e2e..285733de0 100644 --- a/packages/core/postgrest-js/test/db/docker-compose.yml +++ b/packages/core/postgrest-js/test/db/docker-compose.yml @@ -29,7 +29,7 @@ services: depends_on: - db db: - image: supabase/postgres:15.1.0.37 + image: postgis/postgis:15-3.4-alpine ports: - '5432:5432' volumes: diff --git a/packages/core/postgrest-js/test/transforms.test.ts b/packages/core/postgrest-js/test/transforms.test.ts index 0294b6746..f775ff2e8 100644 --- a/packages/core/postgrest-js/test/transforms.test.ts +++ b/packages/core/postgrest-js/test/transforms.test.ts @@ -405,7 +405,6 @@ test('explain with options', async () => { "Plan": Any, "Query Identifier": Any, "Settings": Object { - "effective_cache_size": "128MB", "search_path": "\\"public\\", \\"extensions\\"", }, }, From 533ab0666e77d8158a87e2ac990b4efa48174bdb Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 14:09:23 +0300 Subject: [PATCH 02/36] chore(repo): configure TypeScript project references --- package-lock.json | 8 ++++++++ packages/core/functions-js/jest.config.ts | 5 +++++ packages/core/functions-js/tsconfig.test.json | 9 +++++++++ 3 files changed, 22 insertions(+) create mode 100644 packages/core/functions-js/tsconfig.test.json diff --git a/package-lock.json b/package-lock.json index fd5c568a7..60a27b84c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -37209,6 +37209,14 @@ "version": "4.0.0", "dev": true, "license": "ISC" + }, + "packages/utils/fetch": { + "name": "@supabase/utils-fetch", + "version": "0.0.1", + "extraneous": true, + "dependencies": { + "tslib": "^2.3.0" + } } } } diff --git a/packages/core/functions-js/jest.config.ts b/packages/core/functions-js/jest.config.ts index 12e789a6d..1ce27bfe2 100644 --- a/packages/core/functions-js/jest.config.ts +++ b/packages/core/functions-js/jest.config.ts @@ -39,5 +39,10 @@ const config: Config.InitialOptions = { statements: 0, }, }, + globals: { + 'ts-jest': { + tsconfig: 'tsconfig.test.json', + }, + }, } export default config diff --git a/packages/core/functions-js/tsconfig.test.json b/packages/core/functions-js/tsconfig.test.json new file mode 100644 index 000000000..24980bda7 --- /dev/null +++ b/packages/core/functions-js/tsconfig.test.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "include": ["src", "test"], + "compilerOptions": { + "composite": false, + "outDir": "dist/test", + "rootDir": "." 
+ } +} From 3fa5101709ad59ca761b9fc6031295d27fdfece3 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 9 Oct 2025 13:25:32 +0300 Subject: [PATCH 03/36] feat(repo): new integrations library --- jest.config.ts | 6 + jest.preset.js | 3 + nx.json | 3 +- package-lock.json | 271 ++++++++---------- package.json | 8 +- .../storage-vectors-js/.eslintrc.json | 30 ++ .../storage-vectors-js/.spec.swcrc | 22 ++ .../integrations/storage-vectors-js/README.md | 11 + .../storage-vectors-js/jest.config.ts | 19 ++ .../storage-vectors-js/package.json | 27 ++ .../storage-vectors-js/src/index.ts | 1 + .../src/lib/storage-vectors-js.spec.ts | 7 + .../src/lib/storage-vectors-js.ts | 3 + .../storage-vectors-js/tsconfig.json | 13 + .../storage-vectors-js/tsconfig.lib.json | 15 + .../storage-vectors-js/tsconfig.spec.json | 14 + 16 files changed, 304 insertions(+), 149 deletions(-) create mode 100644 jest.config.ts create mode 100644 jest.preset.js create mode 100644 packages/integrations/storage-vectors-js/.eslintrc.json create mode 100644 packages/integrations/storage-vectors-js/.spec.swcrc create mode 100644 packages/integrations/storage-vectors-js/README.md create mode 100644 packages/integrations/storage-vectors-js/jest.config.ts create mode 100644 packages/integrations/storage-vectors-js/package.json create mode 100644 packages/integrations/storage-vectors-js/src/index.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts create mode 100644 packages/integrations/storage-vectors-js/tsconfig.json create mode 100644 packages/integrations/storage-vectors-js/tsconfig.lib.json create mode 100644 packages/integrations/storage-vectors-js/tsconfig.spec.json diff --git a/jest.config.ts b/jest.config.ts new file mode 100644 index 000000000..3a5809fbf --- /dev/null +++ b/jest.config.ts @@ -0,0 +1,6 @@ +import type { Config } from 'jest' +import { getJestProjectsAsync } from '@nx/jest' + +export default async (): Promise => ({ + projects: await getJestProjectsAsync(), +}) diff --git a/jest.preset.js b/jest.preset.js new file mode 100644 index 000000000..8cd53f8e9 --- /dev/null +++ b/jest.preset.js @@ -0,0 +1,3 @@ +const nxPreset = require('@nx/jest/preset').default + +module.exports = { ...nxPreset } diff --git a/nx.json b/nx.json index 1f0242833..cf49db157 100644 --- a/nx.json +++ b/nx.json @@ -248,10 +248,11 @@ } ], "release": { - "projects": ["packages/core/*"], + "projects": ["packages/core/*", "storage-vectors-js"], "projectsRelationship": "fixed", "releaseTagPatternCheckAllBranchesWhen": true, "version": { + "preVersionCommand": "npx nx run-many -t build", "conventionalCommits": true }, "changelog": { diff --git a/package-lock.json b/package-lock.json index 60a27b84c..adc538541 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,8 @@ "packages/core/postgrest-js", "packages/core/realtime-js", "packages/core/storage-js", - "packages/core/supabase-js" + "packages/core/supabase-js", + "packages/integrations/*" ], "devDependencies": { "@commitlint/cli": "^19.8.1", @@ -30,8 +31,10 @@ "@nx/web": "21.6.2", "@nx/webpack": "21.6.2", "@supabase/node-fetch": "2.6.15", + "@swc-node/register": "~1.9.1", "@swc/core": "~1.5.7", "@swc/helpers": "~0.5.11", + "@swc/jest": "~0.2.36", "@types/faker": "^5.1.6", "@types/jest": "^29.5.14", "@types/jsonwebtoken": "^8.5.8", @@ -50,7 +53,9 @@ "faker": "^5.3.1", "husky": "^9.1.7", "jest": "^29.7.0", + "jest-environment-node": 
"^29.7.0", "jest-mock-server": "^0.1.0", + "jest-util": "^29.7.0", "jiti": "2.4.2", "jsonc-eslint-parser": "^2.1.0", "jsonwebtoken": "^9.0.0", @@ -58,6 +63,7 @@ "prettier": "^3.6.2", "rimraf": "^6.0.1", "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.1.0", "ts-node": "^10.9.1", "tslib": "^2.3.0", "typedoc": "^0.27.9", @@ -4225,6 +4231,38 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/@jest/create-cache-key-function": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/create-cache-key-function/-/create-cache-key-function-30.2.0.tgz", + "integrity": "sha512-44F4l4Enf+MirJN8X/NhdGkl71k5rBYiwdVlo4HxOwbu0sHV8QKrGEedb1VUU4K3W7fBKE0HGfbn7eZm0Ti3zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/create-cache-key-function/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/diff-sequences": { "version": "30.0.1", "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", @@ -6575,10 +6613,77 @@ "resolved": "packages/core/storage-js", "link": true }, + "node_modules/@supabase/storage-vectors-js": { + "resolved": "packages/integrations/storage-vectors-js", + "link": true + }, "node_modules/@supabase/supabase-js": { "resolved": "packages/core/supabase-js", "link": true }, + "node_modules/@swc-node/core": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@swc-node/core/-/core-1.14.1.tgz", + "integrity": "sha512-jrt5GUaZUU6cmMS+WTJEvGvaB6j1YNKPHPzC2PUi2BjaFbtxURHj6641Az6xN7b665hNniAIdvjxWcRml5yCnw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + }, + "peerDependencies": { + "@swc/core": ">= 1.13.3", + "@swc/types": ">= 0.1" + } + }, + "node_modules/@swc-node/register": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@swc-node/register/-/register-1.9.2.tgz", + "integrity": "sha512-BBjg0QNuEEmJSoU/++JOXhrjWdu3PTyYeJWsvchsI0Aqtj8ICkz/DqlwtXbmZVZ5vuDPpTfFlwDBZe81zgShMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@swc-node/core": "^1.13.1", + "@swc-node/sourcemap-support": "^0.5.0", + "colorette": "^2.0.20", + "debug": "^4.3.4", + "pirates": "^4.0.6", + "tslib": "^2.6.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + }, + "peerDependencies": { + "@swc/core": ">= 1.4.13", + "typescript": ">= 4.3" + } + }, + "node_modules/@swc-node/sourcemap-support": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/@swc-node/sourcemap-support/-/sourcemap-support-0.5.1.tgz", + "integrity": "sha512-JxIvIo/Hrpv0JCHSyRpetAdQ6lB27oFYhv0PKCNf1g2gUXOjpeR1exrXccRxLMuAV5WAmGFBwRnNOJqN38+qtg==", + "dev": true, + "license": "MIT", + "dependencies": { + "source-map-support": "^0.5.21", + "tslib": "^2.6.3" + } + }, + 
"node_modules/@swc-node/sourcemap-support/node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, "node_modules/@swc/core": { "version": "1.5.29", "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.5.29.tgz", @@ -6805,6 +6910,24 @@ "tslib": "^2.8.0" } }, + "node_modules/@swc/jest": { + "version": "0.2.39", + "resolved": "https://registry.npmjs.org/@swc/jest/-/jest-0.2.39.tgz", + "integrity": "sha512-eyokjOwYd0Q8RnMHri+8/FS1HIrIUKK/sRrFp8c1dThUOfNeCWbLmBP1P5VsKdvmkd25JaH+OKYwEYiAYg9YAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/create-cache-key-function": "^30.0.0", + "@swc/counter": "^0.1.3", + "jsonc-parser": "^3.2.0" + }, + "engines": { + "npm": ">= 7.0.0" + }, + "peerDependencies": { + "@swc/core": "*" + } + }, "node_modules/@swc/types": { "version": "0.1.25", "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz", @@ -11128,14 +11251,6 @@ "node": ">=10" } }, - "node_modules/chardet": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz", - "integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==", - "dev": true, - "license": "MIT", - "peer": true - }, "node_modules/check-error": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", @@ -11669,17 +11784,6 @@ "node": ">=8" } }, - "node_modules/cli-width": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", - "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", - "dev": true, - "license": "ISC", - "peer": true, - "engines": { - "node": ">= 12" - } - }, "node_modules/clipanion": { "version": "4.0.0-rc.4", "resolved": "https://registry.npmjs.org/clipanion/-/clipanion-4.0.0-rc.4.tgz", @@ -18249,96 +18353,6 @@ "dev": true, "license": "ISC" }, - "node_modules/inquirer": { - "version": "9.3.8", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-9.3.8.tgz", - "integrity": "sha512-pFGGdaHrmRKMh4WoDDSowddgjT1Vkl90atobmTeSmcPGdYiwikch/m/Ef5wRaiamHejtw0cUUMMerzDUXCci2w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@inquirer/external-editor": "^1.0.2", - "@inquirer/figures": "^1.0.3", - "ansi-escapes": "^4.3.2", - "cli-width": "^4.1.0", - "mute-stream": "1.0.0", - "ora": "^5.4.1", - "run-async": "^3.0.0", - "rxjs": "^7.8.1", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^6.2.0", - "yoctocolors-cjs": "^2.1.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/inquirer/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/inquirer/node_modules/ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "dev": true, - "license": "MIT", - "peer": true, - 
"dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/inquirer/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/inquirer/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/internal-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", @@ -24700,17 +24714,6 @@ "multicast-dns": "cli.js" } }, - "node_modules/mute-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz", - "integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==", - "dev": true, - "license": "ISC", - "peer": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/mz": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", @@ -28651,17 +28654,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/run-async": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-3.0.0.tgz", - "integrity": "sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -31783,7 +31775,6 @@ "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, "license": "0BSD" }, "node_modules/tsscmp": { @@ -33935,20 +33926,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/yoctocolors-cjs": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz", - "integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/zip-stream": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.1.tgz", diff --git a/package.json b/package.json index ffa5d9d97..65a2e4ca5 100644 --- a/package.json +++ b/package.json @@ -31,8 
+31,10 @@ "@nx/web": "21.6.2", "@nx/webpack": "21.6.2", "@supabase/node-fetch": "2.6.15", + "@swc-node/register": "~1.9.1", "@swc/core": "~1.5.7", "@swc/helpers": "~0.5.11", + "@swc/jest": "~0.2.36", "@types/faker": "^5.1.6", "@types/jest": "^29.5.14", "@types/jsonwebtoken": "^8.5.8", @@ -51,7 +53,9 @@ "faker": "^5.3.1", "husky": "^9.1.7", "jest": "^29.7.0", + "jest-environment-node": "^29.7.0", "jest-mock-server": "^0.1.0", + "jest-util": "^29.7.0", "jiti": "2.4.2", "jsonc-eslint-parser": "^2.1.0", "jsonwebtoken": "^9.0.0", @@ -59,6 +63,7 @@ "prettier": "^3.6.2", "rimraf": "^6.0.1", "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.1.0", "ts-node": "^10.9.1", "tslib": "^2.3.0", "typedoc": "^0.27.9", @@ -75,7 +80,8 @@ "packages/core/postgrest-js", "packages/core/realtime-js", "packages/core/storage-js", - "packages/core/supabase-js" + "packages/core/supabase-js", + "packages/integrations/*" ], "nx": { "includedScripts": [], diff --git a/packages/integrations/storage-vectors-js/.eslintrc.json b/packages/integrations/storage-vectors-js/.eslintrc.json new file mode 100644 index 000000000..4f6cb6e9c --- /dev/null +++ b/packages/integrations/storage-vectors-js/.eslintrc.json @@ -0,0 +1,30 @@ +{ + "extends": ["../../../.eslintrc.json"], + "ignorePatterns": ["!**/*", "**/out-tsc"], + "overrides": [ + { + "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], + "rules": {} + }, + { + "files": ["*.ts", "*.tsx"], + "rules": {} + }, + { + "files": ["*.js", "*.jsx"], + "rules": {} + }, + { + "files": ["*.json"], + "parser": "jsonc-eslint-parser", + "rules": { + "@nx/dependency-checks": [ + "error", + { + "ignoredFiles": ["{projectRoot}/eslint.config.{js,cjs,mjs,ts,cts,mts}"] + } + ] + } + } + ] +} diff --git a/packages/integrations/storage-vectors-js/.spec.swcrc b/packages/integrations/storage-vectors-js/.spec.swcrc new file mode 100644 index 000000000..3b52a5376 --- /dev/null +++ b/packages/integrations/storage-vectors-js/.spec.swcrc @@ -0,0 +1,22 @@ +{ + "jsc": { + "target": "es2017", + "parser": { + "syntax": "typescript", + "decorators": true, + "dynamicImport": true + }, + "transform": { + "decoratorMetadata": true, + "legacyDecorator": true + }, + "keepClassNames": true, + "externalHelpers": true, + "loose": true + }, + "module": { + "type": "es6" + }, + "sourceMaps": true, + "exclude": [] +} diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md new file mode 100644 index 000000000..9328fedb2 --- /dev/null +++ b/packages/integrations/storage-vectors-js/README.md @@ -0,0 +1,11 @@ +# storage-vectors-js + +This library was generated with [Nx](https://nx.dev). + +## Building + +Run `nx build storage-vectors-js` to build the library. + +## Running unit tests + +Run `nx test storage-vectors-js` to execute the unit tests via [Jest](https://jestjs.io). 
diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts new file mode 100644 index 000000000..150c862f4 --- /dev/null +++ b/packages/integrations/storage-vectors-js/jest.config.ts @@ -0,0 +1,19 @@ +/* eslint-disable */ +import { readFileSync } from 'fs' + +// Reading the SWC compilation config for the spec files +const swcJestConfig = JSON.parse(readFileSync(`${__dirname}/.spec.swcrc`, 'utf-8')) + +// Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves +swcJestConfig.swcrc = false + +export default { + displayName: 'storage-vectors-js', + preset: '../../../jest.preset.js', + testEnvironment: 'node', + transform: { + '^.+\\.[tj]s$': ['@swc/jest', swcJestConfig], + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: 'test-output/jest/coverage', +} diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json new file mode 100644 index 000000000..0ef6dc4fd --- /dev/null +++ b/packages/integrations/storage-vectors-js/package.json @@ -0,0 +1,27 @@ +{ + "name": "@supabase/storage-vectors-js", + "version": "0.0.1", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "@supabase-js/source": "./src/index.ts", + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "default": "./dist/index.js" + } + }, + "files": [ + "dist", + "!**/*.tsbuildinfo" + ], + "nx": { + "name": "@supabase/storage-vectors-js" + }, + "dependencies": { + "tslib": "^2.3.0" + } +} diff --git a/packages/integrations/storage-vectors-js/src/index.ts b/packages/integrations/storage-vectors-js/src/index.ts new file mode 100644 index 000000000..9c9575a19 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/index.ts @@ -0,0 +1 @@ +export * from './lib/storage-vectors-js.js' diff --git a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts new file mode 100644 index 000000000..8b0fb5da3 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts @@ -0,0 +1,7 @@ +import { storageVectorsJs } from './storage-vectors-js.js' + +describe('storageVectorsJs', () => { + it('should work', () => { + expect(storageVectorsJs()).toEqual('storage-vectors-js') + }) +}) diff --git a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts new file mode 100644 index 000000000..c5218edfd --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts @@ -0,0 +1,3 @@ +export function storageVectorsJs(): string { + return 'storage-vectors-js' +} diff --git a/packages/integrations/storage-vectors-js/tsconfig.json b/packages/integrations/storage-vectors-js/tsconfig.json new file mode 100644 index 000000000..667a3463d --- /dev/null +++ b/packages/integrations/storage-vectors-js/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../../tsconfig.base.json", + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.lib.json" + }, + { + "path": "./tsconfig.spec.json" + } + ] +} diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json new file mode 100644 index 000000000..ebd01ed68 --- /dev/null +++ 
b/packages/integrations/storage-vectors-js/tsconfig.lib.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "baseUrl": ".", + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", + "emitDeclarationOnly": false, + "forceConsistentCasingInFileNames": true, + "types": ["node"] + }, + "include": ["src/**/*.ts"], + "references": [], + "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"] +} diff --git a/packages/integrations/storage-vectors-js/tsconfig.spec.json b/packages/integrations/storage-vectors-js/tsconfig.spec.json new file mode 100644 index 000000000..74b7e8b05 --- /dev/null +++ b/packages/integrations/storage-vectors-js/tsconfig.spec.json @@ -0,0 +1,14 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./out-tsc/jest", + "types": ["jest", "node"], + "forceConsistentCasingInFileNames": true + }, + "include": ["jest.config.ts", "src/**/*.test.ts", "src/**/*.spec.ts", "src/**/*.d.ts"], + "references": [ + { + "path": "./tsconfig.lib.json" + } + ] +} From e14ed55b8f95c3c89edecc217a420bf7b659d6df Mon Sep 17 00:00:00 2001 From: fenos Date: Wed, 15 Oct 2025 11:46:18 +0200 Subject: [PATCH 04/36] feat(storage): implement vector client --- .../integrations/storage-vectors-js/README.md | 403 +++++++++- .../storage-vectors-js/jest.config.cjs | 40 + .../storage-vectors-js/jest.config.ts | 19 - .../storage-vectors-js/package.json | 13 +- .../src/__tests__/bucket-api.spec.ts | 261 +++++++ .../src/__tests__/e2e-workflow.spec.ts | 487 ++++++++++++ .../src/__tests__/helpers.ts | 164 ++++ .../src/__tests__/index-api.spec.ts | 424 +++++++++++ .../src/__tests__/mock-server.ts | 698 ++++++++++++++++++ .../storage-vectors-js/src/__tests__/setup.ts | 75 ++ .../src/__tests__/vector-data-api.spec.ts | 648 ++++++++++++++++ .../storage-vectors-js/src/index.ts | 64 +- .../src/lib/StorageVectorsClient.ts | 396 ++++++++++ .../src/lib/VectorBucketApi.ts | 212 ++++++ .../src/lib/VectorDataApi.ts | 356 +++++++++ .../src/lib/VectorIndexApi.ts | 251 +++++++ .../storage-vectors-js/src/lib/constants.ts | 8 + .../storage-vectors-js/src/lib/errors.ts | 78 ++ .../storage-vectors-js/src/lib/fetch.ts | 198 +++++ .../storage-vectors-js/src/lib/helpers.ts | 90 +++ .../src/lib/storage-vectors-js.spec.ts | 7 - .../src/lib/storage-vectors-js.ts | 3 - .../storage-vectors-js/src/lib/types.ts | 299 ++++++++ .../storage-vectors-js/tsconfig.lib.json | 6 + .../storage-vectors-js/tsconfig.spec.json | 33 +- 25 files changed, 5189 insertions(+), 44 deletions(-) create mode 100644 packages/integrations/storage-vectors-js/jest.config.cjs delete mode 100644 packages/integrations/storage-vectors-js/jest.config.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/helpers.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/setup.ts create mode 100644 packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts 
create mode 100644 packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/constants.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/errors.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/fetch.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/helpers.ts delete mode 100644 packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts delete mode 100644 packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts create mode 100644 packages/integrations/storage-vectors-js/src/lib/types.ts diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md index 9328fedb2..28a93e200 100644 --- a/packages/integrations/storage-vectors-js/README.md +++ b/packages/integrations/storage-vectors-js/README.md @@ -1,11 +1,402 @@ -# storage-vectors-js +# @supabase/storage-vectors-js -This library was generated with [Nx](https://nx.dev). +TypeScript client library for Supabase S3 Vector Buckets - a bottomless database service for storing and querying high-dimensional embeddings backed by S3 Vectors. -## Building +## Installation -Run `nx build storage-vectors-js` to build the library. +```bash +npm install @supabase/storage-vectors-js +``` -## Running unit tests +## Features -Run `nx test storage-vectors-js` to execute the unit tests via [Jest](https://jestjs.io). +- **Vector Buckets**: Organize vector indexes into logical containers +- **Vector Indexes**: Define schemas with configurable dimensions and distance metrics +- **Batch Operations**: Insert/update/delete up to 500 vectors per request +- **Similarity Search**: Query for nearest neighbors using cosine, euclidean, or dot product distance +- **Metadata Filtering**: Store and filter vectors by arbitrary JSON metadata +- **Pagination**: Efficiently scan large vector datasets +- **Parallel Scanning**: Distribute scans across multiple workers for high throughput +- **TypeScript**: Full type safety with comprehensive type definitions +- **Cross-platform**: Works in Node.js, browsers, and edge runtimes + +## Quick Start + +```typescript +import { StorageVectorsClient } from '@supabase/storage-vectors-js' + +// Initialize client +const client = new StorageVectorsClient('https://api.example.com', { + headers: { 'Authorization': 'Bearer YOUR_TOKEN' } +}) + +// Create a vector bucket +await client.createVectorBucket('embeddings-prod') + +// Create an index +const bucket = client.from('embeddings-prod') +await bucket.createIndex({ + indexName: 'documents-openai', + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine' +}) + +// Insert vectors +const index = bucket.index('documents-openai') +await index.putVectors({ + vectors: [ + { + key: 'doc-1', + data: { float32: [0.1, 0.2, 0.3, /* ...1536 dimensions */] }, + metadata: { title: 'Introduction', category: 'docs' } + } + ] +}) + +// Query similar vectors +const { data, error } = await index.queryVectors({ + queryVector: { float32: [0.15, 0.25, 0.35, /* ...1536 dimensions */] }, + topK: 5, + returnDistance: true, + returnMetadata: true +}) + +if (data) { + data.matches.forEach(match => { + console.log(`${match.key}: distance=${match.distance}`) + console.log('Metadata:', match.metadata) + }) +} +``` + +## API Reference + +### Client Initialization + +```typescript +const client = new 
StorageVectorsClient(url, options?) +``` + +**Options:** +- `headers?: Record` - Custom HTTP headers (e.g., Authorization) +- `fetch?: Fetch` - Custom fetch implementation + +### Vector Buckets + +Vector buckets are top-level containers for organizing vector indexes. + +#### Create Bucket + +```typescript +const { data, error } = await client.createVectorBucket('my-bucket') +``` + +#### Get Bucket + +```typescript +const { data, error } = await client.getVectorBucket('my-bucket') +console.log('Created at:', new Date(data.vectorBucket.creationTime! * 1000)) +``` + +#### List Buckets + +```typescript +const { data, error } = await client.listVectorBuckets({ + prefix: 'prod-', + maxResults: 100 +}) + +// Pagination +if (data?.nextToken) { + const next = await client.listVectorBuckets({ nextToken: data.nextToken }) +} +``` + +#### Delete Bucket + +```typescript +// Bucket must be empty (all indexes deleted first) +const { error } = await client.deleteVectorBucket('my-bucket') +``` + +### Vector Indexes + +Vector indexes define the schema for embeddings including dimension and distance metric. + +#### Create Index + +```typescript +const bucket = client.bucket('my-bucket') + +await bucket.createIndex({ + indexName: 'my-index', + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine', // 'cosine' | 'euclidean' | 'dotproduct' + metadataConfiguration: { + nonFilterableMetadataKeys: ['raw_text', 'internal_id'] + } +}) +``` + +**Distance Metrics:** +- `cosine` - Cosine similarity (normalized dot product) +- `euclidean` - Euclidean distance (L2 norm) +- `dotproduct` - Dot product similarity + +#### Get Index + +```typescript +const { data, error } = await bucket.getIndex('my-index') +console.log('Dimension:', data?.index.dimension) +console.log('Distance metric:', data?.index.distanceMetric) +``` + +#### List Indexes + +```typescript +const { data, error } = await bucket.listIndexes({ + prefix: 'documents-', + maxResults: 100 +}) +``` + +#### Delete Index + +```typescript +// Deletes index and all its vectors +await bucket.deleteIndex('my-index') +``` + +### Vector Operations + +#### Insert/Update Vectors (Upsert) + +```typescript +const index = client.bucket('my-bucket').index('my-index') + +await index.putVectors({ + vectors: [ + { + key: 'unique-id-1', + data: { float32: [/* 1536 numbers */] }, + metadata: { + title: 'Document Title', + category: 'technical', + page: 1 + } + }, + // ... up to 500 vectors per request + ] +}) +``` + +**Limitations:** +- 1-500 vectors per request +- Vectors must match index dimension +- Keys must be unique within index + +#### Get Vectors by Key + +```typescript +const { data, error } = await index.getVectors({ + keys: ['doc-1', 'doc-2', 'doc-3'], + returnData: true, // Include embeddings (requires permission) + returnMetadata: true // Include metadata (requires permission) +}) + +data?.vectors.forEach(v => { + console.log(v.key, v.metadata) +}) +``` + +#### Query Similar Vectors (ANN Search) + +```typescript +const { data, error } = await index.queryVectors({ + queryVector: { float32: [/* 1536 numbers */] }, + topK: 10, + filter: { + category: 'technical', + published: true + }, + returnDistance: true, + returnMetadata: true +}) + +// Results ordered by similarity +data?.matches.forEach(match => { + console.log(`${match.key}: distance=${match.distance}`) +}) +``` + +**Filter Syntax:** +The `filter` parameter accepts arbitrary JSON for metadata filtering. 
Non-filterable keys (configured at index creation) cannot be used in filters but can still be returned. + +#### List/Scan Vectors + +```typescript +// Simple pagination +let nextToken: string | undefined +do { + const { data } = await index.listVectors({ + maxResults: 500, + nextToken, + returnMetadata: true + }) + + console.log('Batch:', data?.vectors.length) + nextToken = data?.nextToken +} while (nextToken) + +// Parallel scanning (4 workers) +const workers = [0, 1, 2, 3].map(async (segmentIndex) => { + const { data } = await index.listVectors({ + segmentCount: 4, + segmentIndex, + returnMetadata: true + }) + return data?.vectors || [] +}) + +const results = await Promise.all(workers) +const allVectors = results.flat() +``` + +**Limitations:** +- `maxResults`: 1-1000 (default: 500) +- `segmentCount`: 1-16 +- Response may be limited by 1MB size + +#### Delete Vectors + +```typescript +await index.deleteVectors({ + keys: ['doc-1', 'doc-2', 'doc-3'] + // ... up to 500 keys per request +}) +``` + +## Error Handling + +The library uses a consistent error handling pattern: + +```typescript +const { data, error } = await client.createVectorBucket('my-bucket') + +if (error) { + console.error('Error:', error.message) + console.error('Status:', error.status) + console.error('Code:', error.statusCode) +} +``` + +### Error Codes + +| Code | HTTP | Description | +|------|------|-------------| +| `InternalError` | 500 | Internal server error | +| `S3VectorConflictException` | 409 | Resource already exists | +| `S3VectorNotFoundException` | 404 | Resource not found | +| `S3VectorBucketNotEmpty` | 400 | Bucket contains indexes | +| `S3VectorMaxBucketsExceeded` | 400 | Bucket quota exceeded | +| `S3VectorMaxIndexesExceeded` | 400 | Index quota exceeded | + +### Throwing Errors + +You can configure the client to throw errors instead: + +```typescript +const client = new StorageVectorsClient(url, options) +client.throwOnError() + +try { + const { data } = await client.createVectorBucket('my-bucket') + // data is guaranteed to be present +} catch (error) { + if (error instanceof StorageVectorsApiError) { + console.error('API Error:', error.statusCode) + } +} +``` + +## Advanced Usage + +### Scoped Clients + +Create scoped clients for cleaner code: + +```typescript +// Bucket-scoped operations +const bucket = client.bucket('embeddings-prod') +await bucket.createIndex({ /* ... */ }) +await bucket.listIndexes() + +// Index-scoped operations +const index = bucket.index('documents-openai') +await index.putVectors({ /* ... */ }) +await index.queryVectors({ /* ... */ }) +``` + +### Custom Fetch + +Provide a custom fetch implementation: + +```typescript +import { StorageVectorsClient } from '@supabase/storage-vectors-js' + +const client = new StorageVectorsClient(url, { + fetch: customFetch, + headers: { /* ... */ } +}) +``` + +### Batch Processing + +Process large datasets in batches: + +```typescript +async function insertLargeDataset(vectors: VectorObject[]) { + const batchSize = 500 + + for (let i = 0; i < vectors.length; i += batchSize) { + const batch = vectors.slice(i, i + batchSize) + await index.putVectors({ vectors: batch }) + console.log(`Inserted ${i + batch.length}/${vectors.length}`) + } +} +``` + +### Float32 Validation + +Ensure vectors are properly normalized to float32: + +```typescript +import { normalizeToFloat32 } from '@supabase/storage-vectors-js' + +const vector = normalizeToFloat32([0.1, 0.2, 0.3, /* ... 
*/]) +``` + +## Type Definitions + +The library exports comprehensive TypeScript types: + +```typescript +import type { + VectorBucket, + VectorIndex, + VectorData, + VectorObject, + VectorMatch, + VectorMetadata, + DistanceMetric, + ApiResponse, + StorageVectorsError +} from '@supabase/storage-vectors-js' +``` + +## Requirements + +- Node.js 14+ or modern browser with fetch support +- TypeScript 4.5+ (for type checking) \ No newline at end of file diff --git a/packages/integrations/storage-vectors-js/jest.config.cjs b/packages/integrations/storage-vectors-js/jest.config.cjs new file mode 100644 index 000000000..7bc30a7e7 --- /dev/null +++ b/packages/integrations/storage-vectors-js/jest.config.cjs @@ -0,0 +1,40 @@ +/* eslint-disable */ + +module.exports = { + displayName: 'storage-vectors-js', + preset: '../../../jest.preset.js', + testEnvironment: 'node', + transform: { + '^.+\\.[tj]s$': [ + '@swc/jest', + { + jsc: { + target: 'es2017', + parser: { + syntax: 'typescript', + decorators: true, + dynamicImport: true, + }, + transform: { + decoratorMetadata: true, + legacyDecorator: true, + }, + keepClassNames: true, + externalHelpers: true, + loose: true, + }, + module: { + type: 'commonjs', + }, + sourceMaps: true, + exclude: [], + }, + ], + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: 'test-output/jest/coverage', + testMatch: ['**/__tests__/**/*.spec.ts'], + moduleNameMapper: { + '^(\\.{1,2}/.*)\\.js$': '$1', + }, +} diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts deleted file mode 100644 index 150c862f4..000000000 --- a/packages/integrations/storage-vectors-js/jest.config.ts +++ /dev/null @@ -1,19 +0,0 @@ -/* eslint-disable */ -import { readFileSync } from 'fs' - -// Reading the SWC compilation config for the spec files -const swcJestConfig = JSON.parse(readFileSync(`${__dirname}/.spec.swcrc`, 'utf-8')) - -// Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves -swcJestConfig.swcrc = false - -export default { - displayName: 'storage-vectors-js', - preset: '../../../jest.preset.js', - testEnvironment: 'node', - transform: { - '^.+\\.[tj]s$': ['@swc/jest', swcJestConfig], - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: 'test-output/jest/coverage', -} diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json index 0ef6dc4fd..682fcad9b 100644 --- a/packages/integrations/storage-vectors-js/package.json +++ b/packages/integrations/storage-vectors-js/package.json @@ -18,10 +18,21 @@ "dist", "!**/*.tsbuildinfo" ], + "scripts": { + "test": "jest", + "test:watch": "jest --watch", + "test:coverage": "jest --coverage", + "test:real": "USE_MOCK_SERVER=false jest", + "test:mock": "USE_MOCK_SERVER=true jest" + }, "nx": { "name": "@supabase/storage-vectors-js" }, "dependencies": { - "tslib": "^2.3.0" + "@supabase/node-fetch": "^2.6.13" + }, + "devDependencies": { + "@types/node": "^24.7.2", + "tslib": "^2.8.1" } } diff --git a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts new file mode 100644 index 000000000..b06abb6bd --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts @@ -0,0 +1,261 @@ +/** + * Integration tests for Vector Bucket API + * Tests all bucket operations: create, get, list, delete + */ + +import { createTestClient, setupTest, 
generateTestName, assertSuccessResponse, assertErrorResponse, assertErrorCode } from './helpers' + +describe('VectorBucketApi Integration Tests', () => { + let client: ReturnType<typeof createTestClient> + + beforeEach(() => { + setupTest() + client = createTestClient() + }) + + describe('createVectorBucket', () => { + it('should create a new vector bucket successfully', async () => { + const bucketName = generateTestName('test-bucket') + + const response = await client.createVectorBucket(bucketName) + + assertSuccessResponse(response) + expect(response.data).toEqual({}) + }) + + it('should return conflict error when bucket already exists', async () => { + const bucketName = generateTestName('test-bucket') + + // Create bucket first time + await client.createVectorBucket(bucketName) + + // Try to create again + const response = await client.createVectorBucket(bucketName) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorConflictException') + expect(error.message).toContain('already exists') + }) + + it('should create multiple buckets with different names', async () => { + const bucket1 = generateTestName('test-bucket-1') + const bucket2 = generateTestName('test-bucket-2') + + const response1 = await client.createVectorBucket(bucket1) + const response2 = await client.createVectorBucket(bucket2) + + assertSuccessResponse(response1) + assertSuccessResponse(response2) + }) + }) + + describe('getVectorBucket', () => { + it('should retrieve an existing bucket', async () => { + const bucketName = generateTestName('test-bucket') + + // Create bucket + await client.createVectorBucket(bucketName) + + // Retrieve bucket + const response = await client.getVectorBucket(bucketName) + + const data = assertSuccessResponse(response) + expect(data.vectorBucket).toBeDefined() + expect(data.vectorBucket.vectorBucketName).toBe(bucketName) + expect(data.vectorBucket.creationTime).toBeDefined() + expect(typeof data.vectorBucket.creationTime).toBe('number') + }) + + it('should return not found error for non-existent bucket', async () => { + const response = await client.getVectorBucket('non-existent-bucket') + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + expect(error.message).toContain('not found') + }) + + it('should return bucket with encryption configuration if set', async () => { + const bucketName = generateTestName('test-bucket') + + await client.createVectorBucket(bucketName) + const response = await client.getVectorBucket(bucketName) + + const data = assertSuccessResponse(response) + // Encryption configuration is optional + if (data.vectorBucket.encryptionConfiguration) { + expect(data.vectorBucket.encryptionConfiguration).toHaveProperty('sseType') + } + }) + }) + + describe('listVectorBuckets', () => { + it('should list all buckets', async () => { + const bucket1 = generateTestName('test-bucket-1') + const bucket2 = generateTestName('test-bucket-2') + + await client.createVectorBucket(bucket1) + await client.createVectorBucket(bucket2) + + const response = await client.listVectorBuckets() + + const data = assertSuccessResponse(response) + expect(data.buckets).toBeDefined() + expect(Array.isArray(data.buckets)).toBe(true) + expect(data.buckets.length).toBeGreaterThanOrEqual(2) + + const bucketNames = data.buckets.map(b => b.vectorBucketName) + expect(bucketNames).toContain(bucket1) + expect(bucketNames).toContain(bucket2) + }) + + it('should filter buckets by prefix', async () => { + const prefix = generateTestName('prefix-test') + const
bucket1 = `${prefix}-bucket-1` + const bucket2 = `${prefix}-bucket-2` + const bucket3 = generateTestName('other-bucket') + + await client.createVectorBucket(bucket1) + await client.createVectorBucket(bucket2) + await client.createVectorBucket(bucket3) + + const response = await client.listVectorBuckets({ prefix }) + + const data = assertSuccessResponse(response) + expect(data.buckets.length).toBeGreaterThanOrEqual(2) + + const bucketNames = data.buckets.map(b => b.vectorBucketName) + expect(bucketNames).toContain(bucket1) + expect(bucketNames).toContain(bucket2) + // bucket3 should not be included as it doesn't match prefix + const hasOtherBucket = bucketNames.some(name => name.includes('other-bucket')) + if (hasOtherBucket) { + // If other buckets exist, they should match the prefix + expect(bucketNames.every(name => name.startsWith(prefix))).toBe(true) + } + }) + + it('should support pagination with maxResults', async () => { + const response = await client.listVectorBuckets({ maxResults: 1 }) + + const data = assertSuccessResponse(response) + expect(data.buckets.length).toBeLessThanOrEqual(1) + + if (data.buckets.length === 1 && data.nextToken) { + expect(data.nextToken).toBeDefined() + expect(typeof data.nextToken).toBe('string') + } + }) + + it('should return empty array when no buckets match prefix', async () => { + const response = await client.listVectorBuckets({ + prefix: 'non-existent-prefix-' + Date.now(), + }) + + const data = assertSuccessResponse(response) + expect(data.buckets).toEqual([]) + expect(data.nextToken).toBeUndefined() + }) + }) + + describe('deleteVectorBucket', () => { + it('should delete an empty bucket successfully', async () => { + const bucketName = generateTestName('test-bucket') + + await client.createVectorBucket(bucketName) + + const response = await client.deleteVectorBucket(bucketName) + + assertSuccessResponse(response) + expect(response.data).toEqual({}) + + // Verify bucket is deleted + const getResponse = await client.getVectorBucket(bucketName) + assertErrorResponse(getResponse) + }) + + it('should return not found error for non-existent bucket', async () => { + const response = await client.deleteVectorBucket('non-existent-bucket') + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should return error when bucket is not empty', async () => { + const bucketName = generateTestName('test-bucket') + + await client.createVectorBucket(bucketName) + + // Create an index in the bucket + const bucket = client.from(bucketName) + await bucket.createIndex({ + indexName: 'test-index', + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + // Try to delete bucket with index + const response = await client.deleteVectorBucket(bucketName) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorBucketNotEmpty') + expect(error.message).toContain('not empty') + }) + + it('should successfully delete bucket after removing all indexes', async () => { + const bucketName = generateTestName('test-bucket') + + await client.createVectorBucket(bucketName) + + const bucket = client.from(bucketName) + await bucket.createIndex({ + indexName: 'test-index', + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + // Delete the index first + await bucket.deleteIndex('test-index') + + // Now delete the bucket + const response = await client.deleteVectorBucket(bucketName) + + assertSuccessResponse(response) + }) + }) + + describe('throwOnError mode', () 
=> { + it('should throw error instead of returning error response', async () => { + client.throwOnError() + + await expect(client.getVectorBucket('non-existent-bucket')).rejects.toThrow() + }) + + it('should still return data on success', async () => { + const bucketName = generateTestName('test-bucket') + client.throwOnError() + + await client.createVectorBucket(bucketName) + const response = await client.getVectorBucket(bucketName) + + expect(response.data).toBeDefined() + expect(response.error).toBeNull() + }) + }) + + describe('VectorBucketScope (from)', () => { + it('should create a bucket scope successfully', async () => { + const bucketName = generateTestName('test-bucket') + + await client.createVectorBucket(bucketName) + + const bucketScope = client.from(bucketName) + + expect(bucketScope).toBeDefined() + expect(typeof bucketScope.createIndex).toBe('function') + expect(typeof bucketScope.listIndexes).toBe('function') + expect(typeof bucketScope.index).toBe('function') + }) + }) +}) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts new file mode 100644 index 000000000..39ba7abc6 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts @@ -0,0 +1,487 @@ +/** + * End-to-end workflow integration tests + * Tests complete workflows from bucket creation to vector operations + */ + +import { + createTestClient, + setupTest, + generateTestName, + generateRandomVector, + assertSuccessResponse, +} from './helpers' + +describe('End-to-End Workflow Tests', () => { + let client: ReturnType<typeof createTestClient> + + beforeEach(() => { + setupTest() + client = createTestClient() + }) + + describe('Complete Vector Search Workflow', () => { + it('should complete full workflow: create bucket, index, insert, query, delete', async () => { + const bucketName = generateTestName('e2e-bucket') + const indexName = generateTestName('e2e-index') + + // Step 1: Create bucket + const createBucketResponse = await client.createVectorBucket(bucketName) + assertSuccessResponse(createBucketResponse) + + // Step 2: Verify bucket exists + const getBucketResponse = await client.getVectorBucket(bucketName) + const bucketData = assertSuccessResponse(getBucketResponse) + expect(bucketData.vectorBucket.vectorBucketName).toBe(bucketName) + + // Step 3: Create index + const bucket = client.from(bucketName) + const createIndexResponse = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 384, + distanceMetric: 'cosine', + metadataConfiguration: { + nonFilterableMetadataKeys: ['raw_text'], + }, + }) + assertSuccessResponse(createIndexResponse) + + // Step 4: Verify index exists + const getIndexResponse = await bucket.getIndex(indexName) + const indexData = assertSuccessResponse(getIndexResponse) + expect(indexData.index.indexName).toBe(indexName) + expect(indexData.index.dimension).toBe(384) + expect(indexData.index.distanceMetric).toBe('cosine') + + // Step 5: Insert vectors + const index = bucket.index(indexName) + const documents = [ + { + key: 'doc-1', + data: { float32: generateRandomVector(384) }, + metadata: { + title: 'Introduction to Vector Databases', + category: 'tech', + published: true, + }, + }, + { + key: 'doc-2', + data: { float32: generateRandomVector(384) }, + metadata: { + title: 'Advanced Vector Search Techniques', + category: 'tech', + published: true, + }, + }, + { + key: 'doc-3', + data: { float32: generateRandomVector(384) }, + metadata: { +
title: 'Machine Learning Fundamentals', + category: 'science', + published: false, + }, + }, + ] + + const putResponse = await index.putVectors({ vectors: documents }) + assertSuccessResponse(putResponse) + + // Step 6: Query for similar vectors + const queryResponse = await index.queryVectors({ + queryVector: { float32: generateRandomVector(384) }, + topK: 2, + filter: { published: true }, + returnDistance: true, + returnMetadata: true, + }) + + const queryData = assertSuccessResponse(queryResponse) + expect(queryData.matches.length).toBeGreaterThan(0) + expect(queryData.matches.length).toBeLessThanOrEqual(2) + + // All matches should have published: true + for (const match of queryData.matches) { + expect(match.metadata?.published).toBe(true) + } + + // Step 7: List all vectors + const listResponse = await index.listVectors({ + returnMetadata: true, + }) + + const listData = assertSuccessResponse(listResponse) + expect(listData.vectors.length).toBeGreaterThanOrEqual(3) + + // Step 8: Get specific vectors + const getResponse = await index.getVectors({ + keys: ['doc-1', 'doc-2'], + returnData: true, + returnMetadata: true, + }) + + const getData = assertSuccessResponse(getResponse) + expect(getData.vectors.length).toBe(2) + + // Step 9: Update a vector + const updateResponse = await index.putVectors({ + vectors: [ + { + key: 'doc-1', + data: { float32: generateRandomVector(384) }, + metadata: { + title: 'Updated: Introduction to Vector Databases', + category: 'tech', + published: true, + updated: true, + }, + }, + ], + }) + assertSuccessResponse(updateResponse) + + // Step 10: Verify update + const verifyResponse = await index.getVectors({ + keys: ['doc-1'], + returnMetadata: true, + }) + + const verifyData = assertSuccessResponse(verifyResponse) + expect(verifyData.vectors[0].metadata?.updated).toBe(true) + + // Step 11: Delete some vectors + const deleteResponse = await index.deleteVectors({ + keys: ['doc-3'], + }) + assertSuccessResponse(deleteResponse) + + // Step 12: Verify deletion + const verifyDeleteResponse = await index.getVectors({ + keys: ['doc-3'], + }) + + const verifyDeleteData = assertSuccessResponse(verifyDeleteResponse) + expect(verifyDeleteData.vectors).toEqual([]) + + // Step 13: Delete index + const deleteIndexResponse = await bucket.deleteIndex(indexName) + assertSuccessResponse(deleteIndexResponse) + + // Step 14: Delete bucket + const deleteBucketResponse = await client.deleteVectorBucket(bucketName) + assertSuccessResponse(deleteBucketResponse) + }) + }) + + describe('Multi-Index Workflow', () => { + it('should manage multiple indexes in the same bucket', async () => { + const bucketName = generateTestName('multi-index-bucket') + + // Create bucket + await client.createVectorBucket(bucketName) + const bucket = client.from(bucketName) + + // Create multiple indexes with different configurations + const indexes = [ + { + name: 'embeddings-small', + dimension: 384, + metric: 'cosine' as const, + }, + { + name: 'embeddings-large', + dimension: 1536, + metric: 'euclidean' as const, + }, + { + name: 'embeddings-dotproduct', + dimension: 768, + metric: 'dotproduct' as const, + }, + ] + + for (const indexConfig of indexes) { + const response = await bucket.createIndex({ + indexName: indexConfig.name, + dataType: 'float32', + dimension: indexConfig.dimension, + distanceMetric: indexConfig.metric, + }) + assertSuccessResponse(response) + } + + // List all indexes + const listResponse = await bucket.listIndexes() + const listData = assertSuccessResponse(listResponse) + 
expect(listData.indexes.length).toBeGreaterThanOrEqual(3) + + // Insert vectors into each index + for (const indexConfig of indexes) { + const index = bucket.index(indexConfig.name) + const response = await index.putVectors({ + vectors: [ + { + key: 'vec-1', + data: { float32: generateRandomVector(indexConfig.dimension) }, + metadata: { index: indexConfig.name }, + }, + ], + }) + assertSuccessResponse(response) + } + + // Query each index + for (const indexConfig of indexes) { + const index = bucket.index(indexConfig.name) + const response = await index.queryVectors({ + queryVector: { float32: generateRandomVector(indexConfig.dimension) }, + topK: 1, + returnMetadata: true, + }) + const data = assertSuccessResponse(response) + expect(data.matches.length).toBeGreaterThan(0) + } + + // Cleanup: Delete all indexes + for (const indexConfig of indexes) { + await bucket.deleteIndex(indexConfig.name) + } + + // Delete bucket + await client.deleteVectorBucket(bucketName) + }) + }) + + describe('Semantic Search Workflow', () => { + it('should perform semantic search with metadata filtering', async () => { + const bucketName = generateTestName('semantic-bucket') + const indexName = generateTestName('semantic-index') + + // Setup + await client.createVectorBucket(bucketName) + const bucket = client.from(bucketName) + await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 128, + distanceMetric: 'cosine', + }) + + const index = bucket.index(indexName) + + // Insert documents with semantic embeddings + const documents = [ + { + key: 'article-1', + data: { float32: generateRandomVector(128) }, + metadata: { + type: 'article', + category: 'technology', + tags: ['ai', 'ml'], + year: 2024, + score: 4.5, + }, + }, + { + key: 'article-2', + data: { float32: generateRandomVector(128) }, + metadata: { + type: 'article', + category: 'technology', + tags: ['web', 'dev'], + year: 2023, + score: 4.0, + }, + }, + { + key: 'paper-1', + data: { float32: generateRandomVector(128) }, + metadata: { + type: 'paper', + category: 'science', + tags: ['research', 'ml'], + year: 2024, + score: 5.0, + }, + }, + { + key: 'blog-1', + data: { float32: generateRandomVector(128) }, + metadata: { + type: 'blog', + category: 'technology', + tags: ['tutorial'], + year: 2024, + score: 3.5, + }, + }, + ] + + await index.putVectors({ vectors: documents }) + + // Search 1: Find technology articles + const tech1Response = await index.queryVectors({ + queryVector: { float32: generateRandomVector(128) }, + topK: 10, + filter: { type: 'article', category: 'technology' }, + returnMetadata: true, + }) + + const tech1Data = assertSuccessResponse(tech1Response) + expect(tech1Data.matches.length).toBeGreaterThan(0) + for (const match of tech1Data.matches) { + expect(match.metadata?.type).toBe('article') + expect(match.metadata?.category).toBe('technology') + } + + // Search 2: Find 2024 content + const year2024Response = await index.queryVectors({ + queryVector: { float32: generateRandomVector(128) }, + topK: 10, + filter: { year: 2024 }, + returnMetadata: true, + }) + + const year2024Data = assertSuccessResponse(year2024Response) + expect(year2024Data.matches.length).toBeGreaterThan(0) + for (const match of year2024Data.matches) { + expect(match.metadata?.year).toBe(2024) + } + + // Search 3: Find papers + const papersResponse = await index.queryVectors({ + queryVector: { float32: generateRandomVector(128) }, + topK: 10, + filter: { type: 'paper' }, + returnMetadata: true, + }) + + const papersData = 
assertSuccessResponse(papersResponse) + expect(papersData.matches.length).toBeGreaterThan(0) + for (const match of papersData.matches) { + expect(match.metadata?.type).toBe('paper') + } + + // Cleanup + await bucket.deleteIndex(indexName) + await client.deleteVectorBucket(bucketName) + }) + }) + + describe('Batch Processing Workflow', () => { + it('should handle large-scale batch operations', async () => { + const bucketName = generateTestName('batch-bucket') + const indexName = generateTestName('batch-index') + + // Setup + await client.createVectorBucket(bucketName) + const bucket = client.from(bucketName) + await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 64, + distanceMetric: 'cosine', + }) + + const index = bucket.index(indexName) + + // Insert in batches of 500 + const totalVectors = 1000 + const batchSize = 500 + + for (let i = 0; i < totalVectors; i += batchSize) { + const batch = Array.from( + { length: Math.min(batchSize, totalVectors - i) }, + (_, j) => ({ + key: `vector-${i + j}`, + data: { float32: generateRandomVector(64) }, + metadata: { batch: Math.floor((i + j) / batchSize), index: i + j }, + }) + ) + + const response = await index.putVectors({ vectors: batch }) + assertSuccessResponse(response) + } + + // List all vectors (paginated) + const listResponse = await index.listVectors({ + maxResults: 100, + returnMetadata: true, + }) + + const listData = assertSuccessResponse(listResponse) + expect(listData.vectors.length).toBeGreaterThan(0) + expect(listData.vectors.length).toBeLessThanOrEqual(100) + + // Query for similar vectors + const queryResponse = await index.queryVectors({ + queryVector: { float32: generateRandomVector(64) }, + topK: 10, + returnDistance: true, + }) + + const queryData = assertSuccessResponse(queryResponse) + expect(queryData.matches.length).toBeGreaterThan(0) + expect(queryData.matches.length).toBeLessThanOrEqual(10) + + // Delete in batches + const keysToDelete = Array.from({ length: 100 }, (_, i) => `vector-${i}`) + const deleteResponse = await index.deleteVectors({ keys: keysToDelete }) + assertSuccessResponse(deleteResponse) + + // Verify deletion + const getResponse = await index.getVectors({ keys: keysToDelete.slice(0, 10) }) + const getData = assertSuccessResponse(getResponse) + expect(getData.vectors).toEqual([]) + + // Cleanup + await bucket.deleteIndex(indexName) + await client.deleteVectorBucket(bucketName) + }) + }) + + describe('Error Recovery Workflow', () => { + it('should handle errors gracefully and allow recovery', async () => { + const bucketName = generateTestName('error-bucket') + const indexName = generateTestName('error-index') + + // Create bucket + await client.createVectorBucket(bucketName) + const bucket = client.from(bucketName) + + // Try to create index in non-existent bucket (error) + const badBucket = client.from('non-existent-bucket') + const errorResponse = await badBucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + expect(errorResponse.error).toBeTruthy() + + // Recover: Create index in correct bucket + const goodResponse = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + assertSuccessResponse(goodResponse) + + // Continue normal operations + const index = bucket.index(indexName) + const putResponse = await index.putVectors({ + vectors: [{ key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] } }], + }) + + assertSuccessResponse(putResponse) + + // Cleanup + await 
bucket.deleteIndex(indexName)
+      await client.deleteVectorBucket(bucketName)
+    })
+  })
+})
diff --git a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts
new file mode 100644
index 000000000..779ca1bbb
--- /dev/null
+++ b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts
@@ -0,0 +1,164 @@
+/**
+ * Test helpers and utilities
+ */
+
+///
+
+import { StorageVectorsClient } from '../lib/StorageVectorsClient'
+import { createMockFetch, resetMockStorage } from './mock-server'
+import { getTestConfig } from './setup'
+
+/**
+ * Create a test client based on configuration
+ */
+export function createTestClient(): StorageVectorsClient {
+  const config = getTestConfig()
+
+  if (config.useMockServer) {
+    return new StorageVectorsClient('https://mock.example.com', {
+      fetch: createMockFetch(),
+      headers: {},
+    })
+  }
+
+  if (!config.apiUrl) {
+    throw new Error('STORAGE_VECTORS_API_URL environment variable is required when USE_MOCK_SERVER=false')
+  }
+
+  return new StorageVectorsClient(config.apiUrl, {
+    headers: config.headers,
+  })
+}
+
+/**
+ * Setup before each test
+ */
+export function setupTest() {
+  const config = getTestConfig()
+  if (config.useMockServer) {
+    resetMockStorage()
+  }
+}
+
+/**
+ * Generate unique test names to avoid conflicts
+ */
+export function generateTestName(prefix: string): string {
+  const timestamp = Date.now()
+  const random = Math.random().toString(36).substring(7)
+  return `${prefix}-${timestamp}-${random}`
+}
+
+/**
+ * Sleep utility for tests
+ */
+export function sleep(ms: number): Promise<void> {
+  return new Promise(resolve => setTimeout(resolve, ms))
+}
+
+/**
+ * Retry a function with exponential backoff
+ */
+export async function retry<T>(
+  fn: () => Promise<T>,
+  options: {
+    maxAttempts?: number
+    initialDelay?: number
+    maxDelay?: number
+    factor?: number
+  } = {}
+): Promise<T> {
+  const {
+    maxAttempts = 3,
+    initialDelay = 100,
+    maxDelay = 5000,
+    factor = 2,
+  } = options
+
+  let lastError: Error | undefined
+  let delay = initialDelay
+
+  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+    try {
+      return await fn()
+    } catch (error) {
+      lastError = error as Error
+      if (attempt < maxAttempts) {
+        await sleep(Math.min(delay, maxDelay))
+        delay *= factor
+      }
+    }
+  }
+
+  throw lastError
+}
+
+/**
+ * Assert that an error has a specific status code
+ */
+export function assertErrorCode(error: any, expectedCode: string) {
+  expect(error).toBeTruthy()
+  expect(error.statusCode).toBe(expectedCode)
+}
+
+/**
+ * Assert that data is successfully returned
+ */
+export function assertSuccessResponse<T>(response: { data: T | null; error: any }): T {
+  expect(response.error).toBeNull()
+  expect(response.data).toBeTruthy()
+  return response.data!
+} + +/** + * Assert that an error response is returned + */ +export function assertErrorResponse(response: { data: any; error: any }) { + expect(response.data).toBeNull() + expect(response.error).toBeTruthy() + return response.error +} + +/** + * Generate a random vector of specified dimension + */ +export function generateRandomVector(dimension: number): number[] { + return Array.from({ length: dimension }, () => Math.random()) +} + +/** + * Calculate cosine similarity between two vectors + */ +export function cosineSimilarity(a: number[], b: number[]): number { + if (a.length !== b.length) { + throw new Error('Vectors must have the same dimension') + } + + let dotProduct = 0 + let normA = 0 + let normB = 0 + + for (let i = 0; i < a.length; i++) { + dotProduct += a[i] * b[i] + normA += a[i] * a[i] + normB += b[i] * b[i] + } + + normA = Math.sqrt(normA) + normB = Math.sqrt(normB) + + if (normA === 0 || normB === 0) { + return 0 + } + + return dotProduct / (normA * normB) +} + +/** + * Normalize a vector to unit length + */ +export function normalizeVector(vector: number[]): number[] { + const norm = Math.sqrt(vector.reduce((sum, val) => sum + val * val, 0)) + if (norm === 0) return vector + return vector.map(val => val / norm) +} diff --git a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts new file mode 100644 index 000000000..f86438efe --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts @@ -0,0 +1,424 @@ +/** + * Integration tests for Vector Index API + * Tests all index operations: create, get, list, delete + */ + +import { createTestClient, setupTest, generateTestName, assertSuccessResponse, assertErrorResponse, assertErrorCode } from './helpers' + +describe('VectorIndexApi Integration Tests', () => { + let client: ReturnType + let testBucket: string + + beforeEach(async () => { + setupTest() + client = createTestClient() + testBucket = generateTestName('test-bucket') + await client.createVectorBucket(testBucket) + }) + + describe('createIndex', () => { + it('should create a new index with all required parameters', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine', + }) + + assertSuccessResponse(response) + expect(response.data).toEqual({}) + }) + + it('should create index with euclidean distance metric', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 768, + distanceMetric: 'euclidean', + }) + + assertSuccessResponse(response) + }) + + it('should create index with dotproduct distance metric', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 384, + distanceMetric: 'dotproduct', + }) + + assertSuccessResponse(response) + }) + + it('should create index with metadata configuration', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine', + metadataConfiguration: { + 
nonFilterableMetadataKeys: ['raw_text', 'internal_id'], + }, + }) + + assertSuccessResponse(response) + }) + + it('should return conflict error when index already exists', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + // Create index first time + await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + // Try to create again + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorConflictException') + expect(error.message).toContain('already exists') + }) + + it('should return not found error when bucket does not exist', async () => { + const bucket = client.from('non-existent-bucket') + + const response = await bucket.createIndex({ + indexName: 'test-index', + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should create multiple indexes in the same bucket', async () => { + const index1 = generateTestName('test-index-1') + const index2 = generateTestName('test-index-2') + const bucket = client.from(testBucket) + + const response1 = await bucket.createIndex({ + indexName: index1, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const response2 = await bucket.createIndex({ + indexName: index2, + dataType: 'float32', + dimension: 5, + distanceMetric: 'euclidean', + }) + + assertSuccessResponse(response1) + assertSuccessResponse(response2) + }) + + it('should create indexes with different dimensions', async () => { + const dimensions = [3, 128, 384, 768, 1536, 3072] + const bucket = client.from(testBucket) + + for (const dim of dimensions) { + const indexName = generateTestName(`test-index-${dim}`) + const response = await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: dim, + distanceMetric: 'cosine', + }) + + assertSuccessResponse(response) + } + }) + }) + + describe('getIndex', () => { + it('should retrieve an existing index', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine', + }) + + const response = await bucket.getIndex(indexName) + + const data = assertSuccessResponse(response) + expect(data.index).toBeDefined() + expect(data.index.indexName).toBe(indexName) + expect(data.index.vectorBucketName).toBe(testBucket) + expect(data.index.dataType).toBe('float32') + expect(data.index.dimension).toBe(1536) + expect(data.index.distanceMetric).toBe('cosine') + expect(data.index.creationTime).toBeDefined() + expect(typeof data.index.creationTime).toBe('number') + }) + + it('should retrieve index with metadata configuration', async () => { + const indexName = generateTestName('test-index') + const bucket = client.from(testBucket) + + await bucket.createIndex({ + indexName, + dataType: 'float32', + dimension: 768, + distanceMetric: 'euclidean', + metadataConfiguration: { + nonFilterableMetadataKeys: ['raw_text'], + }, + }) + + const response = await bucket.getIndex(indexName) + + const data = assertSuccessResponse(response) + expect(data.index.metadataConfiguration).toBeDefined() + expect(data.index.metadataConfiguration?.nonFilterableMetadataKeys).toContain('raw_text') + }) 
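+    // Note: keys listed under nonFilterableMetadataKeys (such as 'raw_text' here) are stored and
+    // returned with the index configuration, but they are not intended to be usable as
+    // queryVectors filter criteria.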
+ + it('should return not found error for non-existent index', async () => { + const bucket = client.from(testBucket) + const response = await bucket.getIndex('non-existent-index') + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should return not found error when bucket does not exist', async () => { + const bucket = client.from('non-existent-bucket') + const response = await bucket.getIndex('test-index') + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + }) + + describe('listIndexes', () => { + it('should list all indexes in a bucket', async () => { + const index1 = generateTestName('test-index-1') + const index2 = generateTestName('test-index-2') + const bucket = client.from(testBucket) + + await bucket.createIndex({ + indexName: index1, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + await bucket.createIndex({ + indexName: index2, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const response = await bucket.listIndexes() + + const data = assertSuccessResponse(response) + expect(data.indexes).toBeDefined() + expect(Array.isArray(data.indexes)).toBe(true) + expect(data.indexes.length).toBeGreaterThanOrEqual(2) + + const indexNames = data.indexes.map(i => i.indexName) + expect(indexNames).toContain(index1) + expect(indexNames).toContain(index2) + }) + + it('should filter indexes by prefix', async () => { + const prefix = generateTestName('prefix-test') + const index1 = `${prefix}-index-1` + const index2 = `${prefix}-index-2` + const index3 = generateTestName('other-index') + const bucket = client.from(testBucket) + + await bucket.createIndex({ + indexName: index1, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + await bucket.createIndex({ + indexName: index2, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + await bucket.createIndex({ + indexName: index3, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const response = await bucket.listIndexes({ prefix }) + + const data = assertSuccessResponse(response) + expect(data.indexes.length).toBeGreaterThanOrEqual(2) + + const indexNames = data.indexes.map(i => i.indexName) + expect(indexNames).toContain(index1) + expect(indexNames).toContain(index2) + }) + + it('should support pagination with maxResults', async () => { + const bucket = client.from(testBucket) + + // Create multiple indexes + for (let i = 0; i < 3; i++) { + await bucket.createIndex({ + indexName: generateTestName(`test-index-${i}`), + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + } + + const response = await bucket.listIndexes({ maxResults: 1 }) + + const data = assertSuccessResponse(response) + expect(data.indexes.length).toBeLessThanOrEqual(1) + + if (data.indexes.length === 1 && data.nextToken) { + expect(data.nextToken).toBeDefined() + expect(typeof data.nextToken).toBe('string') + } + }) + + it('should return empty array when no indexes exist', async () => { + const emptyBucket = generateTestName('empty-bucket') + await client.createVectorBucket(emptyBucket) + + const bucket = client.from(emptyBucket) + const response = await bucket.listIndexes() + + const data = assertSuccessResponse(response) + expect(data.indexes).toEqual([]) + }) + + it('should return not found error when bucket does not exist', async () => { + const bucket = client.from('non-existent-bucket') + const response = await 
bucket.listIndexes()
+
+      const error = assertErrorResponse(response)
+      assertErrorCode(error, 'S3VectorNotFoundException')
+    })
+  })
+
+  describe('deleteIndex', () => {
+    it('should delete an index successfully', async () => {
+      const indexName = generateTestName('test-index')
+      const bucket = client.from(testBucket)
+
+      await bucket.createIndex({
+        indexName,
+        dataType: 'float32',
+        dimension: 3,
+        distanceMetric: 'cosine',
+      })
+
+      const response = await bucket.deleteIndex(indexName)
+
+      assertSuccessResponse(response)
+      expect(response.data).toEqual({})
+
+      // Verify index is deleted
+      const getResponse = await bucket.getIndex(indexName)
+      assertErrorResponse(getResponse)
+    })
+
+    it('should delete index with vectors', async () => {
+      const indexName = generateTestName('test-index')
+      const bucket = client.from(testBucket)
+      const index = bucket.index(indexName)
+
+      await bucket.createIndex({
+        indexName,
+        dataType: 'float32',
+        dimension: 3,
+        distanceMetric: 'cosine',
+      })
+
+      // Add some vectors
+      await index.putVectors({
+        vectors: [
+          { key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] } },
+          { key: 'vec-2', data: { float32: [0.4, 0.5, 0.6] } },
+        ],
+      })
+
+      // Delete index (should delete vectors too)
+      const response = await bucket.deleteIndex(indexName)
+
+      assertSuccessResponse(response)
+    })
+
+    it('should return not found error for non-existent index', async () => {
+      const bucket = client.from(testBucket)
+      const response = await bucket.deleteIndex('non-existent-index')
+
+      const error = assertErrorResponse(response)
+      assertErrorCode(error, 'S3VectorNotFoundException')
+    })
+
+    it('should return not found error when bucket does not exist', async () => {
+      const bucket = client.from('non-existent-bucket')
+      const response = await bucket.deleteIndex('test-index')
+
+      const error = assertErrorResponse(response)
+      assertErrorCode(error, 'S3VectorNotFoundException')
+    })
+  })
+
+  describe('VectorIndexScope (index)', () => {
+    it('should create an index scope successfully', async () => {
+      const indexName = generateTestName('test-index')
+      const bucket = client.from(testBucket)
+
+      await bucket.createIndex({
+        indexName,
+        dataType: 'float32',
+        dimension: 3,
+        distanceMetric: 'cosine',
+      })
+
+      const indexScope = bucket.index(indexName)
+
+      expect(indexScope).toBeDefined()
+      expect(typeof indexScope.putVectors).toBe('function')
+      expect(typeof indexScope.getVectors).toBe('function')
+      expect(typeof indexScope.listVectors).toBe('function')
+      expect(typeof indexScope.queryVectors).toBe('function')
+      expect(typeof indexScope.deleteVectors).toBe('function')
+    })
+  })
+})
diff --git a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts
new file mode 100644
index 000000000..d52a1b218
--- /dev/null
+++ b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts
@@ -0,0 +1,698 @@
+/**
+ * Mock server implementation for testing
+ * Provides hardcoded responses for all API endpoints
+ */
+
+///
+
+import { testData } from './setup'
+import type { Fetch } from '../lib/fetch'
+
+interface MockResponse {
+  status: number
+  data?: any
+  error?: {
+    statusCode: string
+    error: string
+    message: string
+  }
+}
+
+/**
+ * In-memory storage for mock data
+ */
+class MockStorage {
+  private buckets = new Set<string>()
+  private indexes = new Map<string, Map<string, any>>() // bucket -> index -> config
+  private vectors = new Map<string, Map<string, any>>() // bucket:index -> key -> vector
+
+  constructor() {
+    // Initialize with test data
+
this.buckets.add(testData.buckets.test) + + const indexConfig = { + indexName: testData.indexes.test, + vectorBucketName: testData.buckets.test, + dataType: 'float32' as const, + dimension: 3, + distanceMetric: 'cosine' as const, + creationTime: Math.floor(Date.now() / 1000), + } + + const bucketIndexes = new Map() + bucketIndexes.set(testData.indexes.test, indexConfig) + this.indexes.set(testData.buckets.test, bucketIndexes) + + // Add sample vectors + const vectorKey = `${testData.buckets.test}:${testData.indexes.test}` + const vectorStorage = new Map() + + vectorStorage.set(testData.vectors.key1, { + key: testData.vectors.key1, + data: { float32: testData.sampleVectors.vector1 }, + metadata: testData.metadata.doc1, + }) + + vectorStorage.set(testData.vectors.key2, { + key: testData.vectors.key2, + data: { float32: testData.sampleVectors.vector2 }, + metadata: testData.metadata.doc2, + }) + + vectorStorage.set(testData.vectors.key3, { + key: testData.vectors.key3, + data: { float32: testData.sampleVectors.vector3 }, + metadata: testData.metadata.doc3, + }) + + this.vectors.set(vectorKey, vectorStorage) + } + + reset() { + this.buckets.clear() + this.indexes.clear() + this.vectors.clear() + } + + // Bucket operations + hasBucket(name: string): boolean { + return this.buckets.has(name) + } + + addBucket(name: string) { + this.buckets.add(name) + this.indexes.set(name, new Map()) + } + + removeBucket(name: string) { + this.buckets.delete(name) + this.indexes.delete(name) + } + + getBuckets(prefix?: string): string[] { + const buckets = Array.from(this.buckets) + if (prefix) { + return buckets.filter(b => b.startsWith(prefix)) + } + return buckets + } + + // Index operations + hasIndex(bucketName: string, indexName: string): boolean { + return this.indexes.get(bucketName)?.has(indexName) ?? 
false + } + + addIndex(bucketName: string, config: any) { + let bucketIndexes = this.indexes.get(bucketName) + if (!bucketIndexes) { + bucketIndexes = new Map() + this.indexes.set(bucketName, bucketIndexes) + } + bucketIndexes.set(config.indexName, config) + + // Initialize vector storage for this index + const vectorKey = `${bucketName}:${config.indexName}` + if (!this.vectors.has(vectorKey)) { + this.vectors.set(vectorKey, new Map()) + } + } + + getIndex(bucketName: string, indexName: string): any { + return this.indexes.get(bucketName)?.get(indexName) + } + + getIndexes(bucketName: string, prefix?: string): any[] { + const bucketIndexes = this.indexes.get(bucketName) + if (!bucketIndexes) return [] + + const indexes = Array.from(bucketIndexes.values()) + if (prefix) { + return indexes.filter(i => i.indexName.startsWith(prefix)) + } + return indexes + } + + removeIndex(bucketName: string, indexName: string) { + this.indexes.get(bucketName)?.delete(indexName) + const vectorKey = `${bucketName}:${indexName}` + this.vectors.delete(vectorKey) + } + + // Vector operations + getVectorStorage(bucketName: string, indexName: string): Map | undefined { + const vectorKey = `${bucketName}:${indexName}` + return this.vectors.get(vectorKey) + } + + putVector(bucketName: string, indexName: string, vector: any) { + const vectorKey = `${bucketName}:${indexName}` + let storage = this.vectors.get(vectorKey) + if (!storage) { + storage = new Map() + this.vectors.set(vectorKey, storage) + } + storage.set(vector.key, vector) + } + + getVector(bucketName: string, indexName: string, key: string): any { + const storage = this.getVectorStorage(bucketName, indexName) + return storage?.get(key) + } + + deleteVector(bucketName: string, indexName: string, key: string) { + const storage = this.getVectorStorage(bucketName, indexName) + storage?.delete(key) + } +} + +const storage = new MockStorage() + +/** + * Mock fetch implementation + */ +export function createMockFetch(): Fetch { + return async (input: string | URL | Request, init?: RequestInit): Promise => { + const url = input instanceof Request ? input.url : input + const urlStr = url.toString() + const endpoint = urlStr.split('/').pop() || '' + const body = init?.body ? 
JSON.parse(init.body as string) : {} + const method = init?.method || 'GET' + + let response: MockResponse + + try { + response = await handleRequest(endpoint, method, body) + } catch (error: any) { + response = { + status: 500, + error: { + statusCode: 'InternalError', + error: 'Internal Server Error', + message: error.message, + }, + } + } + + // Create mock Response object + const responseBody = JSON.stringify(response.error || response.data || {}) + return new Response(responseBody, { + status: response.status, + headers: { + 'Content-Type': 'application/json', + }, + }) as any + } +} + +async function handleRequest( + endpoint: string, + method: string, + body: any +): Promise { + // Bucket endpoints + if (endpoint === 'CreateVectorBucket') { + return handleCreateBucket(body) + } + if (endpoint === 'GetVectorBucket') { + return handleGetBucket(body) + } + if (endpoint === 'ListVectorBuckets') { + return handleListBuckets(body) + } + if (endpoint === 'DeleteVectorBucket') { + return handleDeleteBucket(body) + } + + // Index endpoints + if (endpoint === 'CreateIndex') { + return handleCreateIndex(body) + } + if (endpoint === 'GetIndex') { + return handleGetIndex(body) + } + if (endpoint === 'ListIndexes') { + return handleListIndexes(body) + } + if (endpoint === 'DeleteIndex') { + return handleDeleteIndex(body) + } + + // Vector data endpoints + if (endpoint === 'PutVectors') { + return handlePutVectors(body) + } + if (endpoint === 'GetVectors') { + return handleGetVectors(body) + } + if (endpoint === 'ListVectors') { + return handleListVectors(body) + } + if (endpoint === 'QueryVectors') { + return handleQueryVectors(body) + } + if (endpoint === 'DeleteVectors') { + return handleDeleteVectors(body) + } + + return { + status: 404, + error: { + statusCode: 'NotFound', + error: 'Not Found', + message: `Endpoint not found: ${endpoint}`, + }, + } +} + +// Bucket handlers +function handleCreateBucket(body: any): MockResponse { + const { vectorBucketName } = body + + if (storage.hasBucket(vectorBucketName)) { + return { + status: 409, + error: { + statusCode: 'S3VectorConflictException', + error: 'Conflict', + message: `Bucket '${vectorBucketName}' already exists`, + }, + } + } + + storage.addBucket(vectorBucketName) + return { status: 200, data: {} } +} + +function handleGetBucket(body: any): MockResponse { + const { vectorBucketName } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + return { + status: 200, + data: { + vectorBucket: { + vectorBucketName, + creationTime: Math.floor(Date.now() / 1000), + }, + }, + } +} + +function handleListBuckets(body: any): MockResponse { + const { prefix, maxResults = 100 } = body + const buckets = storage.getBuckets(prefix) + + return { + status: 200, + data: { + buckets: buckets.slice(0, maxResults).map(name => ({ vectorBucketName: name })), + nextToken: buckets.length > maxResults ? 
'mock-next-token' : undefined, + }, + } +} + +function handleDeleteBucket(body: any): MockResponse { + const { vectorBucketName } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + const indexes = storage.getIndexes(vectorBucketName) + if (indexes.length > 0) { + return { + status: 400, + error: { + statusCode: 'S3VectorBucketNotEmpty', + error: 'Bad Request', + message: `Bucket '${vectorBucketName}' is not empty`, + }, + } + } + + storage.removeBucket(vectorBucketName) + return { status: 200, data: {} } +} + +// Index handlers +function handleCreateIndex(body: any): MockResponse { + const { vectorBucketName, indexName } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 409, + error: { + statusCode: 'S3VectorConflictException', + error: 'Conflict', + message: `Index '${indexName}' already exists`, + }, + } + } + + storage.addIndex(vectorBucketName, { + ...body, + creationTime: Math.floor(Date.now() / 1000), + }) + + return { status: 200, data: {} } +} + +function handleGetIndex(body: any): MockResponse { + const { vectorBucketName, indexName } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + const index = storage.getIndex(vectorBucketName, indexName) + if (!index) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + return { + status: 200, + data: { index }, + } +} + +function handleListIndexes(body: any): MockResponse { + const { vectorBucketName, prefix, maxResults = 100 } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + const indexes = storage.getIndexes(vectorBucketName, prefix) + + return { + status: 200, + data: { + indexes: indexes.slice(0, maxResults).map(i => ({ indexName: i.indexName })), + nextToken: indexes.length > maxResults ? 
'mock-next-token' : undefined, + }, + } +} + +function handleDeleteIndex(body: any): MockResponse { + const { vectorBucketName, indexName } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + storage.removeIndex(vectorBucketName, indexName) + return { status: 200, data: {} } +} + +// Vector data handlers +function handlePutVectors(body: any): MockResponse { + const { vectorBucketName, indexName, vectors } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + for (const vector of vectors) { + storage.putVector(vectorBucketName, indexName, vector) + } + + return { status: 200, data: {} } +} + +function handleGetVectors(body: any): MockResponse { + const { vectorBucketName, indexName, keys, returnData = true, returnMetadata = true } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + const vectors = keys + .map((key: string) => { + const vector = storage.getVector(vectorBucketName, indexName, key) + if (!vector) return null + + const result: any = { key: vector.key } + if (returnData) result.data = vector.data + if (returnMetadata) result.metadata = vector.metadata + + return result + }) + .filter(Boolean) + + return { + status: 200, + data: { vectors }, + } +} + +function handleListVectors(body: any): MockResponse { + const { vectorBucketName, indexName, maxResults = 500, returnData = true, returnMetadata = true } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + const vectorStorage = storage.getVectorStorage(vectorBucketName, indexName) + const allVectors = Array.from(vectorStorage?.values() || []) + + const vectors = allVectors.slice(0, maxResults).map(vector => { + const result: any = { key: vector.key } + if (returnData) result.data = vector.data + if (returnMetadata) result.metadata = vector.metadata + return result + }) + + return { + status: 200, + data: { + vectors, + nextToken: allVectors.length > maxResults ? 
'mock-next-token' : undefined, + }, + } +} + +function handleQueryVectors(body: any): MockResponse { + const { + vectorBucketName, + indexName, + queryVector, + topK = 10, + filter, + returnDistance = false, + returnMetadata = true, + } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + const vectorStorage = storage.getVectorStorage(vectorBucketName, indexName) + let allVectors = Array.from(vectorStorage?.values() || []) + + // Apply filter if provided + if (filter) { + allVectors = allVectors.filter(vector => { + if (!vector.metadata) return false + return Object.entries(filter).every(([key, value]) => vector.metadata[key] === value) + }) + } + + // Calculate cosine similarity (simplified mock) + const matches = allVectors + .map((vector, index) => { + const result: any = { key: vector.key } + if (returnDistance) { + // Mock distance calculation + result.distance = 0.1 + (index * 0.05) + } + if (returnMetadata) result.metadata = vector.metadata + return result + }) + .slice(0, topK) + + return { + status: 200, + data: { matches }, + } +} + +function handleDeleteVectors(body: any): MockResponse { + const { vectorBucketName, indexName, keys } = body + + if (!storage.hasBucket(vectorBucketName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Bucket '${vectorBucketName}' not found`, + }, + } + } + + if (!storage.hasIndex(vectorBucketName, indexName)) { + return { + status: 404, + error: { + statusCode: 'S3VectorNotFoundException', + error: 'Not Found', + message: `Index '${indexName}' not found`, + }, + } + } + + for (const key of keys) { + storage.deleteVector(vectorBucketName, indexName, key) + } + + return { status: 200, data: {} } +} + +/** + * Reset mock storage to initial state + */ +export function resetMockStorage() { + storage.reset() + // Re-initialize with default test data + const newStorage = new MockStorage() + Object.assign(storage, newStorage) +} diff --git a/packages/integrations/storage-vectors-js/src/__tests__/setup.ts b/packages/integrations/storage-vectors-js/src/__tests__/setup.ts new file mode 100644 index 000000000..0cba1e8cc --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/__tests__/setup.ts @@ -0,0 +1,75 @@ +/** + * Test configuration and setup + * Supports both real API testing and mock server testing + */ + +/// + +export interface TestConfig { + /** + * Whether to use a mock server or real API + */ + useMockServer: boolean + /** + * Base URL for the API (used when useMockServer is false) + */ + apiUrl?: string + /** + * API headers (e.g., Authorization token) + */ + headers?: Record +} + +/** + * Get test configuration from environment variables + */ +export function getTestConfig(): TestConfig { + const useMockServer = process.env.USE_MOCK_SERVER !== 'false' + const apiUrl = process.env.STORAGE_VECTORS_API_URL + const authToken = process.env.STORAGE_VECTORS_API_TOKEN + + const headers: Record = {} + if (authToken) { + headers['Authorization'] = `Bearer ${authToken}` + } + + return { + useMockServer, + apiUrl: useMockServer ? undefined : apiUrl, + headers: useMockServer ? 
{} : headers, + } +} + +/** + * Shared test data + */ +export const testData = { + buckets: { + test: 'test-bucket', + test2: 'test-bucket-2', + nonExistent: 'non-existent-bucket', + }, + indexes: { + test: 'test-index', + test2: 'test-index-2', + nonExistent: 'non-existent-index', + }, + vectors: { + key1: 'vector-1', + key2: 'vector-2', + key3: 'vector-3', + nonExistent: 'non-existent-vector', + }, + // Sample 3-dimensional vectors for testing + sampleVectors: { + vector1: [0.1, 0.2, 0.3], + vector2: [0.4, 0.5, 0.6], + vector3: [0.7, 0.8, 0.9], + query: [0.15, 0.25, 0.35], + }, + metadata: { + doc1: { title: 'Document 1', category: 'tech', page: 1 }, + doc2: { title: 'Document 2', category: 'science', page: 2 }, + doc3: { title: 'Document 3', category: 'tech', page: 3 }, + }, +} diff --git a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts new file mode 100644 index 000000000..8a5cec608 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts @@ -0,0 +1,648 @@ +/** + * Integration tests for Vector Data API + * Tests all vector operations: put, get, list, query, delete + */ + +import { + createTestClient, + setupTest, + generateTestName, + generateRandomVector, + assertSuccessResponse, + assertErrorResponse, + assertErrorCode, +} from './helpers' + +describe('VectorDataApi Integration Tests', () => { + let client: ReturnType + let testBucket: string + let testIndex: string + + beforeEach(async () => { + setupTest() + client = createTestClient() + testBucket = generateTestName('test-bucket') + testIndex = generateTestName('test-index') + + await client.createVectorBucket(testBucket) + const bucket = client.from(testBucket) + await bucket.createIndex({ + indexName: testIndex, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + }) + + describe('putVectors', () => { + it('should insert a single vector successfully', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.putVectors({ + vectors: [ + { + key: 'vector-1', + data: { float32: [0.1, 0.2, 0.3] }, + metadata: { title: 'Test Vector' }, + }, + ], + }) + + assertSuccessResponse(response) + expect(response.data).toEqual({}) + }) + + it('should insert multiple vectors in batch', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.putVectors({ + vectors: [ + { key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] }, metadata: { id: 1 } }, + { key: 'vec-2', data: { float32: [0.4, 0.5, 0.6] }, metadata: { id: 2 } }, + { key: 'vec-3', data: { float32: [0.7, 0.8, 0.9] }, metadata: { id: 3 } }, + ], + }) + + assertSuccessResponse(response) + }) + + it('should insert vector without metadata', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.putVectors({ + vectors: [{ key: 'vec-no-meta', data: { float32: [0.1, 0.2, 0.3] } }], + }) + + assertSuccessResponse(response) + }) + + it('should upsert (update existing vector)', async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert first time + await index.putVectors({ + vectors: [ + { + key: 'vec-1', + data: { float32: [0.1, 0.2, 0.3] }, + metadata: { version: 1 }, + }, + ], + }) + + // Update same key + const response = await index.putVectors({ + vectors: [ + { + key: 'vec-1', + data: { float32: [0.4, 0.5, 0.6] }, + metadata: { version: 2 }, + }, + ], + }) 
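+      // Re-putting an existing key should overwrite the stored vector data and metadata in
+      // place (upsert semantics); the version check below relies on this.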
+ + assertSuccessResponse(response) + + // Verify updated + const getResponse = await index.getVectors({ + keys: ['vec-1'], + returnData: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(getResponse) + expect(data.vectors[0].metadata?.version).toBe(2) + }) + + it('should insert vectors with complex metadata', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.putVectors({ + vectors: [ + { + key: 'complex-meta', + data: { float32: [0.1, 0.2, 0.3] }, + metadata: { + title: 'Document Title', + category: 'tech', + tags: ['ai', 'ml', 'vectors'], + published: true, + score: 4.5, + nested: { field: 'value' }, + }, + }, + ], + }) + + assertSuccessResponse(response) + }) + + it('should return not found error when bucket does not exist', async () => { + const index = client.from('non-existent-bucket').index(testIndex) + + const response = await index.putVectors({ + vectors: [{ key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] } }], + }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should return not found error when index does not exist', async () => { + const index = client.from(testBucket).index('non-existent-index') + + const response = await index.putVectors({ + vectors: [{ key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] } }], + }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should handle batch size limits', async () => { + const index = client.from(testBucket).index(testIndex) + + // Create a large batch (500 vectors) + const vectors = Array.from({ length: 500 }, (_, i) => ({ + key: `vec-${i}`, + data: { float32: generateRandomVector(3) }, + })) + + const response = await index.putVectors({ vectors }) + + assertSuccessResponse(response) + }) + }) + + describe('getVectors', () => { + beforeEach(async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert test vectors + await index.putVectors({ + vectors: [ + { + key: 'vec-1', + data: { float32: [0.1, 0.2, 0.3] }, + metadata: { title: 'Vector 1' }, + }, + { + key: 'vec-2', + data: { float32: [0.4, 0.5, 0.6] }, + metadata: { title: 'Vector 2' }, + }, + { + key: 'vec-3', + data: { float32: [0.7, 0.8, 0.9] }, + metadata: { title: 'Vector 3' }, + }, + ], + }) + }) + + it('should retrieve vectors by keys', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['vec-1', 'vec-2'], + returnData: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors).toBeDefined() + expect(data.vectors.length).toBe(2) + + const keys = data.vectors.map(v => v.key) + expect(keys).toContain('vec-1') + expect(keys).toContain('vec-2') + }) + + it('should retrieve vectors with data', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['vec-1'], + returnData: true, + returnMetadata: false, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors[0].data).toBeDefined() + expect(data.vectors[0].data?.float32).toEqual([0.1, 0.2, 0.3]) + expect(data.vectors[0].metadata).toBeUndefined() + }) + + it('should retrieve vectors with metadata only', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['vec-1'], + returnData: false, + returnMetadata: true, + }) + + const 
data = assertSuccessResponse(response) + expect(data.vectors[0].data).toBeUndefined() + expect(data.vectors[0].metadata).toBeDefined() + expect(data.vectors[0].metadata?.title).toBe('Vector 1') + }) + + it('should retrieve vectors with keys only', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['vec-1'], + returnData: false, + returnMetadata: false, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors[0].key).toBe('vec-1') + expect(data.vectors[0].data).toBeUndefined() + expect(data.vectors[0].metadata).toBeUndefined() + }) + + it('should return empty array for non-existent keys', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['non-existent-key'], + returnData: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors).toEqual([]) + }) + + it('should retrieve mix of existing and non-existent keys', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.getVectors({ + keys: ['vec-1', 'non-existent', 'vec-2'], + returnData: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors.length).toBe(2) + + const keys = data.vectors.map(v => v.key) + expect(keys).toContain('vec-1') + expect(keys).toContain('vec-2') + expect(keys).not.toContain('non-existent') + }) + }) + + describe('listVectors', () => { + beforeEach(async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert multiple vectors + await index.putVectors({ + vectors: Array.from({ length: 10 }, (_, i) => ({ + key: `vec-${i}`, + data: { float32: generateRandomVector(3) }, + metadata: { index: i }, + })), + }) + }) + + it('should list all vectors in index', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.listVectors({ + returnData: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors).toBeDefined() + expect(data.vectors.length).toBeGreaterThanOrEqual(10) + }) + + it('should list vectors with data', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.listVectors({ + returnData: true, + returnMetadata: false, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors[0].data).toBeDefined() + expect(data.vectors[0].metadata).toBeUndefined() + }) + + it('should list vectors with metadata only', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.listVectors({ + returnData: false, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors[0].data).toBeUndefined() + expect(data.vectors[0].metadata).toBeDefined() + }) + + it('should support pagination with maxResults', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.listVectors({ + maxResults: 5, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.vectors.length).toBeLessThanOrEqual(5) + + if (data.vectors.length === 5 && data.nextToken) { + expect(data.nextToken).toBeDefined() + } + }) + + it('should return empty array for empty index', async () => { + const emptyIndex = generateTestName('empty-index') + const bucket = client.from(testBucket) + + await bucket.createIndex({ + indexName: 
emptyIndex, + dataType: 'float32', + dimension: 3, + distanceMetric: 'cosine', + }) + + const index = bucket.index(emptyIndex) + const response = await index.listVectors() + + const data = assertSuccessResponse(response) + expect(data.vectors).toEqual([]) + }) + }) + + describe('queryVectors', () => { + beforeEach(async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert test vectors with different metadata + await index.putVectors({ + vectors: [ + { + key: 'doc-1', + data: { float32: [0.1, 0.2, 0.3] }, + metadata: { category: 'tech', published: true, score: 5 }, + }, + { + key: 'doc-2', + data: { float32: [0.15, 0.25, 0.35] }, + metadata: { category: 'tech', published: false, score: 3 }, + }, + { + key: 'doc-3', + data: { float32: [0.4, 0.5, 0.6] }, + metadata: { category: 'science', published: true, score: 4 }, + }, + { + key: 'doc-4', + data: { float32: [0.7, 0.8, 0.9] }, + metadata: { category: 'science', published: true, score: 5 }, + }, + ], + }) + }) + + it('should query for similar vectors', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.12, 0.22, 0.32] }, + topK: 3, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.matches).toBeDefined() + expect(Array.isArray(data.matches)).toBe(true) + expect(data.matches.length).toBeGreaterThan(0) + expect(data.matches.length).toBeLessThanOrEqual(3) + }) + + it('should return vectors with distance scores', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.1, 0.2, 0.3] }, + topK: 2, + returnDistance: true, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.matches[0].distance).toBeDefined() + expect(typeof data.matches[0].distance).toBe('number') + }) + + it('should filter by metadata', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.1, 0.2, 0.3] }, + topK: 5, + filter: { category: 'tech' }, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + expect(data.matches.length).toBeGreaterThan(0) + + // All results should match filter + for (const match of data.matches) { + expect(match.metadata?.category).toBe('tech') + } + }) + + it('should filter by multiple metadata fields', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.1, 0.2, 0.3] }, + topK: 5, + filter: { category: 'tech', published: true }, + returnMetadata: true, + }) + + const data = assertSuccessResponse(response) + + for (const match of data.matches) { + expect(match.metadata?.category).toBe('tech') + expect(match.metadata?.published).toBe(true) + } + }) + + it('should respect topK parameter', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.5, 0.5, 0.5] }, + topK: 2, + }) + + const data = assertSuccessResponse(response) + expect(data.matches.length).toBeLessThanOrEqual(2) + }) + + it('should return empty matches when filter matches nothing', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.1, 0.2, 0.3] }, + topK: 5, + filter: { category: 'non-existent' }, + }) + + 
const data = assertSuccessResponse(response) + expect(data.matches).toEqual([]) + }) + + it('should query without metadata in results', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.queryVectors({ + queryVector: { float32: [0.1, 0.2, 0.3] }, + topK: 3, + returnMetadata: false, + returnDistance: true, + }) + + const data = assertSuccessResponse(response) + expect(data.matches[0].key).toBeDefined() + expect(data.matches[0].metadata).toBeUndefined() + }) + }) + + describe('deleteVectors', () => { + beforeEach(async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert test vectors + await index.putVectors({ + vectors: [ + { key: 'vec-1', data: { float32: [0.1, 0.2, 0.3] } }, + { key: 'vec-2', data: { float32: [0.4, 0.5, 0.6] } }, + { key: 'vec-3', data: { float32: [0.7, 0.8, 0.9] } }, + ], + }) + }) + + it('should delete a single vector', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.deleteVectors({ keys: ['vec-1'] }) + + assertSuccessResponse(response) + expect(response.data).toEqual({}) + + // Verify deletion + const getResponse = await index.getVectors({ keys: ['vec-1'] }) + const data = assertSuccessResponse(getResponse) + expect(data.vectors).toEqual([]) + }) + + it('should delete multiple vectors', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.deleteVectors({ + keys: ['vec-1', 'vec-2'], + }) + + assertSuccessResponse(response) + + // Verify deletion + const getResponse = await index.getVectors({ + keys: ['vec-1', 'vec-2', 'vec-3'], + }) + const data = assertSuccessResponse(getResponse) + expect(data.vectors.length).toBe(1) + expect(data.vectors[0].key).toBe('vec-3') + }) + + it('should succeed when deleting non-existent keys', async () => { + const index = client.from(testBucket).index(testIndex) + + const response = await index.deleteVectors({ + keys: ['non-existent-1', 'non-existent-2'], + }) + + assertSuccessResponse(response) + }) + + it('should handle batch deletion limits', async () => { + const index = client.from(testBucket).index(testIndex) + + // Insert many vectors + const vectors = Array.from({ length: 100 }, (_, i) => ({ + key: `batch-vec-${i}`, + data: { float32: generateRandomVector(3) }, + })) + + await index.putVectors({ vectors }) + + // Delete in batch (max 500) + const keysToDelete = vectors.slice(0, 50).map(v => v.key) + const response = await index.deleteVectors({ keys: keysToDelete }) + + assertSuccessResponse(response) + + // Verify deletion + const getResponse = await index.getVectors({ keys: keysToDelete }) + const data = assertSuccessResponse(getResponse) + expect(data.vectors).toEqual([]) + }) + + it('should return not found error when bucket does not exist', async () => { + const index = client.from('non-existent-bucket').index(testIndex) + + const response = await index.deleteVectors({ keys: ['vec-1'] }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + + it('should return not found error when index does not exist', async () => { + const index = client.from(testBucket).index('non-existent-index') + + const response = await index.deleteVectors({ keys: ['vec-1'] }) + + const error = assertErrorResponse(response) + assertErrorCode(error, 'S3VectorNotFoundException') + }) + }) + + describe('Batch operations', () => { + it('should handle large batch inserts efficiently', async () => { + const index = 
client.from(testBucket).index(testIndex) + + // Insert 500 vectors (max batch size) + const vectors = Array.from({ length: 500 }, (_, i) => ({ + key: `large-batch-${i}`, + data: { float32: generateRandomVector(3) }, + metadata: { batch: 'large', index: i }, + })) + + const response = await index.putVectors({ vectors }) + + assertSuccessResponse(response) + + // Verify some vectors were inserted + const getResponse = await index.getVectors({ + keys: ['large-batch-0', 'large-batch-100', 'large-batch-499'], + }) + + const data = assertSuccessResponse(getResponse) + expect(data.vectors.length).toBe(3) + }) + }) +}) diff --git a/packages/integrations/storage-vectors-js/src/index.ts b/packages/integrations/storage-vectors-js/src/index.ts index 9c9575a19..c5f01155f 100644 --- a/packages/integrations/storage-vectors-js/src/index.ts +++ b/packages/integrations/storage-vectors-js/src/index.ts @@ -1 +1,63 @@ -export * from './lib/storage-vectors-js.js' +// Main client +export { StorageVectorsClient, VectorBucketScope, VectorIndexScope } from './lib/StorageVectorsClient' +export type { StorageVectorsClientOptions } from './lib/StorageVectorsClient' + +// API classes (for advanced usage) +export { default as VectorBucketApi } from './lib/VectorBucketApi' +export { default as VectorIndexApi } from './lib/VectorIndexApi' +export { default as VectorDataApi } from './lib/VectorDataApi' +export type { CreateIndexOptions } from './lib/VectorIndexApi' + +// Types +export type { + // Core types + VectorBucket, + VectorIndex, + VectorData, + VectorMetadata, + VectorObject, + VectorMatch, + EncryptionConfiguration, + MetadataConfiguration, + VectorDataType, + DistanceMetric, + VectorFilter, + + // Request/Response types + ListVectorBucketsOptions, + ListVectorBucketsResponse, + ListIndexesOptions, + ListIndexesResponse, + PutVectorsOptions, + GetVectorsOptions, + GetVectorsResponse, + DeleteVectorsOptions, + ListVectorsOptions, + ListVectorsResponse, + QueryVectorsOptions, + QueryVectorsResponse, + + // Response wrappers + ApiResponse, + SuccessResponse, + ErrorResponse, + FetchParameters, +} from './lib/types' + +// Errors +export { + StorageVectorsError, + StorageVectorsApiError, + StorageVectorsUnknownError, + StorageVectorsErrorCode, + isStorageVectorsError, +} from './lib/errors' + +// Fetch utilities (for custom implementations) +export type { Fetch, FetchOptions, RequestMethodType } from './lib/fetch' + +// Helper utilities +export { resolveFetch, resolveResponse, isPlainObject, normalizeToFloat32, validateVectorDimension } from './lib/helpers' + +// Constants +export { DEFAULT_HEADERS } from './lib/constants' diff --git a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts new file mode 100644 index 000000000..89bae60f8 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts @@ -0,0 +1,396 @@ +import VectorIndexApi, { CreateIndexOptions } from './VectorIndexApi' +import VectorDataApi from './VectorDataApi' +import { Fetch } from './fetch' +import VectorBucketApi from './VectorBucketApi' +import { + DeleteVectorsOptions, + GetVectorsOptions, + ListIndexesOptions, + ListVectorsOptions, + PutVectorsOptions, + QueryVectorsOptions, +} from './types' + +/** + * Configuration options for the Storage Vectors client + */ +export interface StorageVectorsClientOptions { + /** + * Custom headers to include in all requests + */ + headers?: { [key: string]: string } + /** + * Custom 
fetch implementation (optional)
+   * Useful for testing or custom request handling
+   */
+  fetch?: Fetch
+}
+
+/**
+ * Main client for interacting with S3 Vectors API
+ * Provides access to bucket, index, and vector data operations
+ *
+ * @example
+ * ```typescript
+ * import { StorageVectorsClient } from '@supabase/storage-vectors-js'
+ *
+ * const client = new StorageVectorsClient('https://api.example.com', {
+ *   headers: { 'Authorization': 'Bearer token' }
+ * })
+ *
+ * // Access bucket operations
+ * await client.createVectorBucket('embeddings-prod')
+ *
+ * // Access index operations via buckets
+ * const bucket = client.from('embeddings-prod')
+ * await bucket.createIndex({
+ *   indexName: 'documents',
+ *   dataType: 'float32',
+ *   dimension: 1536,
+ *   distanceMetric: 'cosine'
+ * })
+ *
+ * // Access vector operations via index
+ * const index = bucket.index('documents')
+ * await index.putVectors({
+ *   vectors: [
+ *     { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }
+ *   ]
+ * })
+ *
+ * // Query similar vectors
+ * const { data } = await index.queryVectors({
+ *   queryVector: { float32: [...] },
+ *   topK: 5,
+ *   returnDistance: true
+ * })
+ * ```
+ */
+export class StorageVectorsClient extends VectorBucketApi {
+  constructor(url: string, options: StorageVectorsClientOptions = {}) {
+    super(url, options.headers || {}, options.fetch)
+  }
+
+  /**
+   * Access operations for a specific vector bucket
+   * Returns a scoped client for index and vector operations within the bucket
+   *
+   * @param vectorBucketName - Name of the vector bucket
+   * @returns Bucket-scoped client with index and vector operations
+   *
+   * @example
+   * ```typescript
+   * const bucket = client.from('embeddings-prod')
+   *
+   * // Create an index in this bucket
+   * await bucket.createIndex({
+   *   indexName: 'documents-openai',
+   *   dataType: 'float32',
+   *   dimension: 1536,
+   *   distanceMetric: 'cosine'
+   * })
+   *
+   * // List indexes in this bucket
+   * const { data } = await bucket.listIndexes()
+   * ```
+   */
+  from(vectorBucketName: string): VectorBucketScope {
+    return new VectorBucketScope(this.url, this.headers, vectorBucketName, this.fetch)
+  }
+}
+
+/**
+ * Scoped client for operations within a specific vector bucket
+ * Provides index management and access to vector operations
+ */
+export class VectorBucketScope extends VectorIndexApi {
+  private vectorBucketName: string
+
+  constructor(
+    url: string,
+    headers: { [key: string]: string },
+    vectorBucketName: string,
+    fetch?: Fetch
+  ) {
+    super(url, headers, fetch)
+    this.vectorBucketName = vectorBucketName
+  }
+
+  /**
+   * Creates a new vector index in this bucket
+   * Convenience method that automatically includes the bucket name
+   *
+   * @param options - Index configuration (vectorBucketName is automatically set)
+   * @returns Promise with empty response on success or error
+   *
+   * @example
+   * ```typescript
+   * const bucket = client.from('embeddings-prod')
+   * await bucket.createIndex({
+   *   indexName: 'documents-openai',
+   *   dataType: 'float32',
+   *   dimension: 1536,
+   *   distanceMetric: 'cosine',
+   *   metadataConfiguration: {
+   *     nonFilterableMetadataKeys: ['raw_text']
+   *   }
+   * })
+   * ```
+   */
+  override async createIndex(
+    options: Omit<CreateIndexOptions, 'vectorBucketName'>
+  ) {
+    return super.createIndex({
+      ...options,
+      vectorBucketName: this.vectorBucketName,
+    })
+  }
+
+  /**
+   * Lists indexes in this bucket
+   * Convenience method that automatically includes the bucket name
+   *
+   * @param options - Listing options (vectorBucketName is automatically set)
+   * @returns Promise with list of indexes or error
+   *
+   * @example
+   * ```typescript
+   * const bucket = client.from('embeddings-prod')
+   * const { data } = await bucket.listIndexes({ prefix: 'documents-' })
+   * ```
+   */
+  override async listIndexes(
+    options: Omit<ListIndexesOptions, 'vectorBucketName'> = {}
+  ) {
+    return super.listIndexes({
+      ...options,
+      vectorBucketName: this.vectorBucketName,
+    })
+  }
+
+  /**
+   * Retrieves metadata for a specific index in this bucket
+   * Convenience method that automatically includes the bucket name
+   *
+   * @param indexName - Name of the index to retrieve
+   * @returns Promise with index metadata or error
+   *
+   * @example
+   * ```typescript
+   * const bucket = client.from('embeddings-prod')
+   * const { data } = await bucket.getIndex('documents-openai')
+   * console.log('Dimension:', data?.index.dimension)
+   * ```
+   */
+  override async getIndex(indexName: string) {
+    return super.getIndex(this.vectorBucketName, indexName)
+  }
+
+  /**
+   * Deletes an index from this bucket
+   * Convenience method that automatically includes the bucket name
+   *
+   * @param indexName - Name of the index to delete
+   * @returns Promise with empty response on success or error
+   *
+   * @example
+   * ```typescript
+   * const bucket = client.from('embeddings-prod')
+   * await bucket.deleteIndex('old-index')
+   * ```
+   */
+  override async deleteIndex(indexName: string) {
+    return super.deleteIndex(this.vectorBucketName, indexName)
+  }
+
+  /**
+   * Access operations for a specific index within this bucket
+   * Returns a scoped client for vector data operations
+   *
+   * @param indexName - Name of the index
+   * @returns Index-scoped client with vector data operations
+   *
+   * @example
+   * ```typescript
+   * const index = client.from('embeddings-prod').index('documents-openai')
+   *
+   * // Insert vectors
+   * await index.putVectors({
+   *   vectors: [
+   *     { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }
+   *   ]
+   * })
+   *
+   * // Query similar vectors
+   * const { data } = await index.queryVectors({
+   *   queryVector: { float32: [...] },
+   *   topK: 5
+   * })
+   * ```
+   */
+  index(indexName: string): VectorIndexScope {
+    return new VectorIndexScope(
+      this.url,
+      this.headers,
+      this.vectorBucketName,
+      indexName,
+      this.fetch
+    )
+  }
+}
+
+/**
+ * Scoped client for operations within a specific vector index
+ * Provides vector data operations (put, get, list, query, delete)
+ */
+export class VectorIndexScope extends VectorDataApi {
+  private vectorBucketName: string
+  private indexName: string
+
+  constructor(
+    url: string,
+    headers: { [key: string]: string },
+    vectorBucketName: string,
+    indexName: string,
+    fetch?: Fetch
+  ) {
+    super(url, headers, fetch)
+    this.vectorBucketName = vectorBucketName
+    this.indexName = indexName
+  }
+
+  /**
+   * Inserts or updates vectors in this index
+   * Convenience method that automatically includes bucket and index names
+   *
+   * @param options - Vector insertion options (bucket and index names automatically set)
+   * @returns Promise with empty response on success or error
+   *
+   * @example
+   * ```typescript
+   * const index = client.from('embeddings-prod').index('documents-openai')
+   * await index.putVectors({
+   *   vectors: [
+   *     {
+   *       key: 'doc-1',
+   *       data: { float32: [0.1, 0.2, ...]
}, + * metadata: { title: 'Introduction', page: 1 } + * } + * ] + * }) + * ``` + */ + override async putVectors(options: Omit) { + return super.putVectors({ + ...options, + vectorBucketName: this.vectorBucketName, + indexName: this.indexName, + }) + } + + /** + * Retrieves vectors by keys from this index + * Convenience method that automatically includes bucket and index names + * + * @param options - Vector retrieval options (bucket and index names automatically set) + * @returns Promise with array of vectors or error + * + * @example + * ```typescript + * const index = client.bucket('embeddings-prod').index('documents-openai') + * const { data } = await index.getVectors({ + * keys: ['doc-1', 'doc-2'], + * returnMetadata: true + * }) + * ``` + */ + override async getVectors( + options: Omit + ) { + return super.getVectors({ + ...options, + vectorBucketName: this.vectorBucketName, + indexName: this.indexName, + }) + } + + /** + * Lists vectors in this index with pagination + * Convenience method that automatically includes bucket and index names + * + * @param options - Listing options (bucket and index names automatically set) + * @returns Promise with array of vectors and pagination token + * + * @example + * ```typescript + * const index = client.bucket('embeddings-prod').index('documents-openai') + * const { data } = await index.listVectors({ + * maxResults: 500, + * returnMetadata: true + * }) + * ``` + */ + override async listVectors( + options: Omit = {} + ) { + return super.listVectors({ + ...options, + vectorBucketName: this.vectorBucketName, + indexName: this.indexName, + }) + } + + /** + * Queries for similar vectors in this index + * Convenience method that automatically includes bucket and index names + * + * @param options - Query options (bucket and index names automatically set) + * @returns Promise with array of similar vectors ordered by distance + * + * @example + * ```typescript + * const index = client.bucket('embeddings-prod').index('documents-openai') + * const { data } = await index.queryVectors({ + * queryVector: { float32: [0.1, 0.2, ...] 
}, + * topK: 5, + * filter: { category: 'technical' }, + * returnDistance: true, + * returnMetadata: true + * }) + * ``` + */ + override async queryVectors( + options: Omit + ) { + return super.queryVectors({ + ...options, + vectorBucketName: this.vectorBucketName, + indexName: this.indexName, + }) + } + + /** + * Deletes vectors by keys from this index + * Convenience method that automatically includes bucket and index names + * + * @param options - Deletion options (bucket and index names automatically set) + * @returns Promise with empty response on success or error + * + * @example + * ```typescript + * const index = client.bucket('embeddings-prod').index('documents-openai') + * await index.deleteVectors({ + * keys: ['doc-1', 'doc-2', 'doc-3'] + * }) + * ``` + */ + override async deleteVectors( + options: Omit + ) { + return super.deleteVectors({ + ...options, + vectorBucketName: this.vectorBucketName, + indexName: this.indexName, + }) + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts new file mode 100644 index 000000000..40f1badda --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts @@ -0,0 +1,212 @@ +import { DEFAULT_HEADERS } from './constants' +import { isStorageVectorsError } from './errors' +import { Fetch, post, remove } from './fetch' +import { resolveFetch } from './helpers' +import { + ApiResponse, + VectorBucket, + ListVectorBucketsOptions, + ListVectorBucketsResponse, +} from './types' + +/** + * API class for managing Vector Buckets + * Provides methods for creating, reading, listing, and deleting vector buckets + */ +export default class VectorBucketApi { + protected url: string + protected headers: { [key: string]: string } + protected fetch: Fetch + protected shouldThrowOnError = false + + constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { + this.url = url.replace(/\/$/, '') + this.headers = { ...DEFAULT_HEADERS, ...headers } + this.fetch = resolveFetch(fetch) + } + + /** + * Enable throwing errors instead of returning them in the response + * When enabled, failed operations will throw instead of returning { data: null, error } + * + * @returns This instance for method chaining + * @example + * ```typescript + * const client = new VectorBucketApi(url, headers) + * client.throwOnError() + * const { data } = await client.createVectorBucket('my-bucket') // throws on error + * ``` + */ + public throwOnError(): this { + this.shouldThrowOnError = true + return this + } + + /** + * Creates a new vector bucket + * Vector buckets are containers for vector indexes and their data + * + * @param vectorBucketName - Unique name for the vector bucket + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorConflictException` if bucket already exists (HTTP 409) + * - `S3VectorMaxBucketsExceeded` if quota exceeded (HTTP 400) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.createVectorBucket('embeddings-prod') + * if (error) { + * console.error('Failed to create bucket:', error.message) + * } + * ``` + */ + async createVectorBucket(vectorBucketName: string): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/CreateVectorBucket`, + { vectorBucketName }, + { headers: this.headers } + ) + return { data: data || {}, error: null } + } catch (error) { 
+ if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Retrieves metadata for a specific vector bucket + * Returns bucket configuration including encryption settings and creation time + * + * @param vectorBucketName - Name of the vector bucket to retrieve + * @returns Promise with bucket metadata or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.getVectorBucket('embeddings-prod') + * if (data) { + * console.log('Bucket created at:', new Date(data.vectorBucket.creationTime! * 1000)) + * } + * ``` + */ + async getVectorBucket(vectorBucketName: string): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/GetVectorBucket`, + { vectorBucketName }, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Lists vector buckets with optional filtering and pagination + * Supports prefix-based filtering and paginated results + * + * @param options - Listing options + * @param options.prefix - Filter buckets by name prefix + * @param options.maxResults - Maximum results per page (default: 100) + * @param options.nextToken - Pagination token from previous response + * @returns Promise with list of buckets and pagination token + * + * @throws {StorageVectorsApiError} With code: + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // List all buckets with prefix 'prod-' + * const { data, error } = await client.listVectorBuckets({ prefix: 'prod-' }) + * if (data) { + * console.log('Found buckets:', data.buckets.length) + * // Fetch next page if available + * if (data.nextToken) { + * const next = await client.listVectorBuckets({ nextToken: data.nextToken }) + * } + * } + * ``` + */ + async listVectorBuckets( + options: ListVectorBucketsOptions = {} + ): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/ListVectorBuckets`, + options, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Deletes a vector bucket + * Bucket must be empty before deletion (all indexes must be removed first) + * + * @param vectorBucketName - Name of the vector bucket to delete + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorBucketNotEmpty` if bucket contains indexes (HTTP 400) + * - `S3VectorNotFoundException` if bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // Delete all indexes first, then delete bucket + * const { error } = await client.deleteVectorBucket('old-bucket') + * if (error?.statusCode === 'S3VectorBucketNotEmpty') { + * console.error('Must delete all indexes first') + * } + * ``` + */ + async deleteVectorBucket(vectorBucketName: string): Promise> { + try { + const data = await remove( + this.fetch, + `${this.url}/DeleteVectorBucket`, + { vectorBucketName }, + { headers: this.headers } + ) + return { data: data 
|| {}, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts new file mode 100644 index 000000000..3bb1d2771 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts @@ -0,0 +1,356 @@ +import { DEFAULT_HEADERS } from './constants' +import { isStorageVectorsError } from './errors' +import { Fetch, post, remove } from './fetch' +import { resolveFetch } from './helpers' +import { + ApiResponse, + PutVectorsOptions, + GetVectorsOptions, + GetVectorsResponse, + DeleteVectorsOptions, + ListVectorsOptions, + ListVectorsResponse, + QueryVectorsOptions, + QueryVectorsResponse, +} from './types' + +/** + * API class for managing Vector Data within Vector Indexes + * Provides methods for inserting, querying, listing, and deleting vector embeddings + */ +export default class VectorDataApi { + protected url: string + protected headers: { [key: string]: string } + protected fetch: Fetch + protected shouldThrowOnError = false + + constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { + this.url = url.replace(/\/$/, '') + this.headers = { ...DEFAULT_HEADERS, ...headers } + this.fetch = resolveFetch(fetch) + } + + /** + * Enable throwing errors instead of returning them in the response + * When enabled, failed operations will throw instead of returning { data: null, error } + * + * @returns This instance for method chaining + * @example + * ```typescript + * const client = new VectorDataApi(url, headers) + * client.throwOnError() + * const { data } = await client.putVectors(options) // throws on error + * ``` + */ + public throwOnError(): this { + this.shouldThrowOnError = true + return this + } + + /** + * Inserts or updates vectors in batch (upsert operation) + * Accepts 1-500 vectors per request. Larger batches should be split + * + * @param options - Vector insertion options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Name of the target index + * @param options.vectors - Array of vectors to insert/update (1-500 items) + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorConflictException` if duplicate key conflict occurs (HTTP 409) + * - `S3VectorNotFoundException` if bucket or index doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.putVectors({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * vectors: [ + * { + * key: 'doc-1', + * data: { float32: [0.1, 0.2, 0.3, ...] }, // 1536 dimensions + * metadata: { title: 'Introduction', page: 1 } + * }, + * { + * key: 'doc-2', + * data: { float32: [0.4, 0.5, 0.6, ...] 
}, + * metadata: { title: 'Conclusion', page: 42 } + * } + * ] + * }) + * ``` + */ + async putVectors(options: PutVectorsOptions): Promise> { + try { + // Validate batch size + if (options.vectors.length < 1 || options.vectors.length > 500) { + throw new Error('Vector batch size must be between 1 and 500 items') + } + + const data = await post( + this.fetch, + `${this.url}/PutVectors`, + options, + { headers: this.headers } + ) + return { data: data || {}, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Retrieves vectors by their keys in batch + * Optionally includes vector data and/or metadata in response + * Additional permissions required when returning data or metadata + * + * @param options - Vector retrieval options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Name of the index + * @param options.keys - Array of vector keys to retrieve + * @param options.returnData - Whether to include vector embeddings (requires permission) + * @param options.returnMetadata - Whether to include metadata (requires permission) + * @returns Promise with array of vectors or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket or index doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.getVectors({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * keys: ['doc-1', 'doc-2', 'doc-3'], + * returnData: false, // Don't return embeddings + * returnMetadata: true // Return metadata only + * }) + * if (data) { + * data.vectors.forEach(v => console.log(v.key, v.metadata)) + * } + * ``` + */ + async getVectors(options: GetVectorsOptions): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/GetVectors`, + options, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Lists/scans vectors in an index with pagination + * Supports parallel scanning via segment configuration for high-throughput scenarios + * Additional permissions required when returning data or metadata + * + * @param options - Vector listing options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Name of the index + * @param options.maxResults - Maximum results per page (default: 500, max: 1000) + * @param options.nextToken - Pagination token from previous response + * @param options.returnData - Whether to include vector embeddings (requires permission) + * @param options.returnMetadata - Whether to include metadata (requires permission) + * @param options.segmentCount - Total parallel segments (1-16) for distributed scanning + * @param options.segmentIndex - Zero-based segment index (0 to segmentCount-1) + * @returns Promise with array of vectors, pagination token, or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket or index doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // Simple pagination + * let nextToken: string | undefined + * do { + * const { data, error } = await client.listVectors({ + 
* vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * maxResults: 500, + * nextToken, + * returnMetadata: true + * }) + * if (error) break + * console.log('Batch:', data.vectors.length) + * nextToken = data.nextToken + * } while (nextToken) + * + * // Parallel scanning (4 concurrent workers) + * const workers = [0, 1, 2, 3].map(async (segmentIndex) => { + * const { data } = await client.listVectors({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * segmentCount: 4, + * segmentIndex, + * returnMetadata: true + * }) + * return data?.vectors || [] + * }) + * const results = await Promise.all(workers) + * ``` + */ + async listVectors(options: ListVectorsOptions): Promise> { + try { + // Validate segment configuration + if (options.segmentCount !== undefined) { + if (options.segmentCount < 1 || options.segmentCount > 16) { + throw new Error('segmentCount must be between 1 and 16') + } + if (options.segmentIndex !== undefined) { + if (options.segmentIndex < 0 || options.segmentIndex >= options.segmentCount) { + throw new Error(`segmentIndex must be between 0 and ${options.segmentCount - 1}`) + } + } + } + + const data = await post( + this.fetch, + `${this.url}/ListVectors`, + options, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Queries for similar vectors using approximate nearest neighbor (ANN) search + * Returns top-K most similar vectors based on the configured distance metric + * Supports optional metadata filtering (requires GetVectors permission) + * + * @param options - Query options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Name of the index + * @param options.queryVector - Query embedding to find similar vectors + * @param options.topK - Number of nearest neighbors to return (default: 10) + * @param options.filter - Optional JSON filter for metadata (requires GetVectors permission) + * @param options.returnDistance - Whether to include similarity distances + * @param options.returnMetadata - Whether to include metadata (requires GetVectors permission) + * @returns Promise with array of similar vectors ordered by distance + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket or index doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // Semantic search with filtering + * const { data, error } = await client.queryVectors({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * queryVector: { float32: [0.1, 0.2, 0.3, ...] 
}, // 1536 dimensions + * topK: 5, + * filter: { + * category: 'technical', + * published: true + * }, + * returnDistance: true, + * returnMetadata: true + * }) + * if (data) { + * data.matches.forEach(match => { + * console.log(`${match.key}: distance=${match.distance}`) + * console.log('Metadata:', match.metadata) + * }) + * } + * ``` + */ + async queryVectors(options: QueryVectorsOptions): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/QueryVectors`, + options, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Deletes vectors by their keys in batch + * Accepts 1-500 keys per request + * + * @param options - Vector deletion options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Name of the index + * @param options.keys - Array of vector keys to delete (1-500 items) + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket or index doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { error } = await client.deleteVectors({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * keys: ['doc-1', 'doc-2', 'doc-3'] + * }) + * if (!error) { + * console.log('Vectors deleted successfully') + * } + * ``` + */ + async deleteVectors(options: DeleteVectorsOptions): Promise> { + try { + // Validate batch size + if (options.keys.length < 1 || options.keys.length > 500) { + throw new Error('Keys batch size must be between 1 and 500 items') + } + + const data = await remove( + this.fetch, + `${this.url}/DeleteVectors`, + options, + { headers: this.headers } + ) + return { data: data || {}, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts new file mode 100644 index 000000000..ebfb96b03 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts @@ -0,0 +1,251 @@ +import { DEFAULT_HEADERS } from './constants' +import { isStorageVectorsError } from './errors' +import { Fetch, post, remove } from './fetch' +import { resolveFetch } from './helpers' +import { + ApiResponse, + VectorIndex, + ListIndexesOptions, + ListIndexesResponse, + VectorDataType, + DistanceMetric, + MetadataConfiguration, +} from './types' + +/** + * Options for creating a vector index + */ +export interface CreateIndexOptions { + vectorBucketName: string + indexName: string + dataType: VectorDataType + dimension: number + distanceMetric: DistanceMetric + metadataConfiguration?: MetadataConfiguration +} + +/** + * API class for managing Vector Indexes within Vector Buckets + * Provides methods for creating, reading, listing, and deleting vector indexes + */ +export default class VectorIndexApi { + protected url: string + protected headers: { [key: string]: string } + protected fetch: Fetch + protected shouldThrowOnError = false + + constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { + this.url = url.replace(/\/$/, '') + this.headers = { 
...DEFAULT_HEADERS, ...headers } + this.fetch = resolveFetch(fetch) + } + + /** + * Enable throwing errors instead of returning them in the response + * When enabled, failed operations will throw instead of returning { data: null, error } + * + * @returns This instance for method chaining + * @example + * ```typescript + * const client = new VectorIndexApi(url, headers) + * client.throwOnError() + * const { data } = await client.createIndex(options) // throws on error + * ``` + */ + public throwOnError(): this { + this.shouldThrowOnError = true + return this + } + + /** + * Creates a new vector index within a bucket + * Defines the schema for vectors including dimensionality, distance metric, and metadata config + * + * @param options - Index configuration + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.indexName - Unique name for the index within the bucket + * @param options.dataType - Data type for vector components (currently only 'float32') + * @param options.dimension - Dimensionality of vectors (e.g., 384, 768, 1536) + * @param options.distanceMetric - Similarity metric ('cosine', 'euclidean', 'dotproduct') + * @param options.metadataConfiguration - Optional config for non-filterable metadata keys + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorConflictException` if index already exists (HTTP 409) + * - `S3VectorMaxIndexesExceeded` if quota exceeded (HTTP 400) + * - `S3VectorNotFoundException` if bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.createIndex({ + * vectorBucketName: 'embeddings-prod', + * indexName: 'documents-openai-small', + * dataType: 'float32', + * dimension: 1536, + * distanceMetric: 'cosine', + * metadataConfiguration: { + * nonFilterableMetadataKeys: ['raw_text', 'internal_id'] + * } + * }) + * ``` + */ + async createIndex(options: CreateIndexOptions): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/CreateIndex`, + options, + { headers: this.headers } + ) + return { data: data || {}, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Retrieves metadata for a specific vector index + * Returns index configuration including dimension, distance metric, and metadata settings + * + * @param vectorBucketName - Name of the parent vector bucket + * @param indexName - Name of the index to retrieve + * @returns Promise with index metadata or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if index or bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * const { data, error } = await client.getIndex('embeddings-prod', 'documents-openai-small') + * if (data) { + * console.log('Index dimension:', data.index.dimension) + * console.log('Distance metric:', data.index.distanceMetric) + * } + * ``` + */ + async getIndex( + vectorBucketName: string, + indexName: string + ): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/GetIndex`, + { vectorBucketName, indexName }, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: 
null, error } + } + throw error + } + } + + /** + * Lists vector indexes within a bucket with optional filtering and pagination + * Supports prefix-based filtering and paginated results + * + * @param options - Listing options + * @param options.vectorBucketName - Name of the parent vector bucket + * @param options.prefix - Filter indexes by name prefix + * @param options.maxResults - Maximum results per page (default: 100) + * @param options.nextToken - Pagination token from previous response + * @returns Promise with list of indexes and pagination token + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // List all indexes in a bucket + * const { data, error } = await client.listIndexes({ + * vectorBucketName: 'embeddings-prod', + * prefix: 'documents-' + * }) + * if (data) { + * console.log('Found indexes:', data.indexes.map(i => i.indexName)) + * // Fetch next page if available + * if (data.nextToken) { + * const next = await client.listIndexes({ + * vectorBucketName: 'embeddings-prod', + * nextToken: data.nextToken + * }) + * } + * } + * ``` + */ + async listIndexes(options: ListIndexesOptions): Promise> { + try { + const data = await post( + this.fetch, + `${this.url}/ListIndexes`, + options, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } + + /** + * Deletes a vector index and all its data + * This operation removes the index schema and all vectors stored in the index + * + * @param vectorBucketName - Name of the parent vector bucket + * @param indexName - Name of the index to delete + * @returns Promise with empty response on success or error + * + * @throws {StorageVectorsApiError} With code: + * - `S3VectorNotFoundException` if index or bucket doesn't exist (HTTP 404) + * - `InternalError` for server errors (HTTP 500) + * + * @example + * ```typescript + * // Delete an index and all its vectors + * const { error } = await client.deleteIndex('embeddings-prod', 'old-index') + * if (!error) { + * console.log('Index deleted successfully') + * } + * ``` + */ + async deleteIndex(vectorBucketName: string, indexName: string): Promise> { + try { + const data = await remove( + this.fetch, + `${this.url}/DeleteIndex`, + { vectorBucketName, indexName }, + { headers: this.headers } + ) + return { data: data || {}, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageVectorsError(error)) { + return { data: null, error } + } + throw error + } + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/constants.ts b/packages/integrations/storage-vectors-js/src/lib/constants.ts new file mode 100644 index 000000000..3643113bf --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/constants.ts @@ -0,0 +1,8 @@ +/** + * Default HTTP headers for all requests + * Includes client identification for analytics and debugging + */ +export const DEFAULT_HEADERS = { + 'X-Client-Info': `storage-vectors-js/0.0.1`, + 'Content-Type': 'application/json' +} diff --git a/packages/integrations/storage-vectors-js/src/lib/errors.ts b/packages/integrations/storage-vectors-js/src/lib/errors.ts new file mode 100644 index 000000000..bcaed5beb --- /dev/null +++ 
b/packages/integrations/storage-vectors-js/src/lib/errors.ts @@ -0,0 +1,78 @@ +/** + * Base error class for all Storage Vectors errors + */ +export class StorageVectorsError extends Error { + protected __isStorageVectorsError = true + + constructor(message: string) { + super(message) + this.name = 'StorageVectorsError' + } +} + +/** + * Type guard to check if an error is a StorageVectorsError + * @param error - The error to check + * @returns True if the error is a StorageVectorsError + */ +export function isStorageVectorsError(error: unknown): error is StorageVectorsError { + return typeof error === 'object' && error !== null && '__isStorageVectorsError' in error +} + +/** + * API error returned from S3 Vectors service + * Includes HTTP status code and service-specific error code + */ +export class StorageVectorsApiError extends StorageVectorsError { + status: number + statusCode: string + + constructor(message: string, status: number, statusCode: string) { + super(message) + this.name = 'StorageVectorsApiError' + this.status = status + this.statusCode = statusCode + } + + toJSON() { + return { + name: this.name, + message: this.message, + status: this.status, + statusCode: this.statusCode, + } + } +} + +/** + * Unknown error that doesn't match expected error patterns + * Wraps the original error for debugging + */ +export class StorageVectorsUnknownError extends StorageVectorsError { + originalError: unknown + + constructor(message: string, originalError: unknown) { + super(message) + this.name = 'StorageVectorsUnknownError' + this.originalError = originalError + } +} + +/** + * Error codes specific to S3 Vectors API + * Maps AWS service errors to application-friendly error codes + */ +export enum StorageVectorsErrorCode { + /** Internal server fault (HTTP 500) */ + InternalError = 'InternalError', + /** Resource already exists / conflict (HTTP 409) */ + S3VectorConflictException = 'S3VectorConflictException', + /** Resource not found (HTTP 404) */ + S3VectorNotFoundException = 'S3VectorNotFoundException', + /** Delete bucket while not empty (HTTP 400) */ + S3VectorBucketNotEmpty = 'S3VectorBucketNotEmpty', + /** Exceeds bucket quota/limit (HTTP 400) */ + S3VectorMaxBucketsExceeded = 'S3VectorMaxBucketsExceeded', + /** Exceeds index quota/limit (HTTP 400) */ + S3VectorMaxIndexesExceeded = 'S3VectorMaxIndexesExceeded', +} diff --git a/packages/integrations/storage-vectors-js/src/lib/fetch.ts b/packages/integrations/storage-vectors-js/src/lib/fetch.ts new file mode 100644 index 000000000..864dbf6fc --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/fetch.ts @@ -0,0 +1,198 @@ +import { StorageVectorsApiError, StorageVectorsUnknownError } from './errors' +import { isPlainObject, resolveResponse } from './helpers' +import { FetchParameters } from './types' + +export type Fetch = typeof fetch + +/** + * Options for fetch requests + * @property headers - Custom HTTP headers + * @property noResolveJson - If true, return raw Response instead of parsing JSON + */ +export interface FetchOptions { + headers?: { + [key: string]: string + } + noResolveJson?: boolean +} + +/** + * HTTP methods supported by the API + */ +export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' + +/** + * Extracts error message from various error response formats + * @param err - Error object from API + * @returns Human-readable error message + */ +const _getErrorMessage = (err: any): string => + err.msg || err.message || err.error_description || err.error || JSON.stringify(err) + +/** + * 
Handles fetch errors and converts them to StorageVectors error types + * @param error - The error caught from fetch + * @param reject - Promise rejection function + * @param options - Fetch options that may affect error handling + */ +const handleError = async ( + error: unknown, + reject: (reason?: any) => void, + options?: FetchOptions +) => { + const Res = await resolveResponse() + + if (error instanceof Res && !options?.noResolveJson) { + error + .json() + .then((err: any) => { + const status = error.status || 500 + const statusCode = err?.statusCode || err?.code || status + '' + reject(new StorageVectorsApiError(_getErrorMessage(err), status, statusCode)) + }) + .catch((err: any) => { + reject(new StorageVectorsUnknownError(_getErrorMessage(err), err)) + }) + } else { + reject(new StorageVectorsUnknownError(_getErrorMessage(error), error)) + } +} + +/** + * Builds request parameters for fetch calls + * @param method - HTTP method + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters like AbortSignal + * @param body - Request body (will be JSON stringified if plain object) + * @returns Complete fetch request parameters + */ +const _getRequestParams = ( + method: RequestMethodType, + options?: FetchOptions, + parameters?: FetchParameters, + body?: object +) => { + const params: { [k: string]: any } = { method, headers: options?.headers || {} } + + if (method === 'GET' || !body) { + return params + } + + if (isPlainObject(body)) { + params.headers = { 'Content-Type': 'application/json', ...options?.headers } + params.body = JSON.stringify(body) + } else { + params.body = body + } + + return { ...params, ...parameters } +} + +/** + * Internal request handler that wraps fetch with error handling + * @param fetcher - Fetch function to use + * @param method - HTTP method + * @param url - Request URL + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters + * @param body - Request body + * @returns Promise with parsed response or error + */ +async function _handleRequest( + fetcher: Fetch, + method: RequestMethodType, + url: string, + options?: FetchOptions, + parameters?: FetchParameters, + body?: object +): Promise { + return new Promise((resolve, reject) => { + fetcher(url, _getRequestParams(method, options, parameters, body)) + .then((result) => { + if (!result.ok) throw result + if (options?.noResolveJson) return result + // Handle empty responses (204, empty body) + const contentType = result.headers.get('content-type') + if (!contentType || !contentType.includes('application/json')) { + return {} + } + return result.json() + }) + .then((data) => resolve(data)) + .catch((error) => handleError(error, reject, options)) + }) +} + +/** + * Performs a GET request + * @param fetcher - Fetch function to use + * @param url - Request URL + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters + * @returns Promise with parsed response + */ +export async function get( + fetcher: Fetch, + url: string, + options?: FetchOptions, + parameters?: FetchParameters +): Promise { + return _handleRequest(fetcher, 'GET', url, options, parameters) +} + +/** + * Performs a POST request + * @param fetcher - Fetch function to use + * @param url - Request URL + * @param body - Request body to be JSON stringified + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters + * @returns Promise with parsed response + */ +export async function post( + fetcher: Fetch, + 
url: string, + body: object, + options?: FetchOptions, + parameters?: FetchParameters +): Promise { + return _handleRequest(fetcher, 'POST', url, options, parameters, body) +} + +/** + * Performs a PUT request + * @param fetcher - Fetch function to use + * @param url - Request URL + * @param body - Request body to be JSON stringified + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters + * @returns Promise with parsed response + */ +export async function put( + fetcher: Fetch, + url: string, + body: object, + options?: FetchOptions, + parameters?: FetchParameters +): Promise { + return _handleRequest(fetcher, 'PUT', url, options, parameters, body) +} + +/** + * Performs a DELETE request + * @param fetcher - Fetch function to use + * @param url - Request URL + * @param body - Request body to be JSON stringified + * @param options - Custom fetch options + * @param parameters - Additional fetch parameters + * @returns Promise with parsed response + */ +export async function remove( + fetcher: Fetch, + url: string, + body: object, + options?: FetchOptions, + parameters?: FetchParameters +): Promise { + return _handleRequest(fetcher, 'DELETE', url, options, parameters, body) +} diff --git a/packages/integrations/storage-vectors-js/src/lib/helpers.ts b/packages/integrations/storage-vectors-js/src/lib/helpers.ts new file mode 100644 index 000000000..a9252b436 --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/helpers.ts @@ -0,0 +1,90 @@ +type Fetch = typeof fetch + +/** + * Resolves the fetch implementation to use + * Uses custom fetch if provided, otherwise falls back to: + * - Native fetch in browser/modern environments + * - @supabase/node-fetch polyfill in Node.js environments without fetch + * + * @param customFetch - Optional custom fetch implementation + * @returns Resolved fetch function + */ +export const resolveFetch = (customFetch?: Fetch): Fetch => { + let _fetch: Fetch + if (customFetch) { + _fetch = customFetch + } else if (typeof fetch === 'undefined') { + _fetch = (...args) => + import('@supabase/node-fetch' as any).then(({ default: fetch }) => fetch(...args)) + } else { + _fetch = fetch + } + return (...args) => _fetch(...args) +} + +/** + * Resolves the Response constructor to use + * Uses native Response in browser/modern environments + * Falls back to @supabase/node-fetch polyfill in Node.js environments + * + * @returns Response constructor + */ +export const resolveResponse = async (): Promise => { + if (typeof Response === 'undefined') { + // @ts-ignore + return (await import('@supabase/node-fetch' as any)).Response + } + + return Response +} + +/** + * Determine if input is a plain object + * An object is plain if it's created by either {}, new Object(), or Object.create(null) + * + * @param value - Value to check + * @returns True if value is a plain object + * @source https://github.com/sindresorhus/is-plain-obj + */ +export const isPlainObject = (value: object): boolean => { + if (typeof value !== 'object' || value === null) { + return false + } + + const prototype = Object.getPrototypeOf(value) + return ( + (prototype === null || + prototype === Object.prototype || + Object.getPrototypeOf(prototype) === null) && + !(Symbol.toStringTag in value) && + !(Symbol.iterator in value) + ) +} + +/** + * Normalizes a number array to float32 format + * Ensures all vector values are valid 32-bit floats + * + * @param values - Array of numbers to normalize + * @returns Normalized float32 array + */ +export const 
normalizeToFloat32 = (values: number[]): number[] => { + // Use Float32Array to ensure proper precision + return Array.from(new Float32Array(values)) +} + +/** + * Validates vector dimensions match expected dimension + * Throws error if dimensions don't match + * + * @param vector - Vector data to validate + * @param expectedDimension - Expected vector dimension + * @throws Error if dimensions don't match + */ +export const validateVectorDimension = (vector: { float32: number[] }, expectedDimension?: number): void => { + if (expectedDimension !== undefined && vector.float32.length !== expectedDimension) { + throw new Error( + `Vector dimension mismatch: expected ${expectedDimension}, got ${vector.float32.length}` + ) + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts deleted file mode 100644 index 8b0fb5da3..000000000 --- a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.spec.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { storageVectorsJs } from './storage-vectors-js.js' - -describe('storageVectorsJs', () => { - it('should work', () => { - expect(storageVectorsJs()).toEqual('storage-vectors-js') - }) -}) diff --git a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts b/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts deleted file mode 100644 index c5218edfd..000000000 --- a/packages/integrations/storage-vectors-js/src/lib/storage-vectors-js.ts +++ /dev/null @@ -1,3 +0,0 @@ -export function storageVectorsJs(): string { - return 'storage-vectors-js' -} diff --git a/packages/integrations/storage-vectors-js/src/lib/types.ts b/packages/integrations/storage-vectors-js/src/lib/types.ts new file mode 100644 index 000000000..31182fd7d --- /dev/null +++ b/packages/integrations/storage-vectors-js/src/lib/types.ts @@ -0,0 +1,299 @@ +import { StorageVectorsError } from './errors' + +/** + * Configuration for encryption at rest + * @property kmsKeyArn - ARN of the KMS key used for encryption + * @property sseType - Server-side encryption type (e.g., 'KMS') + */ +export interface EncryptionConfiguration { + kmsKeyArn?: string + sseType?: string +} + +/** + * Vector bucket metadata + * @property vectorBucketName - Unique name of the vector bucket + * @property creationTime - Unix timestamp of when the bucket was created + * @property encryptionConfiguration - Optional encryption settings + */ +export interface VectorBucket { + vectorBucketName: string + creationTime?: number + encryptionConfiguration?: EncryptionConfiguration +} + +/** + * Metadata configuration for vector index + * Defines which metadata keys should not be indexed for filtering + * @property nonFilterableMetadataKeys - Array of metadata keys that cannot be used in filters + */ +export interface MetadataConfiguration { + nonFilterableMetadataKeys?: string[] +} + +/** + * Supported data types for vectors + * Currently only float32 is supported + */ +export type VectorDataType = 'float32' + +/** + * Distance metrics for vector similarity search + */ +export type DistanceMetric = 'cosine' | 'euclidean' | 'dotproduct' + +/** + * Vector index configuration and metadata + * @property indexName - Unique name of the index within the bucket + * @property vectorBucketName - Name of the parent vector bucket + * @property dataType - Data type of vector components (currently only 'float32') + * @property dimension - Dimensionality of vectors (e.g., 384, 768, 1536) + * 
@property distanceMetric - Similarity metric used for queries + * @property metadataConfiguration - Configuration for metadata filtering + * @property creationTime - Unix timestamp of when the index was created + */ +export interface VectorIndex { + indexName: string + vectorBucketName: string + dataType: VectorDataType + dimension: number + distanceMetric: DistanceMetric + metadataConfiguration?: MetadataConfiguration + creationTime?: number +} + +/** + * Vector data representation + * Vectors must be float32 arrays with dimensions matching the index + * @property float32 - Array of 32-bit floating point numbers + */ +export interface VectorData { + float32: number[] +} + +/** + * Arbitrary JSON metadata attached to vectors + * Keys configured as non-filterable in the index can be stored but not queried + */ +export type VectorMetadata = Record + +/** + * Single vector object for insertion/update + * @property key - Unique identifier for the vector + * @property data - Vector embedding data + * @property metadata - Optional arbitrary metadata + */ +export interface VectorObject { + key: string + data: VectorData + metadata?: VectorMetadata +} + +/** + * Vector object returned from queries with optional distance + * @property key - Unique identifier for the vector + * @property data - Vector embedding data (if requested) + * @property metadata - Arbitrary metadata (if requested) + * @property distance - Similarity distance from query vector (if requested) + */ +export interface VectorMatch { + key: string + data?: VectorData + metadata?: VectorMetadata + distance?: number +} + +/** + * Options for fetching vector buckets + * @property prefix - Filter buckets by name prefix + * @property maxResults - Maximum number of results to return (default: 100) + * @property nextToken - Token for pagination from previous response + */ +export interface ListVectorBucketsOptions { + prefix?: string + maxResults?: number + nextToken?: string +} + +/** + * Response from listing vector buckets + * @property buckets - Array of bucket names + * @property nextToken - Token for fetching next page (if more results exist) + */ +export interface ListVectorBucketsResponse { + buckets: { vectorBucketName: string }[] + nextToken?: string +} + +/** + * Options for listing indexes within a bucket + * @property vectorBucketName - Name of the parent vector bucket + * @property prefix - Filter indexes by name prefix + * @property maxResults - Maximum number of results to return (default: 100) + * @property nextToken - Token for pagination from previous response + */ +export interface ListIndexesOptions { + vectorBucketName: string + prefix?: string + maxResults?: number + nextToken?: string +} + +/** + * Response from listing indexes + * @property indexes - Array of index names + * @property nextToken - Token for fetching next page (if more results exist) + */ +export interface ListIndexesResponse { + indexes: { indexName: string }[] + nextToken?: string +} + +/** + * Options for batch reading vectors + * @property vectorBucketName - Name of the vector bucket + * @property indexName - Name of the index + * @property keys - Array of vector keys to retrieve + * @property returnData - Whether to include vector data in response + * @property returnMetadata - Whether to include metadata in response + */ +export interface GetVectorsOptions { + vectorBucketName: string + indexName: string + keys: string[] + returnData?: boolean + returnMetadata?: boolean +} + +/** + * Response from getting vectors + * @property vectors - Array 
of retrieved vector objects + */ +export interface GetVectorsResponse { + vectors: VectorMatch[] +} + +/** + * Options for batch inserting/updating vectors + * @property vectorBucketName - Name of the vector bucket + * @property indexName - Name of the index + * @property vectors - Array of vectors to insert/upsert (1-500 items) + */ +export interface PutVectorsOptions { + vectorBucketName: string + indexName: string + vectors: VectorObject[] +} + +/** + * Options for batch deleting vectors + * @property vectorBucketName - Name of the vector bucket + * @property indexName - Name of the index + * @property keys - Array of vector keys to delete (1-500 items) + */ +export interface DeleteVectorsOptions { + vectorBucketName: string + indexName: string + keys: string[] +} + +/** + * Options for listing/scanning vectors in an index + * Supports parallel scanning via segment configuration + * @property vectorBucketName - Name of the vector bucket + * @property indexName - Name of the index + * @property maxResults - Maximum number of results to return (default: 500, max: 1000) + * @property nextToken - Token for pagination from previous response + * @property returnData - Whether to include vector data in response + * @property returnMetadata - Whether to include metadata in response + * @property segmentCount - Total number of parallel segments (1-16) + * @property segmentIndex - Zero-based index of this segment (0 to segmentCount-1) + */ +export interface ListVectorsOptions { + vectorBucketName: string + indexName: string + maxResults?: number + nextToken?: string + returnData?: boolean + returnMetadata?: boolean + segmentCount?: number + segmentIndex?: number +} + +/** + * Response from listing vectors + * @property vectors - Array of vector objects + * @property nextToken - Token for fetching next page (if more results exist) + */ +export interface ListVectorsResponse { + vectors: VectorMatch[] + nextToken?: string +} + +/** + * JSON filter expression for metadata filtering + * Format and syntax depend on the S3 Vectors service implementation + */ +export type VectorFilter = Record + +/** + * Options for querying similar vectors (ANN search) + * @property vectorBucketName - Name of the vector bucket + * @property indexName - Name of the index + * @property queryVector - Query vector to find similar vectors + * @property topK - Number of nearest neighbors to return (default: 10) + * @property filter - Optional JSON filter for metadata + * @property returnDistance - Whether to include distance scores + * @property returnMetadata - Whether to include metadata in results + */ +export interface QueryVectorsOptions { + vectorBucketName: string + indexName: string + queryVector: VectorData + topK?: number + filter?: VectorFilter + returnDistance?: boolean + returnMetadata?: boolean +} + +/** + * Response from vector similarity query + * @property matches - Array of similar vectors ordered by distance + */ +export interface QueryVectorsResponse { + matches: VectorMatch[] +} + +/** + * Fetch-specific parameters like abort signals + * @property signal - AbortSignal for cancelling requests + */ +export interface FetchParameters { + signal?: AbortSignal +} + +/** + * Standard response wrapper for successful operations + * @property data - Response data of type T + * @property error - Null on success + */ +export interface SuccessResponse { + data: T + error: null +} + +/** + * Standard response wrapper for failed operations + * @property data - Null on error + * @property error - StorageVectorsError with 
details + */ +export interface ErrorResponse { + data: null + error: StorageVectorsError +} + +/** + * Union type for all API responses + * Follows the pattern: { data: T, error: null } | { data: null, error: Error } + */ +export type ApiResponse = SuccessResponse | ErrorResponse diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json index ebd01ed68..be4599486 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.lib.json +++ b/packages/integrations/storage-vectors-js/tsconfig.lib.json @@ -4,9 +4,15 @@ "baseUrl": ".", "rootDir": "src", "outDir": "dist", + "target": "ES6", + "lib": ["ES2022", "dom"], "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", "emitDeclarationOnly": false, "forceConsistentCasingInFileNames": true, + "importHelpers": false, + "moduleResolution": "Node", + "esModuleInterop": true, + "strict": true, "types": ["node"] }, "include": ["src/**/*.ts"], diff --git a/packages/integrations/storage-vectors-js/tsconfig.spec.json b/packages/integrations/storage-vectors-js/tsconfig.spec.json index 74b7e8b05..47a3f1d46 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.spec.json +++ b/packages/integrations/storage-vectors-js/tsconfig.spec.json @@ -1,14 +1,33 @@ { - "extends": "../../../tsconfig.base.json", "compilerOptions": { "outDir": "./out-tsc/jest", "types": ["jest", "node"], - "forceConsistentCasingInFileNames": true + "module": "commonjs", + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "resolveJsonModule": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true, + "composite": false, + "declaration": false, + "declarationMap": false, + "emitDeclarationOnly": false, + "noEmit": true, + "target": "es2022", + "lib": ["es2022"] }, - "include": ["jest.config.ts", "src/**/*.test.ts", "src/**/*.spec.ts", "src/**/*.d.ts"], - "references": [ - { - "path": "./tsconfig.lib.json" - } + "include": [ + "jest.config.ts", + "jest.config.cjs", + "src/**/*.test.ts", + "src/**/*.spec.ts", + "src/**/*.d.ts", + "src/__tests__/**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" ] } From adad162baf3f12529deb7e6718b5596690c731b8 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Wed, 15 Oct 2025 16:04:18 +0300 Subject: [PATCH 05/36] chore(repo): npm i --- package-lock.json | 169 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 168 insertions(+), 1 deletion(-) diff --git a/package-lock.json b/package-lock.json index adc538541..b1d3f1122 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11251,6 +11251,14 @@ "node": ">=10" } }, + "node_modules/chardet": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz", + "integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==", + "dev": true, + "license": "MIT", + "peer": true + }, "node_modules/check-error": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", @@ -11784,6 +11792,17 @@ "node": ">=8" } }, + "node_modules/cli-width": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", + "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", + "dev": true, + "license": "ISC", + "peer": true, + "engines": { + "node": ">= 12" + } + }, "node_modules/clipanion": { "version": "4.0.0-rc.4", "resolved": 
"https://registry.npmjs.org/clipanion/-/clipanion-4.0.0-rc.4.tgz", @@ -18353,6 +18372,96 @@ "dev": true, "license": "ISC" }, + "node_modules/inquirer": { + "version": "9.3.8", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-9.3.8.tgz", + "integrity": "sha512-pFGGdaHrmRKMh4WoDDSowddgjT1Vkl90atobmTeSmcPGdYiwikch/m/Ef5wRaiamHejtw0cUUMMerzDUXCci2w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@inquirer/external-editor": "^1.0.2", + "@inquirer/figures": "^1.0.3", + "ansi-escapes": "^4.3.2", + "cli-width": "^4.1.0", + "mute-stream": "1.0.0", + "ora": "^5.4.1", + "run-async": "^3.0.0", + "rxjs": "^7.8.1", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^6.2.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/inquirer/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/inquirer/node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inquirer/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/inquirer/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/internal-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", @@ -24714,6 +24823,17 @@ "multicast-dns": "cli.js" } }, + "node_modules/mute-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz", + "integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==", + "dev": true, + "license": "ISC", + "peer": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/mz": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", @@ -28654,6 +28774,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/run-async": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/run-async/-/run-async-3.0.0.tgz", + "integrity": "sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.12.0" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -31775,6 +31906,7 @@ "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, "license": "0BSD" }, "node_modules/tsscmp": { @@ -33926,6 +34058,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/yoctocolors-cjs": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz", + "integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/zip-stream": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.1.tgz", @@ -37192,8 +37338,29 @@ "version": "0.0.1", "extraneous": true, "dependencies": { - "tslib": "^2.3.0" + "@supabase/node-fetch": "^2.6.13" + }, + "devDependencies": { + "@types/node": "^24.7.2", + "tslib": "^2.8.1" } + }, + "packages/integrations/storage-vectors-js/node_modules/@types/node": { + "version": "24.7.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.2.tgz", + "integrity": "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.14.0" + } + }, + "packages/integrations/storage-vectors-js/node_modules/undici-types": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", + "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", + "dev": true, + "license": "MIT" } } } From aeeb78eb5243708bbddb2cc5bce87d27ed26894b Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Wed, 15 Oct 2025 16:12:30 +0300 Subject: [PATCH 06/36] chore(storage): update some settings --- nx.json | 3 +- package-lock.json | 15 +++++-- .../storage-vectors-js/jest.config.cjs | 40 ------------------- .../storage-vectors-js/jest.config.ts | 19 +++++++++ .../storage-vectors-js/package.json | 5 ++- 5 files changed, 34 insertions(+), 48 deletions(-) delete mode 100644 packages/integrations/storage-vectors-js/jest.config.cjs create mode 100644 packages/integrations/storage-vectors-js/jest.config.ts diff --git a/nx.json b/nx.json index cf49db157..1f0242833 100644 --- a/nx.json +++ b/nx.json @@ -248,11 +248,10 @@ } ], "release": { - "projects": ["packages/core/*", "storage-vectors-js"], + "projects": ["packages/core/*"], "projectsRelationship": "fixed", "releaseTagPatternCheckAllBranchesWhen": true, "version": { - "preVersionCommand": "npx nx run-many -t build", "conventionalCommits": true }, "changelog": { diff --git a/package-lock.json b/package-lock.json index b1d3f1122..ee17f2a60 100644 --- a/package-lock.json +++ b/package-lock.json @@ -37333,10 +37333,9 @@ "dev": true, "license": "ISC" }, - "packages/utils/fetch": { - "name": "@supabase/utils-fetch", - "version": "0.0.1", - 
"extraneous": true, + "packages/integrations/storage-vectors-js": { + "name": "@supabase/storage-vectors-js", + "version": "0.0.0", "dependencies": { "@supabase/node-fetch": "^2.6.13" }, @@ -37345,6 +37344,14 @@ "tslib": "^2.8.1" } }, + "packages/integrations/storage-vectors-js-non-pub": { + "name": "@supabase-js/storage-vectors-js-non-pub", + "version": "0.0.1", + "extraneous": true, + "dependencies": { + "tslib": "^2.3.0" + } + }, "packages/integrations/storage-vectors-js/node_modules/@types/node": { "version": "24.7.2", "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.2.tgz", diff --git a/packages/integrations/storage-vectors-js/jest.config.cjs b/packages/integrations/storage-vectors-js/jest.config.cjs deleted file mode 100644 index 7bc30a7e7..000000000 --- a/packages/integrations/storage-vectors-js/jest.config.cjs +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint-disable */ - -module.exports = { - displayName: 'storage-vectors-js', - preset: '../../../jest.preset.js', - testEnvironment: 'node', - transform: { - '^.+\\.[tj]s$': [ - '@swc/jest', - { - jsc: { - target: 'es2017', - parser: { - syntax: 'typescript', - decorators: true, - dynamicImport: true, - }, - transform: { - decoratorMetadata: true, - legacyDecorator: true, - }, - keepClassNames: true, - externalHelpers: true, - loose: true, - }, - module: { - type: 'commonjs', - }, - sourceMaps: true, - exclude: [], - }, - ], - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: 'test-output/jest/coverage', - testMatch: ['**/__tests__/**/*.spec.ts'], - moduleNameMapper: { - '^(\\.{1,2}/.*)\\.js$': '$1', - }, -} diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts new file mode 100644 index 000000000..150c862f4 --- /dev/null +++ b/packages/integrations/storage-vectors-js/jest.config.ts @@ -0,0 +1,19 @@ +/* eslint-disable */ +import { readFileSync } from 'fs' + +// Reading the SWC compilation config for the spec files +const swcJestConfig = JSON.parse(readFileSync(`${__dirname}/.spec.swcrc`, 'utf-8')) + +// Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves +swcJestConfig.swcrc = false + +export default { + displayName: 'storage-vectors-js', + preset: '../../../jest.preset.js', + testEnvironment: 'node', + transform: { + '^.+\\.[tj]s$': ['@swc/jest', swcJestConfig], + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: 'test-output/jest/coverage', +} diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json index 682fcad9b..4f60513d8 100644 --- a/packages/integrations/storage-vectors-js/package.json +++ b/packages/integrations/storage-vectors-js/package.json @@ -1,6 +1,7 @@ { "name": "@supabase/storage-vectors-js", - "version": "0.0.1", + "version": "0.0.0", + "private": true, "type": "module", "main": "./dist/index.js", "module": "./dist/index.js", @@ -26,7 +27,7 @@ "test:mock": "USE_MOCK_SERVER=true jest" }, "nx": { - "name": "@supabase/storage-vectors-js" + "name": "storage-vectors-js" }, "dependencies": { "@supabase/node-fetch": "^2.6.13" From ce65330680085a4ed659f522c1c7c77c0ad58a3a Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Wed, 15 Oct 2025 17:29:22 +0300 Subject: [PATCH 07/36] chore(repo): correct references --- packages/core/storage-js/src/StorageClient.ts | 2 ++ packages/core/storage-js/tsconfig.lib.json | 22 +++++++++++++++++++ packages/core/storage-js/webpack.config.js | 6 +++++ 
.../storage-vectors-js/tsconfig.lib.json | 4 ++-- 4 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 packages/core/storage-js/tsconfig.lib.json diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index 1e7a421e7..bf270497b 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -1,6 +1,7 @@ import StorageFileApi from './packages/StorageFileApi' import StorageBucketApi from './packages/StorageBucketApi' import { Fetch } from './lib/fetch' +import { StorageVectorsClient } from '@supabase/storage-vectors-js' export interface StorageClientOptions { useNewHostname?: boolean @@ -14,6 +15,7 @@ export class StorageClient extends StorageBucketApi { opts?: StorageClientOptions ) { super(url, headers, fetch, opts) + console.log(new StorageVectorsClient('https://api.example.com')) } /** diff --git a/packages/core/storage-js/tsconfig.lib.json b/packages/core/storage-js/tsconfig.lib.json new file mode 100644 index 000000000..1a58ad78e --- /dev/null +++ b/packages/core/storage-js/tsconfig.lib.json @@ -0,0 +1,22 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "module": "ES2020", + "outDir": "./dist/main", + "rootDir": "src", + "sourceMap": true, + "target": "ES6", + "emitDeclarationOnly": false, + + "stripInternal": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.spec.ts", "src/**/*.test.ts"], + "references": [ + { + "path": "../../integrations/storage-vectors-js/tsconfig.lib.json" + } + ] +} diff --git a/packages/core/storage-js/webpack.config.js b/packages/core/storage-js/webpack.config.js index 4139d9d00..a9653327d 100644 --- a/packages/core/storage-js/webpack.config.js +++ b/packages/core/storage-js/webpack.config.js @@ -20,6 +20,12 @@ module.exports = { transpileOnly: true, }, }, + { + test: /\.m?js$/, + resolve: { + fullySpecified: false, + }, + }, ], }, resolve: { diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json index be4599486..fc8c6ef2c 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.lib.json +++ b/packages/integrations/storage-vectors-js/tsconfig.lib.json @@ -5,17 +5,17 @@ "rootDir": "src", "outDir": "dist", "target": "ES6", + "module": "ES2020", "lib": ["ES2022", "dom"], "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", "emitDeclarationOnly": false, "forceConsistentCasingInFileNames": true, "importHelpers": false, - "moduleResolution": "Node", "esModuleInterop": true, "strict": true, "types": ["node"] }, "include": ["src/**/*.ts"], "references": [], - "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"] + "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts", "src/**/__tests__/**/*"] } From 0a93b2431986ebef8aa53c008add97903bbfa379 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Wed, 15 Oct 2025 17:42:39 +0300 Subject: [PATCH 08/36] chore(repo): project references --- packages/core/functions-js/tsconfig.json.bak | 24 ++++ packages/core/postgrest-js/tsconfig.json.bak | 19 +++ packages/core/realtime-js/tsconfig.json.bak | 17 +++ packages/core/storage-js/tsconfig.lib.json | 2 + packages/core/storage-js/tsconfig.module.json | 4 +- .../lib/rest/types/common/common/common.ts | 56 ++++++++ .../src/lib/rest/types/common/common/rpc.ts | 135 ++++++++++++++++++ packages/core/supabase-js/tsconfig.lib.json | 34 +++++ 
packages/core/supabase-js/webpack.config.js | 6 + 9 files changed, 295 insertions(+), 2 deletions(-) create mode 100644 packages/core/functions-js/tsconfig.json.bak create mode 100644 packages/core/postgrest-js/tsconfig.json.bak create mode 100644 packages/core/realtime-js/tsconfig.json.bak create mode 100644 packages/core/supabase-js/src/lib/rest/types/common/common/common.ts create mode 100644 packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts create mode 100644 packages/core/supabase-js/tsconfig.lib.json diff --git a/packages/core/functions-js/tsconfig.json.bak b/packages/core/functions-js/tsconfig.json.bak new file mode 100644 index 000000000..25317bbaa --- /dev/null +++ b/packages/core/functions-js/tsconfig.json.bak @@ -0,0 +1,24 @@ +{ + "include": ["src"], + "exclude": ["node_modules/**/*.ts"], + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "module": "CommonJS", + "outDir": "dist/main", + "rootDir": "src", + "sourceMap": true, + "target": "ES2015", + + "strict": true, + + "esModuleInterop": true, + "moduleResolution": "Node", + "isolatedModules": true, + + "forceConsistentCasingInFileNames": true, + "stripInternal": true, + "allowSyntheticDefaultImports": true + }, + "typeRoots": ["./src/types"] +} diff --git a/packages/core/postgrest-js/tsconfig.json.bak b/packages/core/postgrest-js/tsconfig.json.bak new file mode 100644 index 000000000..27ce6487e --- /dev/null +++ b/packages/core/postgrest-js/tsconfig.json.bak @@ -0,0 +1,19 @@ +{ + "include": ["src"], + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "module": "CommonJS", + "outDir": "dist/cjs", + "sourceMap": true, + "target": "ES2017", + + "strict": true, + + "esModuleInterop": true, + "moduleResolution": "Node", + + "forceConsistentCasingInFileNames": true, + "stripInternal": true + } +} diff --git a/packages/core/realtime-js/tsconfig.json.bak b/packages/core/realtime-js/tsconfig.json.bak new file mode 100644 index 000000000..5a1b5e0f0 --- /dev/null +++ b/packages/core/realtime-js/tsconfig.json.bak @@ -0,0 +1,17 @@ +{ + "include": ["src"], + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "module": "CommonJS", + "outDir": "dist/main", + "rootDir": "src", + "sourceMap": true, + "target": "ES2017", + "strict": true, + "esModuleInterop": true, + "moduleResolution": "Node", + "forceConsistentCasingInFileNames": true, + "stripInternal": true + } +} diff --git a/packages/core/storage-js/tsconfig.lib.json b/packages/core/storage-js/tsconfig.lib.json index 1a58ad78e..d2201f3eb 100644 --- a/packages/core/storage-js/tsconfig.lib.json +++ b/packages/core/storage-js/tsconfig.lib.json @@ -7,6 +7,8 @@ "sourceMap": true, "target": "ES6", "emitDeclarationOnly": false, + "declaration": true, + "declarationMap": true, "stripInternal": true, "allowSyntheticDefaultImports": true, diff --git a/packages/core/storage-js/tsconfig.module.json b/packages/core/storage-js/tsconfig.module.json index 8726ca43b..65a348eb4 100644 --- a/packages/core/storage-js/tsconfig.module.json +++ b/packages/core/storage-js/tsconfig.module.json @@ -1,7 +1,7 @@ { - "extends": "./tsconfig", + "extends": "./tsconfig.lib.json", "compilerOptions": { "module": "ES2020", - "outDir": "dist/module" + "outDir": "./dist/module" } } diff --git a/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts b/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts new file mode 100644 index 000000000..9ad962ef5 --- /dev/null +++ 
b/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts @@ -0,0 +1,56 @@ +// Types that are shared between supabase-js and postgrest-js + +export type Fetch = typeof fetch + +export type GenericRelationship = { + foreignKeyName: string + columns: string[] + isOneToOne?: boolean + referencedRelation: string + referencedColumns: string[] +} + +export type GenericTable = { + Row: Record + Insert: Record + Update: Record + Relationships: GenericRelationship[] +} + +export type GenericUpdatableView = { + Row: Record + Insert: Record + Update: Record + Relationships: GenericRelationship[] +} + +export type GenericNonUpdatableView = { + Row: Record + Relationships: GenericRelationship[] +} + +export type GenericView = GenericUpdatableView | GenericNonUpdatableView + +export type GenericSetofOption = { + isSetofReturn?: boolean | undefined + isOneToOne?: boolean | undefined + isNotNullable?: boolean | undefined + to: string + from: string +} + +export type GenericFunction = { + Args: Record | never + Returns: unknown + SetofOptions?: GenericSetofOption +} + +export type GenericSchema = { + Tables: Record + Views: Record + Functions: Record +} + +export type ClientServerOptions = { + PostgrestVersion?: string +} diff --git a/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts b/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts new file mode 100644 index 000000000..52e57419a --- /dev/null +++ b/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts @@ -0,0 +1,135 @@ +import type { GenericFunction, GenericSchema, GenericSetofOption } from './common' + +// Functions matching utils +type IsMatchingArgs< + FnArgs extends GenericFunction['Args'], + PassedArgs extends GenericFunction['Args'], +> = [FnArgs] extends [Record] + ? PassedArgs extends Record + ? true + : false + : keyof PassedArgs extends keyof FnArgs + ? PassedArgs extends FnArgs + ? true + : false + : false + +type MatchingFunctionArgs< + Fn extends GenericFunction, + Args extends GenericFunction['Args'], +> = Fn extends { Args: infer A extends GenericFunction['Args'] } + ? IsMatchingArgs extends true + ? Fn + : never + : false + +type FindMatchingFunctionByArgs< + FnUnion, + Args extends GenericFunction['Args'], +> = FnUnion extends infer Fn extends GenericFunction ? MatchingFunctionArgs : false + +// Types for working with database schemas +type TablesAndViews = Schema['Tables'] & Exclude + +// Utility types for working with unions +type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void + ? I + : never + +type LastOf = + UnionToIntersection T : never> extends () => infer R ? R : never + +type IsAny = 0 extends 1 & T ? true : false + +type ExactMatch = [T] extends [S] ? ([S] extends [T] ? true : false) : false + +type ExtractExactFunction = Fns extends infer F + ? F extends GenericFunction + ? ExactMatch extends true + ? F + : never + : never + : never + +type IsNever = [T] extends [never] ? true : false + +type RpcFunctionNotFound = { + Row: any + Result: { + error: true + } & "Couldn't infer function definition matching provided arguments" + RelationName: FnName + Relationships: null +} + +export type GetRpcFunctionFilterBuilderByArgs< + Schema extends GenericSchema, + FnName extends string & keyof Schema['Functions'], + Args, +> = { + 0: Schema['Functions'][FnName] + // If the Args is exactly never (function call without any params) + 1: IsAny extends true + ? any + : IsNever extends true + ? 
// This is for retro compatibility, if the function is defined with a single return and a union of Args + // we fallback to the last function definition matched by name + IsNever> extends true + ? LastOf + : ExtractExactFunction + : Args extends Record + ? LastOf + : // Otherwise, we attempt to match with one of the function definitions in the union based + // on the function arguments provided + Args extends GenericFunction['Args'] + ? // This is for retro compatibility, if the function is defined with a single return and a union of Args + // we fallback to the last function definition matched by name + IsNever< + LastOf> + > extends true + ? LastOf + : // Otherwise, we use the arguments based function definition narrowing to get the right value + LastOf> + : // If we can't find a matching function by args, we try to find one by function name + ExtractExactFunction extends GenericFunction + ? ExtractExactFunction + : any +}[1] extends infer Fn + ? // If we are dealing with a non-typed client everything is any + IsAny extends true + ? { Row: any; Result: any; RelationName: FnName; Relationships: null } + : // Otherwise, we use the arguments based function definition narrowing to get the right value + Fn extends GenericFunction + ? { + Row: Fn['SetofOptions'] extends GenericSetofOption + ? Fn['SetofOptions']['isSetofReturn'] extends true + ? TablesAndViews[Fn['SetofOptions']['to']]['Row'] + : TablesAndViews[Fn['SetofOptions']['to']]['Row'] + : Fn['Returns'] extends any[] + ? Fn['Returns'][number] extends Record + ? Fn['Returns'][number] + : never + : Fn['Returns'] extends Record + ? Fn['Returns'] + : never + Result: Fn['SetofOptions'] extends GenericSetofOption + ? Fn['SetofOptions']['isSetofReturn'] extends true + ? Fn['SetofOptions']['isOneToOne'] extends true + ? Fn['Returns'][] + : Fn['Returns'] + : Fn['Returns'] + : Fn['Returns'] + RelationName: Fn['SetofOptions'] extends GenericSetofOption + ? Fn['SetofOptions']['to'] + : FnName + Relationships: Fn['SetofOptions'] extends GenericSetofOption + ? Fn['SetofOptions']['to'] extends keyof Schema['Tables'] + ? Schema['Tables'][Fn['SetofOptions']['to']]['Relationships'] + : Schema['Views'][Fn['SetofOptions']['to']]['Relationships'] + : null + } + : // If we failed to find the function by argument, we still pass with any but also add an overridable + Fn extends false + ?
RpcFunctionNotFound + : RpcFunctionNotFound + : RpcFunctionNotFound diff --git a/packages/core/supabase-js/tsconfig.lib.json b/packages/core/supabase-js/tsconfig.lib.json new file mode 100644 index 000000000..78cd73a5a --- /dev/null +++ b/packages/core/supabase-js/tsconfig.lib.json @@ -0,0 +1,34 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "module": "ES2020", + "outDir": "./dist/main", + "rootDir": "src", + "sourceMap": true, + "target": "ES2015", + "emitDeclarationOnly": false, + + "stripInternal": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.spec.ts", "src/**/*.test.ts"], + "references": [ + { + "path": "../storage-js/tsconfig.lib.json" + }, + { + "path": "../realtime-js" + }, + { + "path": "../postgrest-js" + }, + { + "path": "../functions-js" + }, + { + "path": "../auth-js" + } + ] +} diff --git a/packages/core/supabase-js/webpack.config.js b/packages/core/supabase-js/webpack.config.js index fc170305f..857e44041 100644 --- a/packages/core/supabase-js/webpack.config.js +++ b/packages/core/supabase-js/webpack.config.js @@ -21,6 +21,12 @@ module.exports = (env) => ({ transpileOnly: true, }, }, + { + test: /\.m?js$/, + resolve: { + fullySpecified: false, + }, + }, ], }, resolve: { From 5ad63213af0fb5bffc5f2ba0191a0c30cd7ede98 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Wed, 15 Oct 2025 17:56:17 +0300 Subject: [PATCH 09/36] chore(storage): use the new lib --- packages/core/storage-js/src/StorageClient.ts | 15 +++++++++++++-- packages/core/storage-js/src/index.ts | 1 + .../integrations/storage-vectors-js/src/index.ts | 2 +- .../storage-vectors-js/src/lib/fetch.ts | 14 +++++++------- .../storage-vectors-js/src/lib/types.ts | 2 +- 5 files changed, 23 insertions(+), 11 deletions(-) diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index bf270497b..fd037567c 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -15,7 +15,6 @@ export class StorageClient extends StorageBucketApi { opts?: StorageClientOptions ) { super(url, headers, fetch, opts) - console.log(new StorageVectorsClient('https://api.example.com')) } /** @@ -26,4 +25,16 @@ export class StorageClient extends StorageBucketApi { from(id: string): StorageFileApi { return new StorageFileApi(this.url, this.headers, id, this.fetch) } -} + + /** + * Access vector storage operations. + * + * @returns A StorageVectorsClient instance configured with the current storage settings. 
+ */ + vectors(): StorageVectorsClient { + return new StorageVectorsClient(this.url, { + headers: this.headers, + fetch: this.fetch, + }) + } +} \ No newline at end of file diff --git a/packages/core/storage-js/src/index.ts b/packages/core/storage-js/src/index.ts index b7d3d8caa..807650b7d 100644 --- a/packages/core/storage-js/src/index.ts +++ b/packages/core/storage-js/src/index.ts @@ -2,3 +2,4 @@ export { StorageClient } from './StorageClient' export type { StorageClientOptions } from './StorageClient' export * from './lib/types' export * from './lib/errors' +export * from '@supabase/storage-vectors-js' \ No newline at end of file diff --git a/packages/integrations/storage-vectors-js/src/index.ts b/packages/integrations/storage-vectors-js/src/index.ts index c5f01155f..7865ab52e 100644 --- a/packages/integrations/storage-vectors-js/src/index.ts +++ b/packages/integrations/storage-vectors-js/src/index.ts @@ -41,7 +41,7 @@ export type { ApiResponse, SuccessResponse, ErrorResponse, - FetchParameters, + VectorFetchParameters, } from './lib/types' // Errors diff --git a/packages/integrations/storage-vectors-js/src/lib/fetch.ts b/packages/integrations/storage-vectors-js/src/lib/fetch.ts index 864dbf6fc..29a7b8589 100644 --- a/packages/integrations/storage-vectors-js/src/lib/fetch.ts +++ b/packages/integrations/storage-vectors-js/src/lib/fetch.ts @@ -1,6 +1,6 @@ import { StorageVectorsApiError, StorageVectorsUnknownError } from './errors' import { isPlainObject, resolveResponse } from './helpers' -import { FetchParameters } from './types' +import { VectorFetchParameters } from './types' export type Fetch = typeof fetch @@ -69,7 +69,7 @@ const handleError = async ( const _getRequestParams = ( method: RequestMethodType, options?: FetchOptions, - parameters?: FetchParameters, + parameters?: VectorFetchParameters, body?: object ) => { const params: { [k: string]: any } = { method, headers: options?.headers || {} } @@ -103,7 +103,7 @@ async function _handleRequest( method: RequestMethodType, url: string, options?: FetchOptions, - parameters?: FetchParameters, + parameters?: VectorFetchParameters, body?: object ): Promise { return new Promise((resolve, reject) => { @@ -135,7 +135,7 @@ export async function get( fetcher: Fetch, url: string, options?: FetchOptions, - parameters?: FetchParameters + parameters?: VectorFetchParameters ): Promise { return _handleRequest(fetcher, 'GET', url, options, parameters) } @@ -154,7 +154,7 @@ export async function post( url: string, body: object, options?: FetchOptions, - parameters?: FetchParameters + parameters?: VectorFetchParameters ): Promise { return _handleRequest(fetcher, 'POST', url, options, parameters, body) } @@ -173,7 +173,7 @@ export async function put( url: string, body: object, options?: FetchOptions, - parameters?: FetchParameters + parameters?: VectorFetchParameters ): Promise { return _handleRequest(fetcher, 'PUT', url, options, parameters, body) } @@ -192,7 +192,7 @@ export async function remove( url: string, body: object, options?: FetchOptions, - parameters?: FetchParameters + parameters?: VectorFetchParameters ): Promise { return _handleRequest(fetcher, 'DELETE', url, options, parameters, body) } diff --git a/packages/integrations/storage-vectors-js/src/lib/types.ts b/packages/integrations/storage-vectors-js/src/lib/types.ts index 31182fd7d..2b798f2d9 100644 --- a/packages/integrations/storage-vectors-js/src/lib/types.ts +++ b/packages/integrations/storage-vectors-js/src/lib/types.ts @@ -268,7 +268,7 @@ export interface QueryVectorsResponse { * 
Fetch-specific parameters like abort signals * @property signal - AbortSignal for cancelling requests */ -export interface FetchParameters { +export interface VectorFetchParameters { signal?: AbortSignal } From aed1096b48d62cf2c39e08a71972ed441e3c7208 Mon Sep 17 00:00:00 2001 From: fenos Date: Thu, 16 Oct 2025 12:11:18 +0200 Subject: [PATCH 10/36] fix(storage): endpoint calls --- .../src/__tests__/bucket-api.spec.ts | 8 +-- .../src/__tests__/helpers.ts | 4 +- .../src/__tests__/index-api.spec.ts | 14 +++--- .../src/__tests__/mock-server.ts | 49 +++++++++---------- .../src/__tests__/vector-data-api.spec.ts | 8 +-- .../src/lib/VectorBucketApi.ts | 8 +-- .../src/lib/VectorDataApi.ts | 8 +-- .../src/lib/VectorIndexApi.ts | 8 +-- 8 files changed, 53 insertions(+), 54 deletions(-) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts index b06abb6bd..54ea4bb07 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts @@ -33,7 +33,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.createVectorBucket(bucketName) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorConflictException') + assertErrorCode(error, 409) expect(error.message).toContain('already exists') }) @@ -70,7 +70,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.getVectorBucket('non-existent-bucket') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) expect(error.message).toContain('not found') }) @@ -177,7 +177,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.deleteVectorBucket('non-existent-bucket') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 409) }) it('should return error when bucket is not empty', async () => { @@ -198,7 +198,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.deleteVectorBucket(bucketName) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorBucketNotEmpty') + assertErrorCode(error, 409) expect(error.message).toContain('not empty') }) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts index 779ca1bbb..03436f218 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts @@ -96,9 +96,9 @@ export async function retry( /** * Assert that an error has a specific status code */ -export function assertErrorCode(error: any, expectedCode: string) { +export function assertErrorCode(error: any, expectedCode: number) { expect(error).toBeTruthy() - expect(error.statusCode).toBe(expectedCode) + expect(error.statusCode.toString()).toBe(expectedCode.toString()) } /** diff --git a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts index f86438efe..8f4cc663c 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts @@ -98,7 +98,7 @@ describe('VectorIndexApi Integration Tests', 
() => { }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorConflictException') + assertErrorCode(error, 409) expect(error.message).toContain('already exists') }) @@ -113,7 +113,7 @@ describe('VectorIndexApi Integration Tests', () => { }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should create multiple indexes in the same bucket', async () => { @@ -208,7 +208,7 @@ describe('VectorIndexApi Integration Tests', () => { const response = await bucket.getIndex('non-existent-index') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should return not found error when bucket does not exist', async () => { @@ -216,7 +216,7 @@ describe('VectorIndexApi Integration Tests', () => { const response = await bucket.getIndex('test-index') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) }) @@ -330,7 +330,7 @@ describe('VectorIndexApi Integration Tests', () => { const response = await bucket.listIndexes() const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) }) @@ -387,7 +387,7 @@ describe('VectorIndexApi Integration Tests', () => { const response = await bucket.deleteIndex('non-existent-index') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should return not found error when bucket does not exist', async () => { @@ -395,7 +395,7 @@ describe('VectorIndexApi Integration Tests', () => { const response = await bucket.deleteIndex('test-index') const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) }) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts index d52a1b218..ee9a7f460 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts @@ -12,7 +12,7 @@ interface MockResponse { status: number data?: any error?: { - statusCode: string + statusCode: number error: string message: string } @@ -186,7 +186,7 @@ export function createMockFetch(): Fetch { response = { status: 500, error: { - statusCode: 'InternalError', + statusCode: 500, error: 'Internal Server Error', message: error.message, }, @@ -257,7 +257,7 @@ async function handleRequest( return { status: 404, error: { - statusCode: 'NotFound', + statusCode: 404, error: 'Not Found', message: `Endpoint not found: ${endpoint}`, }, @@ -272,7 +272,7 @@ function handleCreateBucket(body: any): MockResponse { return { status: 409, error: { - statusCode: 'S3VectorConflictException', + statusCode: 409, error: 'Conflict', message: `Bucket '${vectorBucketName}' already exists`, }, @@ -290,7 +290,7 @@ function handleGetBucket(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -328,7 +328,7 @@ function handleDeleteBucket(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ 
-340,7 +340,7 @@ function handleDeleteBucket(body: any): MockResponse { return { status: 400, error: { - statusCode: 'S3VectorBucketNotEmpty', + statusCode: 409, error: 'Bad Request', message: `Bucket '${vectorBucketName}' is not empty`, }, @@ -359,7 +359,7 @@ function handleCreateIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -370,7 +370,7 @@ function handleCreateIndex(body: any): MockResponse { return { status: 409, error: { - statusCode: 'S3VectorConflictException', + statusCode: 409, error: 'Conflict', message: `Index '${indexName}' already exists`, }, @@ -392,7 +392,7 @@ function handleGetIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -404,7 +404,7 @@ function handleGetIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -424,7 +424,7 @@ function handleListIndexes(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -449,7 +449,7 @@ function handleDeleteIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -460,7 +460,7 @@ function handleDeleteIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -479,7 +479,7 @@ function handlePutVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -490,7 +490,7 @@ function handlePutVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -511,7 +511,7 @@ function handleGetVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -522,7 +522,7 @@ function handleGetVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -555,7 +555,7 @@ function handleListVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -566,7 +566,7 @@ function handleListVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -596,7 +596,6 @@ function handleQueryVectors(body: any): MockResponse { const { vectorBucketName, indexName, - queryVector, topK = 10, filter, returnDistance = false, @@ -607,7 +606,7 @@ function handleQueryVectors(body: any): 
MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 409, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -618,7 +617,7 @@ function handleQueryVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -662,7 +661,7 @@ function handleDeleteVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -673,7 +672,7 @@ function handleDeleteVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 'S3VectorNotFoundException', + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, diff --git a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts index 8a5cec608..0c54cebe8 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts @@ -145,7 +145,7 @@ describe('VectorDataApi Integration Tests', () => { }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should return not found error when index does not exist', async () => { @@ -156,7 +156,7 @@ describe('VectorDataApi Integration Tests', () => { }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should handle batch size limits', async () => { @@ -608,7 +608,7 @@ describe('VectorDataApi Integration Tests', () => { const response = await index.deleteVectors({ keys: ['vec-1'] }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) it('should return not found error when index does not exist', async () => { @@ -617,7 +617,7 @@ describe('VectorDataApi Integration Tests', () => { const response = await index.deleteVectors({ keys: ['vec-1'] }) const error = assertErrorResponse(response) - assertErrorCode(error, 'S3VectorNotFoundException') + assertErrorCode(error, 404) }) }) diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts index 40f1badda..860153c43 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts @@ -1,6 +1,6 @@ import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' -import { Fetch, post, remove } from './fetch' +import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' import { ApiResponse, @@ -62,7 +62,7 @@ export default class VectorBucketApi { * } * ``` */ - async createVectorBucket(vectorBucketName: string): Promise> { + async createVectorBucket(vectorBucketName: string): Promise> { try { const data = await post( this.fetch, @@ -190,9 +190,9 @@ export default class VectorBucketApi { * } * ``` */ - async deleteVectorBucket(vectorBucketName: string): Promise> { + async deleteVectorBucket(vectorBucketName: string): Promise> { try { - const data = await remove( + const data = await post( this.fetch, 
`${this.url}/DeleteVectorBucket`, { vectorBucketName }, diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts index 3bb1d2771..39272c649 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts @@ -1,6 +1,6 @@ import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' -import { Fetch, post, remove } from './fetch' +import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' import { ApiResponse, @@ -82,7 +82,7 @@ export default class VectorDataApi { * }) * ``` */ - async putVectors(options: PutVectorsOptions): Promise> { + async putVectors(options: PutVectorsOptions): Promise> { try { // Validate batch size if (options.vectors.length < 1 || options.vectors.length > 500) { @@ -329,14 +329,14 @@ export default class VectorDataApi { * } * ``` */ - async deleteVectors(options: DeleteVectorsOptions): Promise> { + async deleteVectors(options: DeleteVectorsOptions): Promise> { try { // Validate batch size if (options.keys.length < 1 || options.keys.length > 500) { throw new Error('Keys batch size must be between 1 and 500 items') } - const data = await remove( + const data = await post( this.fetch, `${this.url}/DeleteVectors`, options, diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts index ebfb96b03..8d353f866 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts @@ -1,6 +1,6 @@ import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' -import { Fetch, post, remove } from './fetch' +import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' import { ApiResponse, @@ -90,7 +90,7 @@ export default class VectorIndexApi { * }) * ``` */ - async createIndex(options: CreateIndexOptions): Promise> { + async createIndex(options: CreateIndexOptions): Promise> { try { const data = await post( this.fetch, @@ -229,9 +229,9 @@ export default class VectorIndexApi { * } * ``` */ - async deleteIndex(vectorBucketName: string, indexName: string): Promise> { + async deleteIndex(vectorBucketName: string, indexName: string): Promise> { try { - const data = await remove( + const data = await post( this.fetch, `${this.url}/DeleteIndex`, { vectorBucketName, indexName }, From d871c142352d9e53c7dec32fad5e9b7762bce910 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 14:13:37 +0300 Subject: [PATCH 11/36] chore(repo): nx sync --- packages/core/functions-js/tsconfig.json.bak | 24 -------------------- packages/core/postgrest-js/tsconfig.json.bak | 19 ---------------- packages/core/realtime-js/tsconfig.json.bak | 17 -------------- packages/core/storage-js/tsconfig.json | 7 +++++- tsconfig.json | 6 ++--- 5 files changed, 9 insertions(+), 64 deletions(-) delete mode 100644 packages/core/functions-js/tsconfig.json.bak delete mode 100644 packages/core/postgrest-js/tsconfig.json.bak delete mode 100644 packages/core/realtime-js/tsconfig.json.bak diff --git a/packages/core/functions-js/tsconfig.json.bak b/packages/core/functions-js/tsconfig.json.bak deleted file mode 100644 index 25317bbaa..000000000 --- a/packages/core/functions-js/tsconfig.json.bak +++ /dev/null @@ -1,24 +0,0 @@ -{ - "include": ["src"], - "exclude": 
["node_modules/**/*.ts"], - "compilerOptions": { - "declaration": true, - "declarationMap": true, - "module": "CommonJS", - "outDir": "dist/main", - "rootDir": "src", - "sourceMap": true, - "target": "ES2015", - - "strict": true, - - "esModuleInterop": true, - "moduleResolution": "Node", - "isolatedModules": true, - - "forceConsistentCasingInFileNames": true, - "stripInternal": true, - "allowSyntheticDefaultImports": true - }, - "typeRoots": ["./src/types"] -} diff --git a/packages/core/postgrest-js/tsconfig.json.bak b/packages/core/postgrest-js/tsconfig.json.bak deleted file mode 100644 index 27ce6487e..000000000 --- a/packages/core/postgrest-js/tsconfig.json.bak +++ /dev/null @@ -1,19 +0,0 @@ -{ - "include": ["src"], - "compilerOptions": { - "declaration": true, - "declarationMap": true, - "module": "CommonJS", - "outDir": "dist/cjs", - "sourceMap": true, - "target": "ES2017", - - "strict": true, - - "esModuleInterop": true, - "moduleResolution": "Node", - - "forceConsistentCasingInFileNames": true, - "stripInternal": true - } -} diff --git a/packages/core/realtime-js/tsconfig.json.bak b/packages/core/realtime-js/tsconfig.json.bak deleted file mode 100644 index 5a1b5e0f0..000000000 --- a/packages/core/realtime-js/tsconfig.json.bak +++ /dev/null @@ -1,17 +0,0 @@ -{ - "include": ["src"], - "compilerOptions": { - "declaration": true, - "declarationMap": true, - "module": "CommonJS", - "outDir": "dist/main", - "rootDir": "src", - "sourceMap": true, - "target": "ES2017", - "strict": true, - "esModuleInterop": true, - "moduleResolution": "Node", - "forceConsistentCasingInFileNames": true, - "stripInternal": true - } -} diff --git a/packages/core/storage-js/tsconfig.json b/packages/core/storage-js/tsconfig.json index e4361c213..348331e6a 100644 --- a/packages/core/storage-js/tsconfig.json +++ b/packages/core/storage-js/tsconfig.json @@ -22,5 +22,10 @@ "allowSyntheticDefaultImports": true, "noImplicitOverride": false, "isolatedModules": false - } + }, + "references": [ + { + "path": "../../integrations/storage-vectors-js" + } + ] } diff --git a/tsconfig.json b/tsconfig.json index c02f6d2b0..c7b31066f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -12,14 +12,14 @@ { "path": "./packages/core/realtime-js" }, - { - "path": "./packages/core/supabase-js" - }, { "path": "./packages/core/storage-js" }, { "path": "./packages/core/auth-js" + }, + { + "path": "./packages/integrations/storage-vectors-js" } ] } From 3292c4ab3ebef80c99a7d0d363a5bd0823adf039 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 14:16:07 +0300 Subject: [PATCH 12/36] chore(repo): removed deps --- jest.config.ts | 6 --- jest.preset.js | 3 -- package-lock.json | 118 ---------------------------------------------- package.json | 5 -- 4 files changed, 132 deletions(-) delete mode 100644 jest.config.ts delete mode 100644 jest.preset.js diff --git a/jest.config.ts b/jest.config.ts deleted file mode 100644 index 3a5809fbf..000000000 --- a/jest.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { Config } from 'jest' -import { getJestProjectsAsync } from '@nx/jest' - -export default async (): Promise => ({ - projects: await getJestProjectsAsync(), -}) diff --git a/jest.preset.js b/jest.preset.js deleted file mode 100644 index 8cd53f8e9..000000000 --- a/jest.preset.js +++ /dev/null @@ -1,3 +0,0 @@ -const nxPreset = require('@nx/jest/preset').default - -module.exports = { ...nxPreset } diff --git a/package-lock.json b/package-lock.json index ee17f2a60..ebc9d034b 100644 --- a/package-lock.json +++ 
b/package-lock.json @@ -31,10 +31,8 @@ "@nx/web": "21.6.2", "@nx/webpack": "21.6.2", "@supabase/node-fetch": "2.6.15", - "@swc-node/register": "~1.9.1", "@swc/core": "~1.5.7", "@swc/helpers": "~0.5.11", - "@swc/jest": "~0.2.36", "@types/faker": "^5.1.6", "@types/jest": "^29.5.14", "@types/jsonwebtoken": "^8.5.8", @@ -53,9 +51,7 @@ "faker": "^5.3.1", "husky": "^9.1.7", "jest": "^29.7.0", - "jest-environment-node": "^29.7.0", "jest-mock-server": "^0.1.0", - "jest-util": "^29.7.0", "jiti": "2.4.2", "jsonc-eslint-parser": "^2.1.0", "jsonwebtoken": "^9.0.0", @@ -63,7 +59,6 @@ "prettier": "^3.6.2", "rimraf": "^6.0.1", "semantic-release-plugin-update-version-in-files": "^1.1.0", - "ts-jest": "^29.1.0", "ts-node": "^10.9.1", "tslib": "^2.3.0", "typedoc": "^0.27.9", @@ -4231,38 +4226,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/@jest/create-cache-key-function": { - "version": "30.2.0", - "resolved": "https://registry.npmjs.org/@jest/create-cache-key-function/-/create-cache-key-function-30.2.0.tgz", - "integrity": "sha512-44F4l4Enf+MirJN8X/NhdGkl71k5rBYiwdVlo4HxOwbu0sHV8QKrGEedb1VUU4K3W7fBKE0HGfbn7eZm0Ti3zg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "30.2.0" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - } - }, - "node_modules/@jest/create-cache-key-function/node_modules/@jest/types": { - "version": "30.2.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", - "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/pattern": "30.0.1", - "@jest/schemas": "30.0.5", - "@types/istanbul-lib-coverage": "^2.0.6", - "@types/istanbul-reports": "^3.0.4", - "@types/node": "*", - "@types/yargs": "^17.0.33", - "chalk": "^4.1.2" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - } - }, "node_modules/@jest/diff-sequences": { "version": "30.0.1", "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", @@ -6621,69 +6584,6 @@ "resolved": "packages/core/supabase-js", "link": true }, - "node_modules/@swc-node/core": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@swc-node/core/-/core-1.14.1.tgz", - "integrity": "sha512-jrt5GUaZUU6cmMS+WTJEvGvaB6j1YNKPHPzC2PUi2BjaFbtxURHj6641Az6xN7b665hNniAIdvjxWcRml5yCnw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Brooooooklyn" - }, - "peerDependencies": { - "@swc/core": ">= 1.13.3", - "@swc/types": ">= 0.1" - } - }, - "node_modules/@swc-node/register": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/@swc-node/register/-/register-1.9.2.tgz", - "integrity": "sha512-BBjg0QNuEEmJSoU/++JOXhrjWdu3PTyYeJWsvchsI0Aqtj8ICkz/DqlwtXbmZVZ5vuDPpTfFlwDBZe81zgShMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@swc-node/core": "^1.13.1", - "@swc-node/sourcemap-support": "^0.5.0", - "colorette": "^2.0.20", - "debug": "^4.3.4", - "pirates": "^4.0.6", - "tslib": "^2.6.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Brooooooklyn" - }, - "peerDependencies": { - "@swc/core": ">= 1.4.13", - "typescript": ">= 4.3" - } - }, - "node_modules/@swc-node/sourcemap-support": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/@swc-node/sourcemap-support/-/sourcemap-support-0.5.1.tgz", - "integrity": 
"sha512-JxIvIo/Hrpv0JCHSyRpetAdQ6lB27oFYhv0PKCNf1g2gUXOjpeR1exrXccRxLMuAV5WAmGFBwRnNOJqN38+qtg==", - "dev": true, - "license": "MIT", - "dependencies": { - "source-map-support": "^0.5.21", - "tslib": "^2.6.3" - } - }, - "node_modules/@swc-node/sourcemap-support/node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, "node_modules/@swc/core": { "version": "1.5.29", "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.5.29.tgz", @@ -6910,24 +6810,6 @@ "tslib": "^2.8.0" } }, - "node_modules/@swc/jest": { - "version": "0.2.39", - "resolved": "https://registry.npmjs.org/@swc/jest/-/jest-0.2.39.tgz", - "integrity": "sha512-eyokjOwYd0Q8RnMHri+8/FS1HIrIUKK/sRrFp8c1dThUOfNeCWbLmBP1P5VsKdvmkd25JaH+OKYwEYiAYg9YAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/create-cache-key-function": "^30.0.0", - "@swc/counter": "^0.1.3", - "jsonc-parser": "^3.2.0" - }, - "engines": { - "npm": ">= 7.0.0" - }, - "peerDependencies": { - "@swc/core": "*" - } - }, "node_modules/@swc/types": { "version": "0.1.25", "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz", diff --git a/package.json b/package.json index 65a2e4ca5..fea23590c 100644 --- a/package.json +++ b/package.json @@ -31,10 +31,8 @@ "@nx/web": "21.6.2", "@nx/webpack": "21.6.2", "@supabase/node-fetch": "2.6.15", - "@swc-node/register": "~1.9.1", "@swc/core": "~1.5.7", "@swc/helpers": "~0.5.11", - "@swc/jest": "~0.2.36", "@types/faker": "^5.1.6", "@types/jest": "^29.5.14", "@types/jsonwebtoken": "^8.5.8", @@ -53,9 +51,7 @@ "faker": "^5.3.1", "husky": "^9.1.7", "jest": "^29.7.0", - "jest-environment-node": "^29.7.0", "jest-mock-server": "^0.1.0", - "jest-util": "^29.7.0", "jiti": "2.4.2", "jsonc-eslint-parser": "^2.1.0", "jsonwebtoken": "^9.0.0", @@ -63,7 +59,6 @@ "prettier": "^3.6.2", "rimraf": "^6.0.1", "semantic-release-plugin-update-version-in-files": "^1.1.0", - "ts-jest": "^29.1.0", "ts-node": "^10.9.1", "tslib": "^2.3.0", "typedoc": "^0.27.9", From e2a74f3a3d99345d722c8f26bba3b0d6e872bc44 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 14:31:18 +0300 Subject: [PATCH 13/36] fix(repo): small syntax update --- packages/integrations/storage-vectors-js/jest.config.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts index 150c862f4..acf3bfbb0 100644 --- a/packages/integrations/storage-vectors-js/jest.config.ts +++ b/packages/integrations/storage-vectors-js/jest.config.ts @@ -1,8 +1,13 @@ /* eslint-disable */ import { readFileSync } from 'fs' +import { fileURLToPath } from 'url' +import { dirname, join } from 'path' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) // Reading the SWC compilation config for the spec files -const swcJestConfig = JSON.parse(readFileSync(`${__dirname}/.spec.swcrc`, 'utf-8')) +const swcJestConfig = JSON.parse(readFileSync(join(__dirname, '.spec.swcrc'), 'utf-8')) // Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves swcJestConfig.swcrc = false From a5fdafd8854eb1b58d760c09914941bc82db099d Mon Sep 17 00:00:00 2001 From: fenos 
Date: Thu, 16 Oct 2025 14:03:41 +0200 Subject: [PATCH 14/36] fix(storage): url suffix --- packages/core/storage-js/src/StorageClient.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index fd037567c..ea5dde9e5 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -32,7 +32,7 @@ export class StorageClient extends StorageBucketApi { * @returns A StorageVectorsClient instance configured with the current storage settings. */ vectors(): StorageVectorsClient { - return new StorageVectorsClient(this.url, { + return new StorageVectorsClient(this.url + '/vector', { headers: this.headers, fetch: this.fetch, }) From 89240214303ec7fe9b1f90bcf54a3f82369b8366 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 15:39:57 +0300 Subject: [PATCH 15/36] chore(postgrest): tstyche config --- package-lock.json | 8 -------- packages/core/functions-js/jest.config.ts | 5 ----- packages/core/functions-js/tsconfig.test.json | 9 --------- 3 files changed, 22 deletions(-) delete mode 100644 packages/core/functions-js/tsconfig.test.json diff --git a/package-lock.json b/package-lock.json index ebc9d034b..620d9c0aa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -37226,14 +37226,6 @@ "tslib": "^2.8.1" } }, - "packages/integrations/storage-vectors-js-non-pub": { - "name": "@supabase-js/storage-vectors-js-non-pub", - "version": "0.0.1", - "extraneous": true, - "dependencies": { - "tslib": "^2.3.0" - } - }, "packages/integrations/storage-vectors-js/node_modules/@types/node": { "version": "24.7.2", "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.2.tgz", diff --git a/packages/core/functions-js/jest.config.ts b/packages/core/functions-js/jest.config.ts index 1ce27bfe2..12e789a6d 100644 --- a/packages/core/functions-js/jest.config.ts +++ b/packages/core/functions-js/jest.config.ts @@ -39,10 +39,5 @@ const config: Config.InitialOptions = { statements: 0, }, }, - globals: { - 'ts-jest': { - tsconfig: 'tsconfig.test.json', - }, - }, } export default config diff --git a/packages/core/functions-js/tsconfig.test.json b/packages/core/functions-js/tsconfig.test.json deleted file mode 100644 index 24980bda7..000000000 --- a/packages/core/functions-js/tsconfig.test.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "./tsconfig.json", - "include": ["src", "test"], - "compilerOptions": { - "composite": false, - "outDir": "dist/test", - "rootDir": "." 
- } -} From dc9025c09d69aa7ae764f05431b2b493553fc970 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 16:17:01 +0300 Subject: [PATCH 16/36] chore(ci): fix some settings --- nx.json | 1 + .../integrations/storage-vectors-js/jest.config.ts | 10 +++------- packages/integrations/storage-vectors-js/package.json | 4 +--- .../integrations/storage-vectors-js/tsconfig.lib.json | 5 +++-- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/nx.json b/nx.json index 1f0242833..b6b005b51 100644 --- a/nx.json +++ b/nx.json @@ -129,6 +129,7 @@ }, "test:integration": { "inputs": ["testing", "^production"], + "dependsOn": ["^build"], "cache": true, "outputs": ["{projectRoot}/coverage"] }, diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts index acf3bfbb0..fb15bbd1a 100644 --- a/packages/integrations/storage-vectors-js/jest.config.ts +++ b/packages/integrations/storage-vectors-js/jest.config.ts @@ -1,10 +1,6 @@ /* eslint-disable */ -import { readFileSync } from 'fs' -import { fileURLToPath } from 'url' -import { dirname, join } from 'path' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = dirname(__filename) +const { readFileSync } = require('fs') +const { join } = require('path') // Reading the SWC compilation config for the spec files const swcJestConfig = JSON.parse(readFileSync(join(__dirname, '.spec.swcrc'), 'utf-8')) @@ -12,7 +8,7 @@ const swcJestConfig = JSON.parse(readFileSync(join(__dirname, '.spec.swcrc'), 'u // Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves swcJestConfig.swcrc = false -export default { +module.exports = { displayName: 'storage-vectors-js', preset: '../../../jest.preset.js', testEnvironment: 'node', diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json index 4f60513d8..c733fdcee 100644 --- a/packages/integrations/storage-vectors-js/package.json +++ b/packages/integrations/storage-vectors-js/package.json @@ -2,16 +2,14 @@ "name": "@supabase/storage-vectors-js", "version": "0.0.0", "private": true, - "type": "module", "main": "./dist/index.js", - "module": "./dist/index.js", "types": "./dist/index.d.ts", "exports": { "./package.json": "./package.json", ".": { "@supabase-js/source": "./src/index.ts", "types": "./dist/index.d.ts", - "import": "./dist/index.js", + "require": "./dist/index.js", "default": "./dist/index.js" } }, diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json index fc8c6ef2c..0be08ac4f 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.lib.json +++ b/packages/integrations/storage-vectors-js/tsconfig.lib.json @@ -4,8 +4,9 @@ "baseUrl": ".", "rootDir": "src", "outDir": "dist", - "target": "ES6", - "module": "ES2020", + "target": "ES2017", + "module": "CommonJS", + "moduleResolution": "Node", "lib": ["ES2022", "dom"], "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", "emitDeclarationOnly": false, From 984181ab7d4090084187edfa331e380afeee5135 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 16:21:34 +0300 Subject: [PATCH 17/36] chore(ci): remove format check from postgrest --- packages/core/postgrest-js/README.md | 2 -- packages/core/supabase-js/supabase/.temp/cli-latest | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/core/postgrest-js/README.md b/packages/core/postgrest-js/README.md index 
52f4a41b3..130d12d64 100644 --- a/packages/core/postgrest-js/README.md +++ b/packages/core/postgrest-js/README.md @@ -143,8 +143,6 @@ npx nx test:update postgrest-js # Type checking only npx nx test:types postgrest-js -# Format checking -npx nx format:check postgrest-js ``` #### Test Infrastructure diff --git a/packages/core/supabase-js/supabase/.temp/cli-latest b/packages/core/supabase-js/supabase/.temp/cli-latest index c5299e677..2213dd2c1 100644 --- a/packages/core/supabase-js/supabase/.temp/cli-latest +++ b/packages/core/supabase-js/supabase/.temp/cli-latest @@ -1 +1 @@ -v2.47.2 \ No newline at end of file +v2.51.0 \ No newline at end of file From 47ac3ec2295b8e845a59511c83b4eb706954ddda Mon Sep 17 00:00:00 2001 From: fenos Date: Thu, 16 Oct 2025 16:41:22 +0200 Subject: [PATCH 18/36] fix(storage): add custom endpoints for analytics buckets --- packages/core/storage-js/src/StorageClient.ts | 26 ++- packages/core/storage-js/src/index.ts | 1 + packages/core/storage-js/src/lib/types.ts | 8 + .../src/packages/StorageAnalyticsApi.ts | 203 ++++++++++++++++++ 4 files changed, 237 insertions(+), 1 deletion(-) create mode 100644 packages/core/storage-js/src/packages/StorageAnalyticsApi.ts diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index ea5dde9e5..0cb48ffed 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -1,5 +1,6 @@ import StorageFileApi from './packages/StorageFileApi' import StorageBucketApi from './packages/StorageBucketApi' +import StorageAnalyticsApi from './packages/StorageAnalyticsApi' import { Fetch } from './lib/fetch' import { StorageVectorsClient } from '@supabase/storage-vectors-js' @@ -31,10 +32,33 @@ export class StorageClient extends StorageBucketApi { * * @returns A StorageVectorsClient instance configured with the current storage settings. */ - vectors(): StorageVectorsClient { + get vectors(): StorageVectorsClient { return new StorageVectorsClient(this.url + '/vector', { headers: this.headers, fetch: this.fetch, }) } + + /** + * Access analytics storage operations using Iceberg tables. + * + * @returns A StorageAnalyticsApi instance configured with the current storage settings. 
+ * @example + * ```typescript + * const client = createClient(url, key) + * const analytics = client.storage.analytics + * + * // Create an analytics bucket + * await analytics.createBucket('my-analytics-bucket', { public: false }) + * + * // List all analytics buckets + * const { data: buckets } = await analytics.listBuckets() + * + * // Delete an analytics bucket + * await analytics.deleteBucket('old-analytics-bucket') + * ``` + */ + get analytics(): StorageAnalyticsApi { + return new StorageAnalyticsApi(this.url + '/iceberg', this.headers, this.fetch) + } } \ No newline at end of file diff --git a/packages/core/storage-js/src/index.ts b/packages/core/storage-js/src/index.ts index 807650b7d..405b2281a 100644 --- a/packages/core/storage-js/src/index.ts +++ b/packages/core/storage-js/src/index.ts @@ -1,5 +1,6 @@ export { StorageClient } from './StorageClient' export type { StorageClientOptions } from './StorageClient' +export { default as StorageAnalyticsApi } from './packages/StorageAnalyticsApi' export * from './lib/types' export * from './lib/errors' export * from '@supabase/storage-vectors-js' \ No newline at end of file diff --git a/packages/core/storage-js/src/lib/types.ts b/packages/core/storage-js/src/lib/types.ts index f692c5aa9..829ff7c6d 100644 --- a/packages/core/storage-js/src/lib/types.ts +++ b/packages/core/storage-js/src/lib/types.ts @@ -14,6 +14,14 @@ export interface Bucket { public: boolean } +export interface AnalyticBucket { + id: string + type: "ANALYTICS" + format: string + created_at: string + updated_at: string +} + export interface FileObject { name: string bucket_id: string diff --git a/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts new file mode 100644 index 000000000..98104a685 --- /dev/null +++ b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts @@ -0,0 +1,203 @@ +import { DEFAULT_HEADERS } from '../lib/constants' +import { isStorageError, StorageError } from '../lib/errors' +import { Fetch, get, post, remove } from '../lib/fetch' +import { resolveFetch } from '../lib/helpers' +import { AnalyticBucket, Bucket } from '../lib/types' + +/** + * API class for managing Analytics Buckets using Iceberg tables + * Provides methods for creating, listing, and deleting analytics buckets + */ +export default class StorageAnalyticsApi { + protected url: string + protected headers: { [key: string]: string } + protected fetch: Fetch + protected shouldThrowOnError = false + + constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { + this.url = url.replace(/\/$/, '') + this.headers = { ...DEFAULT_HEADERS, ...headers } + this.fetch = resolveFetch(fetch) + } + + /** + * Enable throwing errors instead of returning them in the response + * When enabled, failed operations will throw instead of returning { data: null, error } + * + * @returns This instance for method chaining + */ + public throwOnError(): this { + this.shouldThrowOnError = true + return this + } + + /** + * Creates a new analytics bucket using Iceberg tables + * Analytics buckets are optimized for analytical queries and data processing + * + * @param name A unique name for the bucket you are creating + * @returns Promise with newly created bucket name or error + * + * @example + * ```typescript + * const { data, error } = await storage.analytics.createBucket('analytics-data') + * if (error) { + * console.error('Failed to create analytics bucket:', error.message) + * } else { + * console.log('Created 
bucket:', data.name) + * } + * ``` + */ + async createBucket( + name: string + ): Promise< + | { + data: AnalyticBucket, + error: null + } + | { + data: null + error: StorageError + } + > { + try { + const data = await post( + this.fetch, + `${this.url}/bucket`, + { name }, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageError(error)) { + return { data: null, error } + } + + throw error + } + } + + /** + * Retrieves the details of all Analytics Storage buckets within an existing project + * Only returns buckets of type 'ANALYTICS' + * + * @param options Query parameters for listing buckets + * @param options.limit Maximum number of buckets to return + * @param options.offset Number of buckets to skip + * @param options.sortColumn Column to sort by ('id', 'name', 'created_at', 'updated_at') + * @param options.sortOrder Sort order ('asc' or 'desc') + * @param options.search Search term to filter bucket names + * @returns Promise with list of analytics buckets or error + * + * @example + * ```typescript + * const { data, error } = await storage.analytics.listBuckets({ + * limit: 10, + * offset: 0, + * sortColumn: 'created_at', + * sortOrder: 'desc', + * search: 'analytics' + * }) + * if (data) { + * console.log('Found analytics buckets:', data.length) + * data.forEach(bucket => console.log(`- ${bucket.name}`)) + * } + * ``` + */ + async listBuckets(options?: { + limit?: number + offset?: number + sortColumn?: 'id' | 'name' | 'created_at' | 'updated_at' + sortOrder?: 'asc' | 'desc' + search?: string + }): Promise< + | { + data: AnalyticBucket[] + error: null + } + | { + data: null + error: StorageError + } + > { + try { + // Build query string from options + const queryParams = new URLSearchParams() + if (options?.limit !== undefined) queryParams.set('limit', options.limit.toString()) + if (options?.offset !== undefined) queryParams.set('offset', options.offset.toString()) + if (options?.sortColumn) queryParams.set('sortColumn', options.sortColumn) + if (options?.sortOrder) queryParams.set('sortOrder', options.sortOrder) + if (options?.search) queryParams.set('search', options.search) + + const queryString = queryParams.toString() + const url = queryString ? `${this.url}/bucket?${queryString}` : `${this.url}/bucket` + + const data = await get(this.fetch, url, { headers: this.headers }) + // Filter to only return analytics buckets + const analyticsBuckets = Array.isArray(data) + ? 
data.filter((bucket: Bucket) => bucket.type === 'ANALYTICS') + : [] + return { data: analyticsBuckets, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageError(error)) { + return { data: null, error } + } + + throw error + } + } + + /** + * Deletes an existing analytics bucket + * A bucket can't be deleted with existing objects inside it + * You must first empty the bucket before deletion + * + * @param bucketId The unique identifier of the bucket you would like to delete + * @returns Promise with success message or error + * + * @example + * ```typescript + * const { data, error } = await analyticsApi.deleteBucket('old-analytics-bucket') + * if (error) { + * console.error('Failed to delete bucket:', error.message) + * } else { + * console.log('Bucket deleted successfully:', data.message) + * } + * ``` + */ + async deleteBucket(bucketId: string): Promise< + | { + data: { message: string } + error: null + } + | { + data: null + error: StorageError + } + > { + try { + const data = await remove( + this.fetch, + `${this.url}/bucket/${bucketId}`, + {}, + { headers: this.headers } + ) + return { data, error: null } + } catch (error) { + if (this.shouldThrowOnError) { + throw error + } + if (isStorageError(error)) { + return { data: null, error } + } + + throw error + } + } +} \ No newline at end of file From 2ddc24677a68a90fe97dfd57d68b49b6eb7d8e53 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 18:02:26 +0300 Subject: [PATCH 19/36] chore(repo): ran nx format --- packages/core/storage-js/src/StorageClient.ts | 2 +- packages/core/storage-js/src/index.ts | 2 +- packages/core/storage-js/src/lib/types.ts | 2 +- .../src/packages/StorageAnalyticsApi.ts | 15 +-- .../integrations/storage-vectors-js/README.md | 104 +++++++++++------- .../src/__tests__/bucket-api.spec.ts | 17 ++- .../src/__tests__/e2e-workflow.spec.ts | 13 +-- .../src/__tests__/helpers.ts | 15 +-- .../src/__tests__/index-api.spec.ts | 13 ++- .../src/__tests__/mock-server.ts | 28 ++--- .../src/__tests__/vector-data-api.spec.ts | 6 +- .../storage-vectors-js/src/index.ts | 14 ++- .../src/lib/StorageVectorsClient.ts | 12 +- .../src/lib/VectorBucketApi.ts | 13 +-- .../src/lib/VectorDataApi.ts | 45 +++----- .../src/lib/VectorIndexApi.ts | 18 +-- .../storage-vectors-js/src/lib/constants.ts | 2 +- .../storage-vectors-js/src/lib/helpers.ts | 5 +- .../storage-vectors-js/tsconfig.spec.json | 5 +- 19 files changed, 168 insertions(+), 163 deletions(-) diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index 0cb48ffed..2127187b8 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -61,4 +61,4 @@ export class StorageClient extends StorageBucketApi { get analytics(): StorageAnalyticsApi { return new StorageAnalyticsApi(this.url + '/iceberg', this.headers, this.fetch) } -} \ No newline at end of file +} diff --git a/packages/core/storage-js/src/index.ts b/packages/core/storage-js/src/index.ts index 405b2281a..b6b86b6b7 100644 --- a/packages/core/storage-js/src/index.ts +++ b/packages/core/storage-js/src/index.ts @@ -3,4 +3,4 @@ export type { StorageClientOptions } from './StorageClient' export { default as StorageAnalyticsApi } from './packages/StorageAnalyticsApi' export * from './lib/types' export * from './lib/errors' -export * from '@supabase/storage-vectors-js' \ No newline at end of file +export * from '@supabase/storage-vectors-js' diff --git 
a/packages/core/storage-js/src/lib/types.ts b/packages/core/storage-js/src/lib/types.ts index 829ff7c6d..2905c6858 100644 --- a/packages/core/storage-js/src/lib/types.ts +++ b/packages/core/storage-js/src/lib/types.ts @@ -16,7 +16,7 @@ export interface Bucket { export interface AnalyticBucket { id: string - type: "ANALYTICS" + type: 'ANALYTICS' format: string created_at: string updated_at: string diff --git a/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts index 98104a685..d01dd6206 100644 --- a/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts +++ b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts @@ -48,11 +48,9 @@ export default class StorageAnalyticsApi { * } * ``` */ - async createBucket( - name: string - ): Promise< + async createBucket(name: string): Promise< | { - data: AnalyticBucket, + data: AnalyticBucket error: null } | { @@ -61,12 +59,7 @@ export default class StorageAnalyticsApi { } > { try { - const data = await post( - this.fetch, - `${this.url}/bucket`, - { name }, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/bucket`, { name }, { headers: this.headers }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -200,4 +193,4 @@ export default class StorageAnalyticsApi { throw error } } -} \ No newline at end of file +} diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md index 28a93e200..950703a8b 100644 --- a/packages/integrations/storage-vectors-js/README.md +++ b/packages/integrations/storage-vectors-js/README.md @@ -27,7 +27,7 @@ import { StorageVectorsClient } from '@supabase/storage-vectors-js' // Initialize client const client = new StorageVectorsClient('https://api.example.com', { - headers: { 'Authorization': 'Bearer YOUR_TOKEN' } + headers: { Authorization: 'Bearer YOUR_TOKEN' }, }) // Create a vector bucket @@ -39,7 +39,7 @@ await bucket.createIndex({ indexName: 'documents-openai', dataType: 'float32', dimension: 1536, - distanceMetric: 'cosine' + distanceMetric: 'cosine', }) // Insert vectors @@ -48,22 +48,22 @@ await index.putVectors({ vectors: [ { key: 'doc-1', - data: { float32: [0.1, 0.2, 0.3, /* ...1536 dimensions */] }, - metadata: { title: 'Introduction', category: 'docs' } - } - ] + data: { float32: [0.1, 0.2, 0.3 /* ...1536 dimensions */] }, + metadata: { title: 'Introduction', category: 'docs' }, + }, + ], }) // Query similar vectors const { data, error } = await index.queryVectors({ - queryVector: { float32: [0.15, 0.25, 0.35, /* ...1536 dimensions */] }, + queryVector: { float32: [0.15, 0.25, 0.35 /* ...1536 dimensions */] }, topK: 5, returnDistance: true, - returnMetadata: true + returnMetadata: true, }) if (data) { - data.matches.forEach(match => { + data.matches.forEach((match) => { console.log(`${match.key}: distance=${match.distance}`) console.log('Metadata:', match.metadata) }) @@ -79,6 +79,7 @@ const client = new StorageVectorsClient(url, options?) ``` **Options:** + - `headers?: Record` - Custom HTTP headers (e.g., Authorization) - `fetch?: Fetch` - Custom fetch implementation @@ -104,7 +105,7 @@ console.log('Created at:', new Date(data.vectorBucket.creationTime! 
* 1000)) ```typescript const { data, error } = await client.listVectorBuckets({ prefix: 'prod-', - maxResults: 100 + maxResults: 100, }) // Pagination @@ -135,12 +136,13 @@ await bucket.createIndex({ dimension: 1536, distanceMetric: 'cosine', // 'cosine' | 'euclidean' | 'dotproduct' metadataConfiguration: { - nonFilterableMetadataKeys: ['raw_text', 'internal_id'] - } + nonFilterableMetadataKeys: ['raw_text', 'internal_id'], + }, }) ``` **Distance Metrics:** + - `cosine` - Cosine similarity (normalized dot product) - `euclidean` - Euclidean distance (L2 norm) - `dotproduct` - Dot product similarity @@ -158,7 +160,7 @@ console.log('Distance metric:', data?.index.distanceMetric) ```typescript const { data, error } = await bucket.listIndexes({ prefix: 'documents-', - maxResults: 100 + maxResults: 100, }) ``` @@ -180,19 +182,24 @@ await index.putVectors({ vectors: [ { key: 'unique-id-1', - data: { float32: [/* 1536 numbers */] }, + data: { + float32: [ + /* 1536 numbers */ + ], + }, metadata: { title: 'Document Title', category: 'technical', - page: 1 - } + page: 1, + }, }, // ... up to 500 vectors per request - ] + ], }) ``` **Limitations:** + - 1-500 vectors per request - Vectors must match index dimension - Keys must be unique within index @@ -202,11 +209,11 @@ await index.putVectors({ ```typescript const { data, error } = await index.getVectors({ keys: ['doc-1', 'doc-2', 'doc-3'], - returnData: true, // Include embeddings (requires permission) - returnMetadata: true // Include metadata (requires permission) + returnData: true, // Include embeddings (requires permission) + returnMetadata: true, // Include metadata (requires permission) }) -data?.vectors.forEach(v => { +data?.vectors.forEach((v) => { console.log(v.key, v.metadata) }) ``` @@ -215,18 +222,22 @@ data?.vectors.forEach(v => { ```typescript const { data, error } = await index.queryVectors({ - queryVector: { float32: [/* 1536 numbers */] }, + queryVector: { + float32: [ + /* 1536 numbers */ + ], + }, topK: 10, filter: { category: 'technical', - published: true + published: true, }, returnDistance: true, - returnMetadata: true + returnMetadata: true, }) // Results ordered by similarity -data?.matches.forEach(match => { +data?.matches.forEach((match) => { console.log(`${match.key}: distance=${match.distance}`) }) ``` @@ -243,7 +254,7 @@ do { const { data } = await index.listVectors({ maxResults: 500, nextToken, - returnMetadata: true + returnMetadata: true, }) console.log('Batch:', data?.vectors.length) @@ -255,7 +266,7 @@ const workers = [0, 1, 2, 3].map(async (segmentIndex) => { const { data } = await index.listVectors({ segmentCount: 4, segmentIndex, - returnMetadata: true + returnMetadata: true, }) return data?.vectors || [] }) @@ -265,6 +276,7 @@ const allVectors = results.flat() ``` **Limitations:** + - `maxResults`: 1-1000 (default: 500) - `segmentCount`: 1-16 - Response may be limited by 1MB size @@ -273,7 +285,7 @@ const allVectors = results.flat() ```typescript await index.deleteVectors({ - keys: ['doc-1', 'doc-2', 'doc-3'] + keys: ['doc-1', 'doc-2', 'doc-3'], // ... 
up to 500 keys per request }) ``` @@ -294,14 +306,14 @@ if (error) { ### Error Codes -| Code | HTTP | Description | -|------|------|-------------| -| `InternalError` | 500 | Internal server error | -| `S3VectorConflictException` | 409 | Resource already exists | -| `S3VectorNotFoundException` | 404 | Resource not found | -| `S3VectorBucketNotEmpty` | 400 | Bucket contains indexes | -| `S3VectorMaxBucketsExceeded` | 400 | Bucket quota exceeded | -| `S3VectorMaxIndexesExceeded` | 400 | Index quota exceeded | +| Code | HTTP | Description | +| ---------------------------- | ---- | ----------------------- | +| `InternalError` | 500 | Internal server error | +| `S3VectorConflictException` | 409 | Resource already exists | +| `S3VectorNotFoundException` | 404 | Resource not found | +| `S3VectorBucketNotEmpty` | 400 | Bucket contains indexes | +| `S3VectorMaxBucketsExceeded` | 400 | Bucket quota exceeded | +| `S3VectorMaxIndexesExceeded` | 400 | Index quota exceeded | ### Throwing Errors @@ -330,13 +342,19 @@ Create scoped clients for cleaner code: ```typescript // Bucket-scoped operations const bucket = client.bucket('embeddings-prod') -await bucket.createIndex({ /* ... */ }) +await bucket.createIndex({ + /* ... */ +}) await bucket.listIndexes() // Index-scoped operations const index = bucket.index('documents-openai') -await index.putVectors({ /* ... */ }) -await index.queryVectors({ /* ... */ }) +await index.putVectors({ + /* ... */ +}) +await index.queryVectors({ + /* ... */ +}) ``` ### Custom Fetch @@ -348,7 +366,9 @@ import { StorageVectorsClient } from '@supabase/storage-vectors-js' const client = new StorageVectorsClient(url, { fetch: customFetch, - headers: { /* ... */ } + headers: { + /* ... */ + }, }) ``` @@ -375,7 +395,7 @@ Ensure vectors are properly normalized to float32: ```typescript import { normalizeToFloat32 } from '@supabase/storage-vectors-js' -const vector = normalizeToFloat32([0.1, 0.2, 0.3, /* ... */]) +const vector = normalizeToFloat32([0.1, 0.2, 0.3 /* ... 
*/]) ``` ## Type Definitions @@ -392,11 +412,11 @@ import type { VectorMetadata, DistanceMetric, ApiResponse, - StorageVectorsError + StorageVectorsError, } from '@supabase/storage-vectors-js' ``` ## Requirements - Node.js 14+ or modern browser with fetch support -- TypeScript 4.5+ (for type checking) \ No newline at end of file +- TypeScript 4.5+ (for type checking) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts index 54ea4bb07..44399d643 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts @@ -3,7 +3,14 @@ * Tests all bucket operations: create, get, list, delete */ -import { createTestClient, setupTest, generateTestName, assertSuccessResponse, assertErrorResponse, assertErrorCode } from './helpers' +import { + createTestClient, + setupTest, + generateTestName, + assertSuccessResponse, + assertErrorResponse, + assertErrorCode, +} from './helpers' describe('VectorBucketApi Integration Tests', () => { let client: ReturnType @@ -103,7 +110,7 @@ describe('VectorBucketApi Integration Tests', () => { expect(Array.isArray(data.buckets)).toBe(true) expect(data.buckets.length).toBeGreaterThanOrEqual(2) - const bucketNames = data.buckets.map(b => b.vectorBucketName) + const bucketNames = data.buckets.map((b) => b.vectorBucketName) expect(bucketNames).toContain(bucket1) expect(bucketNames).toContain(bucket2) }) @@ -123,14 +130,14 @@ describe('VectorBucketApi Integration Tests', () => { const data = assertSuccessResponse(response) expect(data.buckets.length).toBeGreaterThanOrEqual(2) - const bucketNames = data.buckets.map(b => b.vectorBucketName) + const bucketNames = data.buckets.map((b) => b.vectorBucketName) expect(bucketNames).toContain(bucket1) expect(bucketNames).toContain(bucket2) // bucket3 should not be included as it doesn't match prefix - const hasOtherBucket = bucketNames.some(name => name.includes('other-bucket')) + const hasOtherBucket = bucketNames.some((name) => name.includes('other-bucket')) if (hasOtherBucket) { // If other buckets exist, they should match the prefix - expect(bucketNames.every(name => name.startsWith(prefix))).toBe(true) + expect(bucketNames.every((name) => name.startsWith(prefix))).toBe(true) } }) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts index 39ba7abc6..fc3bbd31e 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts @@ -391,14 +391,11 @@ describe('End-to-End Workflow Tests', () => { const batchSize = 500 for (let i = 0; i < totalVectors; i += batchSize) { - const batch = Array.from( - { length: Math.min(batchSize, totalVectors - i) }, - (_, j) => ({ - key: `vector-${i + j}`, - data: { float32: generateRandomVector(64) }, - metadata: { batch: Math.floor((i + j) / batchSize), index: i + j }, - }) - ) + const batch = Array.from({ length: Math.min(batchSize, totalVectors - i) }, (_, j) => ({ + key: `vector-${i + j}`, + data: { float32: generateRandomVector(64) }, + metadata: { batch: Math.floor((i + j) / batchSize), index: i + j }, + })) const response = await index.putVectors({ vectors: batch }) assertSuccessResponse(response) diff --git 
a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts index 03436f218..22b96b081 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts @@ -22,7 +22,9 @@ export function createTestClient(): StorageVectorsClient { } if (!config.apiUrl) { - throw new Error('STORAGE_VECTORS_API_URL environment variable is required when USE_MOCK_SERVER=false') + throw new Error( + 'STORAGE_VECTORS_API_URL environment variable is required when USE_MOCK_SERVER=false' + ) } return new StorageVectorsClient(config.apiUrl, { @@ -53,7 +55,7 @@ export function generateTestName(prefix: string): string { * Sleep utility for tests */ export function sleep(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)) + return new Promise((resolve) => setTimeout(resolve, ms)) } /** @@ -68,12 +70,7 @@ export async function retry( factor?: number } = {} ): Promise { - const { - maxAttempts = 3, - initialDelay = 100, - maxDelay = 5000, - factor = 2, - } = options + const { maxAttempts = 3, initialDelay = 100, maxDelay = 5000, factor = 2 } = options let lastError: Error | undefined let delay = initialDelay @@ -160,5 +157,5 @@ export function cosineSimilarity(a: number[], b: number[]): number { export function normalizeVector(vector: number[]): number[] { const norm = Math.sqrt(vector.reduce((sum, val) => sum + val * val, 0)) if (norm === 0) return vector - return vector.map(val => val / norm) + return vector.map((val) => val / norm) } diff --git a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts index 8f4cc663c..5bf975f2b 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts @@ -3,7 +3,14 @@ * Tests all index operations: create, get, list, delete */ -import { createTestClient, setupTest, generateTestName, assertSuccessResponse, assertErrorResponse, assertErrorCode } from './helpers' +import { + createTestClient, + setupTest, + generateTestName, + assertSuccessResponse, + assertErrorResponse, + assertErrorCode, +} from './helpers' describe('VectorIndexApi Integration Tests', () => { let client: ReturnType @@ -247,7 +254,7 @@ describe('VectorIndexApi Integration Tests', () => { expect(Array.isArray(data.indexes)).toBe(true) expect(data.indexes.length).toBeGreaterThanOrEqual(2) - const indexNames = data.indexes.map(i => i.indexName) + const indexNames = data.indexes.map((i) => i.indexName) expect(indexNames).toContain(index1) expect(indexNames).toContain(index2) }) @@ -285,7 +292,7 @@ describe('VectorIndexApi Integration Tests', () => { const data = assertSuccessResponse(response) expect(data.indexes.length).toBeGreaterThanOrEqual(2) - const indexNames = data.indexes.map(i => i.indexName) + const indexNames = data.indexes.map((i) => i.indexName) expect(indexNames).toContain(index1) expect(indexNames).toContain(index2) }) diff --git a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts index ee9a7f460..4c1f23c48 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts @@ -92,7 +92,7 @@ class MockStorage { getBuckets(prefix?: string): string[] { 
const buckets = Array.from(this.buckets) if (prefix) { - return buckets.filter(b => b.startsWith(prefix)) + return buckets.filter((b) => b.startsWith(prefix)) } return buckets } @@ -127,7 +127,7 @@ class MockStorage { const indexes = Array.from(bucketIndexes.values()) if (prefix) { - return indexes.filter(i => i.indexName.startsWith(prefix)) + return indexes.filter((i) => i.indexName.startsWith(prefix)) } return indexes } @@ -204,11 +204,7 @@ export function createMockFetch(): Fetch { } } -async function handleRequest( - endpoint: string, - method: string, - body: any -): Promise { +async function handleRequest(endpoint: string, method: string, body: any): Promise { // Bucket endpoints if (endpoint === 'CreateVectorBucket') { return handleCreateBucket(body) @@ -315,7 +311,7 @@ function handleListBuckets(body: any): MockResponse { return { status: 200, data: { - buckets: buckets.slice(0, maxResults).map(name => ({ vectorBucketName: name })), + buckets: buckets.slice(0, maxResults).map((name) => ({ vectorBucketName: name })), nextToken: buckets.length > maxResults ? 'mock-next-token' : undefined, }, } @@ -436,7 +432,7 @@ function handleListIndexes(body: any): MockResponse { return { status: 200, data: { - indexes: indexes.slice(0, maxResults).map(i => ({ indexName: i.indexName })), + indexes: indexes.slice(0, maxResults).map((i) => ({ indexName: i.indexName })), nextToken: indexes.length > maxResults ? 'mock-next-token' : undefined, }, } @@ -549,7 +545,13 @@ function handleGetVectors(body: any): MockResponse { } function handleListVectors(body: any): MockResponse { - const { vectorBucketName, indexName, maxResults = 500, returnData = true, returnMetadata = true } = body + const { + vectorBucketName, + indexName, + maxResults = 500, + returnData = true, + returnMetadata = true, + } = body if (!storage.hasBucket(vectorBucketName)) { return { @@ -576,7 +578,7 @@ function handleListVectors(body: any): MockResponse { const vectorStorage = storage.getVectorStorage(vectorBucketName, indexName) const allVectors = Array.from(vectorStorage?.values() || []) - const vectors = allVectors.slice(0, maxResults).map(vector => { + const vectors = allVectors.slice(0, maxResults).map((vector) => { const result: any = { key: vector.key } if (returnData) result.data = vector.data if (returnMetadata) result.metadata = vector.metadata @@ -629,7 +631,7 @@ function handleQueryVectors(body: any): MockResponse { // Apply filter if provided if (filter) { - allVectors = allVectors.filter(vector => { + allVectors = allVectors.filter((vector) => { if (!vector.metadata) return false return Object.entries(filter).every(([key, value]) => vector.metadata[key] === value) }) @@ -641,7 +643,7 @@ function handleQueryVectors(body: any): MockResponse { const result: any = { key: vector.key } if (returnDistance) { // Mock distance calculation - result.distance = 0.1 + (index * 0.05) + result.distance = 0.1 + index * 0.05 } if (returnMetadata) result.metadata = vector.metadata return result diff --git a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts index 0c54cebe8..854b67c3b 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts +++ b/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts @@ -213,7 +213,7 @@ describe('VectorDataApi Integration Tests', () => { expect(data.vectors).toBeDefined() expect(data.vectors.length).toBe(2) - const keys = data.vectors.map(v 
=> v.key) + const keys = data.vectors.map((v) => v.key) expect(keys).toContain('vec-1') expect(keys).toContain('vec-2') }) @@ -288,7 +288,7 @@ describe('VectorDataApi Integration Tests', () => { const data = assertSuccessResponse(response) expect(data.vectors.length).toBe(2) - const keys = data.vectors.map(v => v.key) + const keys = data.vectors.map((v) => v.key) expect(keys).toContain('vec-1') expect(keys).toContain('vec-2') expect(keys).not.toContain('non-existent') @@ -591,7 +591,7 @@ describe('VectorDataApi Integration Tests', () => { await index.putVectors({ vectors }) // Delete in batch (max 500) - const keysToDelete = vectors.slice(0, 50).map(v => v.key) + const keysToDelete = vectors.slice(0, 50).map((v) => v.key) const response = await index.deleteVectors({ keys: keysToDelete }) assertSuccessResponse(response) diff --git a/packages/integrations/storage-vectors-js/src/index.ts b/packages/integrations/storage-vectors-js/src/index.ts index 7865ab52e..e5ac2a4fe 100644 --- a/packages/integrations/storage-vectors-js/src/index.ts +++ b/packages/integrations/storage-vectors-js/src/index.ts @@ -1,5 +1,9 @@ // Main client -export { StorageVectorsClient, VectorBucketScope, VectorIndexScope } from './lib/StorageVectorsClient' +export { + StorageVectorsClient, + VectorBucketScope, + VectorIndexScope, +} from './lib/StorageVectorsClient' export type { StorageVectorsClientOptions } from './lib/StorageVectorsClient' // API classes (for advanced usage) @@ -57,7 +61,13 @@ export { export type { Fetch, FetchOptions, RequestMethodType } from './lib/fetch' // Helper utilities -export { resolveFetch, resolveResponse, isPlainObject, normalizeToFloat32, validateVectorDimension } from './lib/helpers' +export { + resolveFetch, + resolveResponse, + isPlainObject, + normalizeToFloat32, + validateVectorDimension, +} from './lib/helpers' // Constants export { DEFAULT_HEADERS } from './lib/constants' diff --git a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts index 89bae60f8..5a0e8988b 100644 --- a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts +++ b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts @@ -137,9 +137,7 @@ export class VectorBucketScope extends VectorIndexApi { * }) * ``` */ - override async createIndex( - options: Omit - ) { + override async createIndex(options: Omit) { return super.createIndex({ ...options, vectorBucketName: this.vectorBucketName, @@ -159,9 +157,7 @@ export class VectorBucketScope extends VectorIndexApi { * const { data } = await bucket.listIndexes({ prefix: 'documents-' }) * ``` */ - override async listIndexes( - options: Omit = {} - ) { + override async listIndexes(options: Omit = {}) { return super.listIndexes({ ...options, vectorBucketName: this.vectorBucketName, @@ -304,9 +300,7 @@ export class VectorIndexScope extends VectorDataApi { * }) * ``` */ - override async getVectors( - options: Omit - ) { + override async getVectors(options: Omit) { return super.getVectors({ ...options, vectorBucketName: this.vectorBucketName, diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts index 860153c43..4700c7ddc 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts @@ -101,7 +101,9 @@ export default class VectorBucketApi { * } * ``` */ - async 
getVectorBucket(vectorBucketName: string): Promise> { + async getVectorBucket( + vectorBucketName: string + ): Promise> { try { const data = await post( this.fetch, @@ -151,12 +153,9 @@ export default class VectorBucketApi { options: ListVectorBucketsOptions = {} ): Promise> { try { - const data = await post( - this.fetch, - `${this.url}/ListVectorBuckets`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/ListVectorBuckets`, options, { + headers: this.headers, + }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts index 39272c649..99d6f33e3 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts @@ -89,12 +89,9 @@ export default class VectorDataApi { throw new Error('Vector batch size must be between 1 and 500 items') } - const data = await post( - this.fetch, - `${this.url}/PutVectors`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/PutVectors`, options, { + headers: this.headers, + }) return { data: data || {}, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -140,12 +137,9 @@ export default class VectorDataApi { */ async getVectors(options: GetVectorsOptions): Promise> { try { - const data = await post( - this.fetch, - `${this.url}/GetVectors`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/GetVectors`, options, { + headers: this.headers, + }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -223,12 +217,9 @@ export default class VectorDataApi { } } - const data = await post( - this.fetch, - `${this.url}/ListVectors`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/ListVectors`, options, { + headers: this.headers, + }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -285,12 +276,9 @@ export default class VectorDataApi { */ async queryVectors(options: QueryVectorsOptions): Promise> { try { - const data = await post( - this.fetch, - `${this.url}/QueryVectors`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/QueryVectors`, options, { + headers: this.headers, + }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -336,12 +324,9 @@ export default class VectorDataApi { throw new Error('Keys batch size must be between 1 and 500 items') } - const data = await post( - this.fetch, - `${this.url}/DeleteVectors`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/DeleteVectors`, options, { + headers: this.headers, + }) return { data: data || {}, error: null } } catch (error) { if (this.shouldThrowOnError) { diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts index 8d353f866..8f9bca0f6 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts +++ b/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts @@ -92,12 +92,9 @@ export default class VectorIndexApi { */ async createIndex(options: CreateIndexOptions): Promise> { try { - const data = await post( - this.fetch, - `${this.url}/CreateIndex`, - options, - { headers: 
this.headers } - ) + const data = await post(this.fetch, `${this.url}/CreateIndex`, options, { + headers: this.headers, + }) return { data: data || {}, error: null } } catch (error) { if (this.shouldThrowOnError) { @@ -190,12 +187,9 @@ export default class VectorIndexApi { */ async listIndexes(options: ListIndexesOptions): Promise> { try { - const data = await post( - this.fetch, - `${this.url}/ListIndexes`, - options, - { headers: this.headers } - ) + const data = await post(this.fetch, `${this.url}/ListIndexes`, options, { + headers: this.headers, + }) return { data, error: null } } catch (error) { if (this.shouldThrowOnError) { diff --git a/packages/integrations/storage-vectors-js/src/lib/constants.ts b/packages/integrations/storage-vectors-js/src/lib/constants.ts index 3643113bf..a462277ab 100644 --- a/packages/integrations/storage-vectors-js/src/lib/constants.ts +++ b/packages/integrations/storage-vectors-js/src/lib/constants.ts @@ -4,5 +4,5 @@ */ export const DEFAULT_HEADERS = { 'X-Client-Info': `storage-vectors-js/0.0.1`, - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', } diff --git a/packages/integrations/storage-vectors-js/src/lib/helpers.ts b/packages/integrations/storage-vectors-js/src/lib/helpers.ts index a9252b436..ed7d54955 100644 --- a/packages/integrations/storage-vectors-js/src/lib/helpers.ts +++ b/packages/integrations/storage-vectors-js/src/lib/helpers.ts @@ -81,7 +81,10 @@ export const normalizeToFloat32 = (values: number[]): number[] => { * @param expectedDimension - Expected vector dimension * @throws Error if dimensions don't match */ -export const validateVectorDimension = (vector: { float32: number[] }, expectedDimension?: number): void => { +export const validateVectorDimension = ( + vector: { float32: number[] }, + expectedDimension?: number +): void => { if (expectedDimension !== undefined && vector.float32.length !== expectedDimension) { throw new Error( `Vector dimension mismatch: expected ${expectedDimension}, got ${vector.float32.length}` diff --git a/packages/integrations/storage-vectors-js/tsconfig.spec.json b/packages/integrations/storage-vectors-js/tsconfig.spec.json index 47a3f1d46..4ed97de50 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.spec.json +++ b/packages/integrations/storage-vectors-js/tsconfig.spec.json @@ -26,8 +26,5 @@ "src/**/*.d.ts", "src/__tests__/**/*.ts" ], - "exclude": [ - "node_modules", - "dist" - ] + "exclude": ["node_modules", "dist"] } From b4f8a01c3188e1df0c27457e32d8f8f5a488e061 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 18:11:44 +0300 Subject: [PATCH 20/36] chore(storage): depend on build of vector --- packages/core/storage-js/package.json | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/core/storage-js/package.json b/packages/core/storage-js/package.json index 40c3c3073..3753f6fe1 100644 --- a/packages/core/storage-js/package.json +++ b/packages/core/storage-js/package.json @@ -59,5 +59,20 @@ "unpkg": "dist/umd/supabase.js", "publishConfig": { "access": "public" + }, + "nx": { + "targets": { + "build:module": { + "dependsOn": [ + "^build:module", + { + "projects": [ + "storage-vectors-js" + ], + "target": "build" + } + ] + } + } } } From 2f4cab56f872f7dbc74d6a24cff49553cc7526d6 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 20:16:58 +0300 Subject: [PATCH 21/36] chore(ci): update deno setup --- packages/core/supabase-js/test/deno/setup-deps.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git 
a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index 689080743..672c018a8 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -6,6 +6,7 @@ const path = require('node:path') // Get the directory of the script const scriptDir = __dirname const projectRoot = path.dirname(path.dirname(scriptDir)) +const monorepoRoot = path.dirname(path.dirname(path.dirname(path.dirname(scriptDir)))) // Read package.json from main project const packageJsonPath = path.join(projectRoot, 'package.json') @@ -44,6 +45,10 @@ try { console.warn('Warning: Could not read existing deno.json, creating new one') } +// Path to storage-vectors-js (workspace package, not published to npm) +const storageVectorsPath = path.join(monorepoRoot, 'packages/integrations/storage-vectors-js/dist/index.js') +const storageVectorsUrl = `file://${storageVectorsPath}` + // Update imports in deno.json denoJson.imports = { '@supabase/realtime-js': `npm:@supabase/realtime-js@${versions.realtime}`, @@ -54,6 +59,7 @@ denoJson.imports = { process.env.STORAGE_JS_ENTRY === 'main' ? `npm:@supabase/storage-js@${versions.storage}/dist/main/index.js` : `npm:@supabase/storage-js@${versions.storage}/dist/module/index.js`, + '@supabase/storage-vectors-js': storageVectorsUrl, '@supabase/node-fetch': `npm:@supabase/node-fetch@${versions.node_fetch}`, } From 6d5a7e9dd063c45509f003e75a213a49e6ea1ed3 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 20:42:12 +0300 Subject: [PATCH 22/36] chore(ci): update deno setup again --- packages/core/supabase-js/test/deno/setup-deps.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index 672c018a8..79fe6d498 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -6,7 +6,7 @@ const path = require('node:path') // Get the directory of the script const scriptDir = __dirname const projectRoot = path.dirname(path.dirname(scriptDir)) -const monorepoRoot = path.dirname(path.dirname(path.dirname(path.dirname(scriptDir)))) +const monorepoRoot = path.dirname(path.dirname(path.dirname(path.dirname(path.dirname(scriptDir))))) // Read package.json from main project const packageJsonPath = path.join(projectRoot, 'package.json') From 8142faf6251cb2f376af60287cdfbe3f95d761c5 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 20:26:38 +0300 Subject: [PATCH 23/36] chore(postgrest): restore docker image --- packages/core/postgrest-js/test/db/docker-compose.yml | 2 +- packages/core/postgrest-js/test/transforms.test.ts | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/core/postgrest-js/test/db/docker-compose.yml b/packages/core/postgrest-js/test/db/docker-compose.yml index 285733de0..c69a68e2e 100644 --- a/packages/core/postgrest-js/test/db/docker-compose.yml +++ b/packages/core/postgrest-js/test/db/docker-compose.yml @@ -29,7 +29,7 @@ services: depends_on: - db db: - image: postgis/postgis:15-3.4-alpine + image: supabase/postgres:15.1.0.37 ports: - '5432:5432' volumes: diff --git a/packages/core/postgrest-js/test/transforms.test.ts b/packages/core/postgrest-js/test/transforms.test.ts index f775ff2e8..0294b6746 100644 --- a/packages/core/postgrest-js/test/transforms.test.ts +++ b/packages/core/postgrest-js/test/transforms.test.ts @@ -405,6 +405,7 @@ 
test('explain with options', async () => { "Plan": Any, "Query Identifier": Any, "Settings": Object { + "effective_cache_size": "128MB", "search_path": "\\"public\\", \\"extensions\\"", }, }, From 73508ef976ac86f31cfd0f8f1b02ed11fcbb32c8 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 21:12:53 +0300 Subject: [PATCH 24/36] chore(ci): fix deno --- deno.lock | 30 ++++++++++----- packages/core/supabase-js/package.json | 38 ++++++++++++++++++- .../core/supabase-js/test/deno/setup-deps.js | 10 ++--- .../integrations/storage-vectors-js/README.md | 6 +-- .../src/lib/StorageVectorsClient.ts | 2 +- 5 files changed, 67 insertions(+), 19 deletions(-) diff --git a/deno.lock b/deno.lock index 4335a6e66..47112df27 100644 --- a/deno.lock +++ b/deno.lock @@ -93,13 +93,14 @@ "npm:@commitlint/cli@^19.8.1", "npm:@commitlint/config-conventional@^19.8.1", "npm:@commitlint/cz-commitlint@^19.8.1", - "npm:@nx/eslint@21.5.2", - "npm:@nx/jest@21.5.2", - "npm:@nx/js@21.5.2", - "npm:@nx/playwright@21.5.2", - "npm:@nx/vite@21.5.2", - "npm:@nx/web@21.5.2", - "npm:@nx/webpack@21.5.2", + "npm:@nx/eslint-plugin@21.6.2", + "npm:@nx/eslint@21.6.2", + "npm:@nx/jest@21.6.2", + "npm:@nx/js@21.6.2", + "npm:@nx/playwright@21.6.2", + "npm:@nx/vite@21.6.2", + "npm:@nx/web@21.6.2", + "npm:@nx/webpack@21.6.2", "npm:@supabase/node-fetch@2.6.15", "npm:@swc/core@~1.5.7", "npm:@swc/helpers@~0.5.11", @@ -107,7 +108,7 @@ "npm:@types/jest@^29.5.14", "npm:@types/jsonwebtoken@^8.5.8", "npm:@types/node-fetch@^2.6.4", - "npm:@types/node@20", + "npm:@types/node@20.19.9", "npm:@typescript-eslint/eslint-plugin@^7.18.0", "npm:@typescript-eslint/parser@^7.18.0", "npm:@vitest/ui@3", @@ -123,8 +124,9 @@ "npm:jest-mock-server@0.1", "npm:jest@^29.7.0", "npm:jiti@2.4.2", + "npm:jsonc-eslint-parser@^2.1.0", "npm:jsonwebtoken@9", - "npm:nx@21.5.2", + "npm:nx@21.6.2", "npm:prettier@^3.6.2", "npm:rimraf@^6.0.1", "npm:semantic-release-plugin-update-version-in-files@^1.1.0", @@ -170,6 +172,7 @@ "npm:cpy-cli@5", "npm:jest@^28.1.0", "npm:node-abort-controller@^3.0.1", + "npm:prettier@^2.6.2", "npm:ts-jest@^28.0.3", "npm:tstyche@^4.3.0", "npm:type-fest@^4.32.0", @@ -233,6 +236,15 @@ "npm:webpack@^5.69.1" ] } + }, + "packages/integrations/storage-vectors-js": { + "packageJson": { + "dependencies": [ + "npm:@supabase/node-fetch@^2.6.13", + "npm:@types/node@^24.7.2", + "npm:tslib@^2.8.1" + ] + } } } } diff --git a/packages/core/supabase-js/package.json b/packages/core/supabase-js/package.json index db6845b35..0fbdf18a8 100644 --- a/packages/core/supabase-js/package.json +++ b/packages/core/supabase-js/package.json @@ -73,5 +73,41 @@ "webpack-cli": "^4.9.2" }, "jsdelivr": "dist/umd/supabase.js", - "unpkg": "dist/umd/supabase.js" + "unpkg": "dist/umd/supabase.js", + "nx": { + "targets": { + "test:integration:browser": { + "dependsOn": [ + { + "projects": [ + "storage-vectors-js" + ], + "target": "build" + }, + { + "projects": [ + "storage-js" + ], + "target": "build" + } + ] + }, + "test:edge-functions": { + "dependsOn": [ + { + "projects": [ + "storage-vectors-js" + ], + "target": "build" + }, + { + "projects": [ + "storage-js" + ], + "target": "build" + } + ] + } + } + } } diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index 79fe6d498..bc3fcc6cc 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -45,20 +45,20 @@ try { console.warn('Warning: Could not read existing deno.json, creating new one') } -// 
Path to storage-vectors-js (workspace package, not published to npm) +// Paths to workspace packages (not published to npm or need local builds) const storageVectorsPath = path.join(monorepoRoot, 'packages/integrations/storage-vectors-js/dist/index.js') const storageVectorsUrl = `file://${storageVectorsPath}` +const storageJsPath = path.join(monorepoRoot, 'packages/core/storage-js/dist/module/index.js') +const storageJsUrl = `file://${storageJsPath}` + // Update imports in deno.json denoJson.imports = { '@supabase/realtime-js': `npm:@supabase/realtime-js@${versions.realtime}`, '@supabase/functions-js': `npm:@supabase/functions-js@${versions.functions}`, '@supabase/postgrest-js': `npm:@supabase/postgrest-js@${versions.postgrest}`, '@supabase/auth-js': `npm:@supabase/auth-js@${versions.auth}`, - '@supabase/storage-js': - process.env.STORAGE_JS_ENTRY === 'main' - ? `npm:@supabase/storage-js@${versions.storage}/dist/main/index.js` - : `npm:@supabase/storage-js@${versions.storage}/dist/module/index.js`, + '@supabase/storage-js': storageJsUrl, '@supabase/storage-vectors-js': storageVectorsUrl, '@supabase/node-fetch': `npm:@supabase/node-fetch@${versions.node_fetch}`, } diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md index 950703a8b..5be679182 100644 --- a/packages/integrations/storage-vectors-js/README.md +++ b/packages/integrations/storage-vectors-js/README.md @@ -5,7 +5,7 @@ TypeScript client library for Supabase S3 Vector Buckets - a bottomless database ## Installation ```bash -npm install @supabase/storage-vectors-js +npm install @supabase/storage-js ``` ## Features @@ -23,7 +23,7 @@ npm install @supabase/storage-vectors-js ## Quick Start ```typescript -import { StorageVectorsClient } from '@supabase/storage-vectors-js' +import { StorageVectorsClient } from '@supabase/storage-js' // Initialize client const client = new StorageVectorsClient('https://api.example.com', { @@ -362,7 +362,7 @@ await index.queryVectors({ Provide a custom fetch implementation: ```typescript -import { StorageVectorsClient } from '@supabase/storage-vectors-js' +import { StorageVectorsClient } from '@supabase/storage-js' const client = new StorageVectorsClient(url, { fetch: customFetch, diff --git a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts index 5a0e8988b..4f738286e 100644 --- a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts +++ b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts @@ -32,7 +32,7 @@ export interface StorageVectorsClientOptions { * * @example * ```typescript - * import { StorageVectorsClient } from '@supabase/storage-vectors-js' + * import { StorageVectorsClient } from '@supabase/storage' * * const client = new StorageVectorsClient('https://api.example.com', { * headers: { 'Authorization': 'Bearer token' } From a01e68ab85e64bdf64e64ec0f57ce52534c3ceab Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Thu, 16 Oct 2025 21:16:16 +0300 Subject: [PATCH 25/36] chore(repo): remove bad files --- .../lib/rest/types/common/common/common.ts | 56 -------- .../src/lib/rest/types/common/common/rpc.ts | 135 ------------------ 2 files changed, 191 deletions(-) delete mode 100644 packages/core/supabase-js/src/lib/rest/types/common/common/common.ts delete mode 100644 packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts diff --git 
a/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts b/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts deleted file mode 100644 index 9ad962ef5..000000000 --- a/packages/core/supabase-js/src/lib/rest/types/common/common/common.ts +++ /dev/null @@ -1,56 +0,0 @@ -// Types that are shared between supabase-js and postgrest-js - -export type Fetch = typeof fetch - -export type GenericRelationship = { - foreignKeyName: string - columns: string[] - isOneToOne?: boolean - referencedRelation: string - referencedColumns: string[] -} - -export type GenericTable = { - Row: Record - Insert: Record - Update: Record - Relationships: GenericRelationship[] -} - -export type GenericUpdatableView = { - Row: Record - Insert: Record - Update: Record - Relationships: GenericRelationship[] -} - -export type GenericNonUpdatableView = { - Row: Record - Relationships: GenericRelationship[] -} - -export type GenericView = GenericUpdatableView | GenericNonUpdatableView - -export type GenericSetofOption = { - isSetofReturn?: boolean | undefined - isOneToOne?: boolean | undefined - isNotNullable?: boolean | undefined - to: string - from: string -} - -export type GenericFunction = { - Args: Record | never - Returns: unknown - SetofOptions?: GenericSetofOption -} - -export type GenericSchema = { - Tables: Record - Views: Record - Functions: Record -} - -export type ClientServerOptions = { - PostgrestVersion?: string -} diff --git a/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts b/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts deleted file mode 100644 index 52e57419a..000000000 --- a/packages/core/supabase-js/src/lib/rest/types/common/common/rpc.ts +++ /dev/null @@ -1,135 +0,0 @@ -import type { GenericFunction, GenericSchema, GenericSetofOption } from './common' - -// Functions matching utils -type IsMatchingArgs< - FnArgs extends GenericFunction['Args'], - PassedArgs extends GenericFunction['Args'], -> = [FnArgs] extends [Record] - ? PassedArgs extends Record - ? true - : false - : keyof PassedArgs extends keyof FnArgs - ? PassedArgs extends FnArgs - ? true - : false - : false - -type MatchingFunctionArgs< - Fn extends GenericFunction, - Args extends GenericFunction['Args'], -> = Fn extends { Args: infer A extends GenericFunction['Args'] } - ? IsMatchingArgs extends true - ? Fn - : never - : false - -type FindMatchingFunctionByArgs< - FnUnion, - Args extends GenericFunction['Args'], -> = FnUnion extends infer Fn extends GenericFunction ? MatchingFunctionArgs : false - -// Types for working with database schemas -type TablesAndViews = Schema['Tables'] & Exclude - -// Utility types for working with unions -type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void - ? I - : never - -type LastOf = - UnionToIntersection T : never> extends () => infer R ? R : never - -type IsAny = 0 extends 1 & T ? true : false - -type ExactMatch = [T] extends [S] ? ([S] extends [T] ? true : false) : false - -type ExtractExactFunction = Fns extends infer F - ? F extends GenericFunction - ? ExactMatch extends true - ? F - : never - : never - : never - -type IsNever = [T] extends [never] ? 
true : false - -type RpcFunctionNotFound = { - Row: any - Result: { - error: true - } & "Couldn't infer function definition matching provided arguments" - RelationName: FnName - Relationships: null -} - -export type GetRpcFunctionFilterBuilderByArgs< - Schema extends GenericSchema, - FnName extends string & keyof Schema['Functions'], - Args, -> = { - 0: Schema['Functions'][FnName] - // If the Args is exactly never (function call without any params) - 1: IsAny extends true - ? any - : IsNever extends true - ? // This is for retro compatibility, if the funcition is defined with an single return and an union of Args - // we fallback to the last function definition matched by name - IsNever> extends true - ? LastOf - : ExtractExactFunction - : Args extends Record - ? LastOf - : // Otherwise, we attempt to match with one of the function definition in the union based - // on the function arguments provided - Args extends GenericFunction['Args'] - ? // This is for retro compatibility, if the funcition is defined with an single return and an union of Args - // we fallback to the last function definition matched by name - IsNever< - LastOf> - > extends true - ? LastOf - : // Otherwise, we use the arguments based function definition narrowing to get the right value - LastOf> - : // If we can't find a matching function by args, we try to find one by function name - ExtractExactFunction extends GenericFunction - ? ExtractExactFunction - : any -}[1] extends infer Fn - ? // If we are dealing with an non-typed client everything is any - IsAny extends true - ? { Row: any; Result: any; RelationName: FnName; Relationships: null } - : // Otherwise, we use the arguments based function definition narrowing to get the rigt value - Fn extends GenericFunction - ? { - Row: Fn['SetofOptions'] extends GenericSetofOption - ? Fn['SetofOptions']['isSetofReturn'] extends true - ? TablesAndViews[Fn['SetofOptions']['to']]['Row'] - : TablesAndViews[Fn['SetofOptions']['to']]['Row'] - : Fn['Returns'] extends any[] - ? Fn['Returns'][number] extends Record - ? Fn['Returns'][number] - : never - : Fn['Returns'] extends Record - ? Fn['Returns'] - : never - Result: Fn['SetofOptions'] extends GenericSetofOption - ? Fn['SetofOptions']['isSetofReturn'] extends true - ? Fn['SetofOptions']['isOneToOne'] extends true - ? Fn['Returns'][] - : Fn['Returns'] - : Fn['Returns'] - : Fn['Returns'] - RelationName: Fn['SetofOptions'] extends GenericSetofOption - ? Fn['SetofOptions']['to'] - : FnName - Relationships: Fn['SetofOptions'] extends GenericSetofOption - ? Fn['SetofOptions']['to'] extends keyof Schema['Tables'] - ? Schema['Tables'][Fn['SetofOptions']['to']]['Relationships'] - : Schema['Views'][Fn['SetofOptions']['to']]['Relationships'] - : null - } - : // If we failed to find the function by argument, we still pass with any but also add an overridable - Fn extends false - ? 
RpcFunctionNotFound - : RpcFunctionNotFound - : RpcFunctionNotFound From ea9bcfe62b50315fb9554509810febad8516a33c Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 11:25:21 +0300 Subject: [PATCH 26/36] chore(ci): fix deno issues --- .../core/supabase-js/test/deno/setup-deps.js | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index bc3fcc6cc..1a6460c01 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -52,17 +52,31 @@ const storageVectorsUrl = `file://${storageVectorsPath}` const storageJsPath = path.join(monorepoRoot, 'packages/core/storage-js/dist/module/index.js') const storageJsUrl = `file://${storageJsPath}` +// Determine storage-js entry point based on environment variable +// For Deno 1.x (uses npm package): set STORAGE_JS_ENTRY=main +// For Deno 2.x (uses local build): don't set or set to 'module' +const useLocalStorageJs = process.env.STORAGE_JS_ENTRY !== 'main' +const storageJsImport = useLocalStorageJs + ? storageJsUrl + : process.env.STORAGE_JS_ENTRY === 'main' + ? `npm:@supabase/storage-js@${versions.storage}/dist/main/index.js` + : `npm:@supabase/storage-js@${versions.storage}/dist/module/index.js` + // Update imports in deno.json denoJson.imports = { '@supabase/realtime-js': `npm:@supabase/realtime-js@${versions.realtime}`, '@supabase/functions-js': `npm:@supabase/functions-js@${versions.functions}`, '@supabase/postgrest-js': `npm:@supabase/postgrest-js@${versions.postgrest}`, '@supabase/auth-js': `npm:@supabase/auth-js@${versions.auth}`, - '@supabase/storage-js': storageJsUrl, - '@supabase/storage-vectors-js': storageVectorsUrl, + '@supabase/storage-js': storageJsImport, '@supabase/node-fetch': `npm:@supabase/node-fetch@${versions.node_fetch}`, } +// Only add storage-vectors-js when using local storage-js build +if (useLocalStorageJs) { + denoJson.imports['@supabase/storage-vectors-js'] = storageVectorsUrl +} + // Ensure Node types are available for Deno type-checking of .d.ts files denoJson.compilerOptions = denoJson.compilerOptions || {} denoJson.compilerOptions.types = Array.isArray(denoJson.compilerOptions.types) From 4f3c8bd0eafe75a92882c0347a8414ca5a07fd4c Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 11:42:50 +0300 Subject: [PATCH 27/36] chore(storage): update documentation for storage vectors --- packages/core/storage-js/README.md | 423 ++++++++++++++- .../integrations/storage-vectors-js/README.md | 483 +++++------------- 2 files changed, 537 insertions(+), 369 deletions(-) diff --git a/packages/core/storage-js/README.md b/packages/core/storage-js/README.md index 3d9c5c482..019bf428d 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -10,7 +10,7 @@

Supabase Storage JS SDK

-JavaScript SDK to interact with Supabase Storage.
+JavaScript SDK to interact with Supabase Storage, including file storage and vector embeddings.

Guides
+ +##### Create Bucket + +```typescript +const { data, error } = await vectorClient.createVectorBucket('my-bucket') +``` + +##### Get Bucket + +```typescript +const { data, error } = await vectorClient.getVectorBucket('my-bucket') +console.log('Created at:', new Date(data.vectorBucket.creationTime! * 1000)) +``` + +##### List Buckets + +```typescript +const { data, error } = await vectorClient.listVectorBuckets({ + prefix: 'prod-', + maxResults: 100, +}) + +// Pagination +if (data?.nextToken) { + const next = await vectorClient.listVectorBuckets({ nextToken: data.nextToken }) +} +``` + +##### Delete Bucket + +```typescript +// Bucket must be empty (all indexes deleted first) +const { error } = await vectorClient.deleteVectorBucket('my-bucket') +``` + +#### Vector Indexes + +Vector indexes define the schema for embeddings including dimension and distance metric. + +##### Create Index + +```typescript +const bucket = vectorClient.from('my-bucket') + +await bucket.createIndex({ + indexName: 'my-index', + dataType: 'float32', + dimension: 1536, + distanceMetric: 'cosine', // 'cosine' | 'euclidean' | 'dotproduct' + metadataConfiguration: { + nonFilterableMetadataKeys: ['raw_text', 'internal_id'], + }, +}) +``` + +**Distance Metrics:** + +- `cosine` - Cosine similarity (normalized dot product) +- `euclidean` - Euclidean distance (L2 norm) +- `dotproduct` - Dot product similarity + +##### Get Index + +```typescript +const { data, error } = await bucket.getIndex('my-index') +console.log('Dimension:', data?.index.dimension) +console.log('Distance metric:', data?.index.distanceMetric) +``` + +##### List Indexes + +```typescript +const { data, error } = await bucket.listIndexes({ + prefix: 'documents-', + maxResults: 100, +}) +``` + +##### Delete Index + +```typescript +// Deletes index and all its vectors +await bucket.deleteIndex('my-index') +``` + +#### Vector Operations + +##### Insert/Update Vectors (Upsert) + +```typescript +const index = vectorClient.from('my-bucket').index('my-index') + +await index.putVectors({ + vectors: [ + { + key: 'unique-id-1', + data: { + float32: [ + /* 1536 numbers */ + ], + }, + metadata: { + title: 'Document Title', + category: 'technical', + page: 1, + }, + }, + // ... up to 500 vectors per request + ], +}) +``` + +**Limitations:** + +- 1-500 vectors per request +- Vectors must match index dimension +- Keys must be unique within index + +##### Get Vectors by Key + +```typescript +const { data, error } = await index.getVectors({ + keys: ['doc-1', 'doc-2', 'doc-3'], + returnData: true, // Include embeddings + returnMetadata: true, // Include metadata +}) + +data?.vectors.forEach((v) => { + console.log(v.key, v.metadata) +}) +``` + +##### Query Similar Vectors (ANN Search) + +```typescript +const { data, error } = await index.queryVectors({ + queryVector: { + float32: [ + /* 1536 numbers */ + ], + }, + topK: 10, + filter: { + category: 'technical', + published: true, + }, + returnDistance: true, + returnMetadata: true, +}) + +// Results ordered by similarity +data?.matches.forEach((match) => { + console.log(`${match.key}: distance=${match.distance}`) +}) +``` + +**Filter Syntax:** +The `filter` parameter accepts arbitrary JSON for metadata filtering. Non-filterable keys (configured at index creation) cannot be used in filters but can still be returned. 
+ +##### List/Scan Vectors + +```typescript +// Simple pagination +let nextToken: string | undefined +do { + const { data } = await index.listVectors({ + maxResults: 500, + nextToken, + returnMetadata: true, + }) + + console.log('Batch:', data?.vectors.length) + nextToken = data?.nextToken +} while (nextToken) + +// Parallel scanning (4 workers) +const workers = [0, 1, 2, 3].map(async (segmentIndex) => { + const { data } = await index.listVectors({ + segmentCount: 4, + segmentIndex, + returnMetadata: true, + }) + return data?.vectors || [] +}) + +const results = await Promise.all(workers) +const allVectors = results.flat() +``` + +**Limitations:** + +- `maxResults`: 1-1000 (default: 500) +- `segmentCount`: 1-16 +- Response may be limited by 1MB size + +##### Delete Vectors + +```typescript +await index.deleteVectors({ + keys: ['doc-1', 'doc-2', 'doc-3'], + // ... up to 500 keys per request +}) +``` + +### Error Handling + +The library uses a consistent error handling pattern: + +```typescript +const { data, error } = await vectorClient.createVectorBucket('my-bucket') + +if (error) { + console.error('Error:', error.message) + console.error('Status:', error.status) + console.error('Code:', error.statusCode) +} +``` + +#### Error Codes + +| Code | HTTP | Description | +| ---------------------------- | ---- | ----------------------- | +| `InternalError` | 500 | Internal server error | +| `S3VectorConflictException` | 409 | Resource already exists | +| `S3VectorNotFoundException` | 404 | Resource not found | +| `S3VectorBucketNotEmpty` | 400 | Bucket contains indexes | +| `S3VectorMaxBucketsExceeded` | 400 | Bucket quota exceeded | +| `S3VectorMaxIndexesExceeded` | 400 | Index quota exceeded | + +#### Throwing Errors + +You can configure the client to throw errors instead: + +```typescript +const vectorClient = new StorageVectorsClient(url, options) +vectorClient.throwOnError() + +try { + const { data } = await vectorClient.createVectorBucket('my-bucket') + // data is guaranteed to be present +} catch (error) { + if (error instanceof StorageVectorsApiError) { + console.error('API Error:', error.statusCode) + } +} +``` + +### Advanced Usage + +#### Scoped Clients + +Create scoped clients for cleaner code: + +```typescript +// Bucket-scoped operations +const bucket = vectorClient.from('embeddings-prod') +await bucket.createIndex({ + /* ... */ +}) +await bucket.listIndexes() + +// Index-scoped operations +const index = bucket.index('documents-openai') +await index.putVectors({ + /* ... */ +}) +await index.queryVectors({ + /* ... */ +}) +``` + +#### Custom Fetch + +Provide a custom fetch implementation: + +```typescript +import { StorageVectorsClient } from '@supabase/storage-js' + +const vectorClient = new StorageVectorsClient(url, { + fetch: customFetch, + headers: { + /* ... */ + }, +}) +``` + +#### Batch Processing + +Process large datasets in batches: + +```typescript +async function insertLargeDataset(vectors: VectorObject[]) { + const batchSize = 500 + + for (let i = 0; i < vectors.length; i += batchSize) { + const batch = vectors.slice(i, i + batchSize) + await index.putVectors({ vectors: batch }) + console.log(`Inserted ${i + batch.length}/${vectors.length}`) + } +} +``` + +#### Float32 Validation + +Ensure vectors are properly normalized to float32: + +```typescript +import { normalizeToFloat32 } from '@supabase/storage-js' + +const vector = normalizeToFloat32([0.1, 0.2, 0.3 /* ... 
*/]) +``` + +### Type Definitions + +The library exports comprehensive TypeScript types: + +```typescript +import type { + VectorBucket, + VectorIndex, + VectorData, + VectorObject, + VectorMatch, + VectorMetadata, + DistanceMetric, + ApiResponse, + StorageVectorsError, +} from '@supabase/storage-js' +``` + ## Development This package is part of the [Supabase JavaScript monorepo](https://github.com/supabase/supabase-js). To work on this package: diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md index 5be679182..2d3ad52f9 100644 --- a/packages/integrations/storage-vectors-js/README.md +++ b/packages/integrations/storage-vectors-js/README.md @@ -1,422 +1,169 @@ -# @supabase/storage-vectors-js +# @supabase/storage-vectors-js (Internal Package) -TypeScript client library for Supabase S3 Vector Buckets - a bottomless database service for storing and querying high-dimensional embeddings backed by S3 Vectors. +> **⚠️ Note for Users:** This is an internal implementation package. For user-facing documentation on vector embeddings, see the [Vector Embeddings section in @supabase/storage-js](../../core/storage-js/README.md#vector-embeddings). -## Installation +## Overview -```bash -npm install @supabase/storage-js -``` +`@supabase/storage-vectors-js` is a private, internal package within the Supabase JS monorepo that provides vector embeddings functionality for Supabase Storage. This package is **not published to npm separately** - instead, it is bundled with and re-exported through `@supabase/storage-js`. -## Features +## Architecture -- **Vector Buckets**: Organize vector indexes into logical containers -- **Vector Indexes**: Define schemas with configurable dimensions and distance metrics -- **Batch Operations**: Insert/update/delete up to 500 vectors per request -- **Similarity Search**: Query for nearest neighbors using cosine, euclidean, or dot product distance -- **Metadata Filtering**: Store and filter vectors by arbitrary JSON metadata -- **Pagination**: Efficiently scan large vector datasets -- **Parallel Scanning**: Distribute scans across multiple workers for high throughput -- **TypeScript**: Full type safety with comprehensive type definitions -- **Cross-platform**: Works in Node.js, browsers, and edge runtimes +### Integration Pattern -## Quick Start +The integration follows a **re-export pattern**: ```typescript -import { StorageVectorsClient } from '@supabase/storage-js' - -// Initialize client -const client = new StorageVectorsClient('https://api.example.com', { - headers: { Authorization: 'Bearer YOUR_TOKEN' }, -}) - -// Create a vector bucket -await client.createVectorBucket('embeddings-prod') - -// Create an index -const bucket = client.from('embeddings-prod') -await bucket.createIndex({ - indexName: 'documents-openai', - dataType: 'float32', - dimension: 1536, - distanceMetric: 'cosine', -}) - -// Insert vectors -const index = bucket.index('documents-openai') -await index.putVectors({ - vectors: [ - { - key: 'doc-1', - data: { float32: [0.1, 0.2, 0.3 /* ...1536 dimensions */] }, - metadata: { title: 'Introduction', category: 'docs' }, - }, - ], -}) - -// Query similar vectors -const { data, error } = await index.queryVectors({ - queryVector: { float32: [0.15, 0.25, 0.35 /* ...1536 dimensions */] }, - topK: 5, - returnDistance: true, - returnMetadata: true, -}) - -if (data) { - data.matches.forEach((match) => { - console.log(`${match.key}: distance=${match.distance}`) - console.log('Metadata:', match.metadata) - 
}) -} +// In packages/core/storage-js/src/index.ts +export * from '@supabase/storage-vectors-js' ``` -## API Reference +This means: +- All exports from `storage-vectors-js` are transparently available through `storage-js` +- Users import from `@supabase/storage-js` and get everything, including vector types and classes +- The internal package structure is hidden from end users -### Client Initialization - -```typescript -const client = new StorageVectorsClient(url, options?) -``` +### Package Configuration -**Options:** +**Build coordination in storage-js:** -- `headers?: Record` - Custom HTTP headers (e.g., Authorization) -- `fetch?: Fetch` - Custom fetch implementation - -### Vector Buckets - -Vector buckets are top-level containers for organizing vector indexes. - -#### Create Bucket - -```typescript -const { data, error } = await client.createVectorBucket('my-bucket') -``` - -#### Get Bucket - -```typescript -const { data, error } = await client.getVectorBucket('my-bucket') -console.log('Created at:', new Date(data.vectorBucket.creationTime! * 1000)) -``` - -#### List Buckets - -```typescript -const { data, error } = await client.listVectorBuckets({ - prefix: 'prod-', - maxResults: 100, -}) - -// Pagination -if (data?.nextToken) { - const next = await client.listVectorBuckets({ nextToken: data.nextToken }) +```json +{ + "nx": { + "targets": { + "build:module": { + "dependsOn": [ + "^build:module", + { + "projects": ["storage-vectors-js"], + "target": "build" + } + ] + } + } + } } ``` -#### Delete Bucket +This ensures that when storage-js is built, storage-vectors-js is built first. -```typescript -// Bucket must be empty (all indexes deleted first) -const { error } = await client.deleteVectorBucket('my-bucket') -``` - -### Vector Indexes - -Vector indexes define the schema for embeddings including dimension and distance metric. 
- -#### Create Index - -```typescript -const bucket = client.bucket('my-bucket') - -await bucket.createIndex({ - indexName: 'my-index', - dataType: 'float32', - dimension: 1536, - distanceMetric: 'cosine', // 'cosine' | 'euclidean' | 'dotproduct' - metadataConfiguration: { - nonFilterableMetadataKeys: ['raw_text', 'internal_id'], - }, -}) -``` - -**Distance Metrics:** - -- `cosine` - Cosine similarity (normalized dot product) -- `euclidean` - Euclidean distance (L2 norm) -- `dotproduct` - Dot product similarity - -#### Get Index +### Module Structure -```typescript -const { data, error } = await bucket.getIndex('my-index') -console.log('Dimension:', data?.index.dimension) -console.log('Distance metric:', data?.index.distanceMetric) -``` - -#### List Indexes - -```typescript -const { data, error } = await bucket.listIndexes({ - prefix: 'documents-', - maxResults: 100, -}) ``` - -#### Delete Index - -```typescript -// Deletes index and all its vectors -await bucket.deleteIndex('my-index') +packages/integrations/storage-vectors-js/ +├── src/ +│ ├── index.ts # Main export file +│ └── lib/ +│ ├── StorageVectorsClient.ts # Main client class +│ ├── VectorBucketApi.ts # Bucket operations +│ ├── VectorIndexApi.ts # Index operations +│ ├── VectorDataApi.ts # Vector data operations +│ ├── types.ts # Type definitions +│ ├── errors.ts # Error handling +│ ├── fetch.ts # Fetch utilities +│ ├── helpers.ts # Helper functions +│ └── constants.ts # Constants +├── package.json # Package configuration +├── tsconfig.json # Root TypeScript config +├── tsconfig.lib.json # Library build config +├── tsconfig.spec.json # Test config +├── jest.config.ts # Jest configuration +└── README.md # This file (contributor guide) ``` -### Vector Operations +## Development -#### Insert/Update Vectors (Upsert) +### Building -```typescript -const index = client.bucket('my-bucket').index('my-index') - -await index.putVectors({ - vectors: [ - { - key: 'unique-id-1', - data: { - float32: [ - /* 1536 numbers */ - ], - }, - metadata: { - title: 'Document Title', - category: 'technical', - page: 1, - }, - }, - // ... up to 500 vectors per request - ], -}) -``` - -**Limitations:** - -- 1-500 vectors per request -- Vectors must match index dimension -- Keys must be unique within index - -#### Get Vectors by Key - -```typescript -const { data, error } = await index.getVectors({ - keys: ['doc-1', 'doc-2', 'doc-3'], - returnData: true, // Include embeddings (requires permission) - returnMetadata: true, // Include metadata (requires permission) -}) - -data?.vectors.forEach((v) => { - console.log(v.key, v.metadata) -}) -``` - -#### Query Similar Vectors (ANN Search) - -```typescript -const { data, error } = await index.queryVectors({ - queryVector: { - float32: [ - /* 1536 numbers */ - ], - }, - topK: 10, - filter: { - category: 'technical', - published: true, - }, - returnDistance: true, - returnMetadata: true, -}) - -// Results ordered by similarity -data?.matches.forEach((match) => { - console.log(`${match.key}: distance=${match.distance}`) -}) -``` - -**Filter Syntax:** -The `filter` parameter accepts arbitrary JSON for metadata filtering. Non-filterable keys (configured at index creation) cannot be used in filters but can still be returned. 
- -#### List/Scan Vectors +```bash +# From monorepo root +npx nx build storage-vectors-js -```typescript -// Simple pagination -let nextToken: string | undefined -do { - const { data } = await index.listVectors({ - maxResults: 500, - nextToken, - returnMetadata: true, - }) - - console.log('Batch:', data?.vectors.length) - nextToken = data?.nextToken -} while (nextToken) - -// Parallel scanning (4 workers) -const workers = [0, 1, 2, 3].map(async (segmentIndex) => { - const { data } = await index.listVectors({ - segmentCount: 4, - segmentIndex, - returnMetadata: true, - }) - return data?.vectors || [] -}) - -const results = await Promise.all(workers) -const allVectors = results.flat() +# Watch mode for development +npx nx build storage-vectors-js --watch ``` -**Limitations:** +### Testing -- `maxResults`: 1-1000 (default: 500) -- `segmentCount`: 1-16 -- Response may be limited by 1MB size +```bash +# Run tests +npx nx test storage-vectors-js -#### Delete Vectors +# Watch mode +npx nx test storage-vectors-js --watch -```typescript -await index.deleteVectors({ - keys: ['doc-1', 'doc-2', 'doc-3'], - // ... up to 500 keys per request -}) +# With coverage +npx nx test storage-vectors-js --coverage ``` -## Error Handling - -The library uses a consistent error handling pattern: +### Integration with Storage-JS -```typescript -const { data, error } = await client.createVectorBucket('my-bucket') +When developing features that affect storage-js: -if (error) { - console.error('Error:', error.message) - console.error('Status:', error.status) - console.error('Code:', error.statusCode) -} -``` +1. Make changes in `storage-vectors-js/src/` +2. Build `storage-vectors-js`: `npx nx build storage-vectors-js` +3. `storage-js` automatically picks up the changes via TypeScript project references +4. Test in `storage-js`: `npx nx test storage-js` +5. Run affected tests: `npx nx affected --target=test` -### Error Codes +### Adding New Exports -| Code | HTTP | Description | -| ---------------------------- | ---- | ----------------------- | -| `InternalError` | 500 | Internal server error | -| `S3VectorConflictException` | 409 | Resource already exists | -| `S3VectorNotFoundException` | 404 | Resource not found | -| `S3VectorBucketNotEmpty` | 400 | Bucket contains indexes | -| `S3VectorMaxBucketsExceeded` | 400 | Bucket quota exceeded | -| `S3VectorMaxIndexesExceeded` | 400 | Index quota exceeded | +When adding new types, classes, or functions: -### Throwing Errors +1. Add them to the appropriate file in `src/lib/` +2. Export them from `src/index.ts` +3. They will automatically be available in `@supabase/storage-js` via the re-export -You can configure the client to throw errors instead: +Example: ```typescript -const client = new StorageVectorsClient(url, options) -client.throwOnError() - -try { - const { data } = await client.createVectorBucket('my-bucket') - // data is guaranteed to be present -} catch (error) { - if (error instanceof StorageVectorsApiError) { - console.error('API Error:', error.statusCode) - } +// In src/lib/new-feature.ts +export class NewVectorFeature { + // Implementation } -``` - -## Advanced Usage - -### Scoped Clients - -Create scoped clients for cleaner code: -```typescript -// Bucket-scoped operations -const bucket = client.bucket('embeddings-prod') -await bucket.createIndex({ - /* ... */ -}) -await bucket.listIndexes() - -// Index-scoped operations -const index = bucket.index('documents-openai') -await index.putVectors({ - /* ... */ -}) -await index.queryVectors({ - /* ... 
*/ -}) -``` - -### Custom Fetch +// In src/index.ts +export { NewVectorFeature } from './lib/new-feature' -Provide a custom fetch implementation: - -```typescript -import { StorageVectorsClient } from '@supabase/storage-js' - -const client = new StorageVectorsClient(url, { - fetch: customFetch, - headers: { - /* ... */ - }, -}) +// Now automatically available in storage-js +import { NewVectorFeature } from '@supabase/storage-js' ``` -### Batch Processing +## Key Exports -Process large datasets in batches: +### Client Classes -```typescript -async function insertLargeDataset(vectors: VectorObject[]) { - const batchSize = 500 - - for (let i = 0; i < vectors.length; i += batchSize) { - const batch = vectors.slice(i, i + batchSize) - await index.putVectors({ vectors: batch }) - console.log(`Inserted ${i + batch.length}/${vectors.length}`) - } -} -``` +- `StorageVectorsClient` - Main client for vector operations +- `VectorBucketScope` - Scoped operations for a bucket +- `VectorIndexScope` - Scoped operations for an index +- `VectorBucketApi` - Bucket management methods +- `VectorIndexApi` - Index management methods +- `VectorDataApi` - Vector data operations -### Float32 Validation +### Types -Ensure vectors are properly normalized to float32: +- `VectorBucket` - Vector bucket metadata +- `VectorIndex` - Vector index configuration +- `VectorData` - Vector data representation +- `VectorObject` - Individual vector with metadata +- `VectorMatch` - Query result with similarity score +- `VectorMetadata` - Arbitrary JSON metadata +- `DistanceMetric` - 'cosine' | 'euclidean' | 'dotproduct' -```typescript -import { normalizeToFloat32 } from '@supabase/storage-vectors-js' - -const vector = normalizeToFloat32([0.1, 0.2, 0.3 /* ... */]) -``` +### Error Handling -## Type Definitions +- `StorageVectorsError` - Base error class +- `StorageVectorsApiError` - API-specific errors +- `isStorageVectorsError()` - Type guard function -The library exports comprehensive TypeScript types: +### Utilities -```typescript -import type { - VectorBucket, - VectorIndex, - VectorData, - VectorObject, - VectorMatch, - VectorMetadata, - DistanceMetric, - ApiResponse, - StorageVectorsError, -} from '@supabase/storage-vectors-js' -``` +- `resolveFetch()` - Fetch implementation resolver +- `normalizeToFloat32()` - Vector normalization helper +- `validateVectorDimension()` - Dimension validation -## Requirements +## Related Documentation -- Node.js 14+ or modern browser with fetch support -- TypeScript 4.5+ (for type checking) +- **User Documentation**: [storage-js README](../../core/storage-js/README.md#vector-embeddings) +- **Contributing Guide**: [CONTRIBUTING.md](../../../CONTRIBUTING.md) +- **Testing Guide**: [TESTING.md](../../../docs/TESTING.md) +- **Monorepo Architecture**: [CLAUDE.md](../../../CLAUDE.md) From 4a06991a346c3c07c990b991e99c2d932bba0baf Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 11:56:46 +0300 Subject: [PATCH 28/36] chore(storage): docs and deno test setup --- packages/core/storage-js/README.md | 41 ++++++++++++++++--- .../storage-vectors-js/package.json | 16 +++++--- .../storage-vectors-js/project.json | 18 ++++++++ .../src/lib/StorageVectorsClient.ts | 21 ++++++++-- .../storage-vectors-js/tsconfig.lib.json | 2 +- .../storage-vectors-js/tsconfig.module.json | 7 ++++ 6 files changed, 90 insertions(+), 15 deletions(-) create mode 100644 packages/integrations/storage-vectors-js/project.json create mode 100644 packages/integrations/storage-vectors-js/tsconfig.module.json diff --git 
a/packages/core/storage-js/README.md b/packages/core/storage-js/README.md index 019bf428d..f974be1ff 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -202,19 +202,26 @@ Supabase Storage provides built-in support for storing and querying high-dimensi ### Quick Start +#### Using with StorageClient (Recommended) + +If you already have a `StorageClient` instance for regular file operations, access vector functionality through the `vectors` property: + ```typescript -import { StorageVectorsClient } from '@supabase/storage-js' +import { StorageClient } from '@supabase/storage-js' -// Initialize client -const vectorClient = new StorageVectorsClient('https://your-project.supabase.co/storage/v1', { - headers: { Authorization: 'Bearer YOUR_TOKEN' }, +const storageClient = new StorageClient('https://your-project.supabase.co/storage/v1', { + apikey: 'YOUR_API_KEY', + Authorization: 'Bearer YOUR_TOKEN', }) +// Access vector operations +const vectors = storageClient.vectors + // Create a vector bucket -await vectorClient.createVectorBucket('embeddings-prod') +await vectors.createVectorBucket('embeddings-prod') // Create an index -const bucket = vectorClient.from('embeddings-prod') +const bucket = vectors.from('embeddings-prod') await bucket.createIndex({ indexName: 'documents-openai', dataType: 'float32', @@ -250,6 +257,28 @@ if (data) { } ``` +#### Standalone Usage + +For vector-only applications that don't need regular file storage operations, you can create a dedicated vector client: + +```typescript +import { StorageVectorsClient } from '@supabase/storage-js' + +// Initialize standalone vector client +const vectorClient = new StorageVectorsClient('https://your-project.supabase.co/storage/v1', { + headers: { Authorization: 'Bearer YOUR_TOKEN' }, +}) + +// Use the same API as shown above +await vectorClient.createVectorBucket('embeddings-prod') +const bucket = vectorClient.from('embeddings-prod') +// ... 
rest of operations +``` + +> **When to use each pattern:** +> - Use `storageClient.vectors` when working with both files and vectors in the same application +> - Use `new StorageVectorsClient()` for applications that only need vector operations without file storage + ### API Reference #### Client Initialization diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json index c733fdcee..121244cb2 100644 --- a/packages/integrations/storage-vectors-js/package.json +++ b/packages/integrations/storage-vectors-js/package.json @@ -2,15 +2,17 @@ "name": "@supabase/storage-vectors-js", "version": "0.0.0", "private": true, - "main": "./dist/index.js", - "types": "./dist/index.d.ts", + "main": "./dist/main/index.js", + "module": "./dist/module/index.js", + "types": "./dist/module/index.d.ts", "exports": { "./package.json": "./package.json", ".": { "@supabase-js/source": "./src/index.ts", - "types": "./dist/index.d.ts", - "require": "./dist/index.js", - "default": "./dist/index.js" + "types": "./dist/module/index.d.ts", + "require": "./dist/main/index.js", + "import": "./dist/module/index.js", + "default": "./dist/module/index.js" } }, "files": [ @@ -18,6 +20,10 @@ "!**/*.tsbuildinfo" ], "scripts": { + "clean": "rimraf dist", + "build": "npm run clean && npm run build:main && npm run build:module", + "build:main": "tsc -p tsconfig.lib.json", + "build:module": "tsc -p tsconfig.module.json", "test": "jest", "test:watch": "jest --watch", "test:coverage": "jest --coverage", diff --git a/packages/integrations/storage-vectors-js/project.json b/packages/integrations/storage-vectors-js/project.json new file mode 100644 index 000000000..3d7258673 --- /dev/null +++ b/packages/integrations/storage-vectors-js/project.json @@ -0,0 +1,18 @@ +{ + "name": "storage-vectors-js", + "$schema": "../../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "packages/integrations/storage-vectors-js/src", + "projectType": "library", + "targets": { + "build": { + "executor": "nx:run-script", + "options": { + "script": "build" + }, + "dependsOn": ["^build"], + "inputs": ["production", "^production"], + "outputs": ["{projectRoot}/dist"], + "cache": true + } + } +} diff --git a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts index 4f738286e..060f310c1 100644 --- a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts +++ b/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts @@ -30,9 +30,24 @@ export interface StorageVectorsClientOptions { * Main client for interacting with S3 Vectors API * Provides access to bucket, index, and vector data operations * - * @example + * **Usage Patterns:** + * + * 1. **Via StorageClient (recommended for most use cases):** + * ```typescript + * import { StorageClient } from '@supabase/storage-js' + * + * const storageClient = new StorageClient(url, headers) + * const vectors = storageClient.vectors + * + * // Use vector operations + * await vectors.createVectorBucket('embeddings-prod') + * const bucket = vectors.from('embeddings-prod') + * await bucket.createIndex({ ... }) + * ``` + * + * 2. 
**Standalone (for vector-only applications):** * ```typescript - * import { StorageVectorsClient } from '@supabase/storage' + * import { StorageVectorsClient } from '@supabase/storage-js' * * const client = new StorageVectorsClient('https://api.example.com', { * headers: { 'Authorization': 'Bearer token' } @@ -42,7 +57,7 @@ export interface StorageVectorsClientOptions { * await client.createVectorBucket('embeddings-prod') * * // Access index operations via buckets - * const bucket = client.bucket('embeddings-prod') + * const bucket = client.from('embeddings-prod') * await bucket.createIndex({ * indexName: 'documents', * dataType: 'float32', diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json index 0be08ac4f..607f647fe 100644 --- a/packages/integrations/storage-vectors-js/tsconfig.lib.json +++ b/packages/integrations/storage-vectors-js/tsconfig.lib.json @@ -3,7 +3,7 @@ "compilerOptions": { "baseUrl": ".", "rootDir": "src", - "outDir": "dist", + "outDir": "dist/main", "target": "ES2017", "module": "CommonJS", "moduleResolution": "Node", diff --git a/packages/integrations/storage-vectors-js/tsconfig.module.json b/packages/integrations/storage-vectors-js/tsconfig.module.json new file mode 100644 index 000000000..0755dd2ed --- /dev/null +++ b/packages/integrations/storage-vectors-js/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.lib.json", + "compilerOptions": { + "module": "ES2020", + "outDir": "dist/module" + } +} From 62591a2b695645d662a31023b898e7c29816f24c Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 12:16:06 +0300 Subject: [PATCH 29/36] chore(ci): deno --- packages/core/supabase-js/test/deno/setup-deps.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index 1a6460c01..e9e4b533e 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -46,7 +46,7 @@ try { } // Paths to workspace packages (not published to npm or need local builds) -const storageVectorsPath = path.join(monorepoRoot, 'packages/integrations/storage-vectors-js/dist/index.js') +const storageVectorsPath = path.join(monorepoRoot, 'packages/integrations/storage-vectors-js/dist/module/index.js') const storageVectorsUrl = `file://${storageVectorsPath}` const storageJsPath = path.join(monorepoRoot, 'packages/core/storage-js/dist/module/index.js') From 19ffb8f2230e5844895578240eabe76a12e24f26 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 13:01:19 +0300 Subject: [PATCH 30/36] fix(storage): move vector inside storage-js --- deno.lock | 9 - package-lock.json | 32 ---- packages/core/storage-js/README.md | 2 + packages/core/storage-js/package.json | 15 -- packages/core/storage-js/src/StorageClient.ts | 2 +- packages/core/storage-js/src/index.ts | 2 +- packages/core/storage-js/src/lib/constants.ts | 5 +- .../src/lib/vectors}/StorageVectorsClient.ts | 0 .../src/lib/vectors}/VectorBucketApi.ts | 2 +- .../src/lib/vectors}/VectorDataApi.ts | 2 +- .../src/lib/vectors}/VectorIndexApi.ts | 2 +- .../storage-js/src/lib/vectors}/errors.ts | 0 .../storage-js/src/lib/vectors}/fetch.ts | 0 .../storage-js/src/lib/vectors}/helpers.ts | 0 .../storage-js/src/lib/vectors}/index.ts | 27 ++- .../storage-js/src/lib/vectors}/types.ts | 0 .../storage-js/test}/bucket-api.spec.ts | 0 .../storage-js/test}/e2e-workflow.spec.ts | 
0 .../storage-js/test}/helpers.ts | 2 +- .../storage-js/test}/index-api.spec.ts | 0 .../storage-js/test}/mock-server.ts | 2 +- .../storage-js/test}/setup.ts | 0 .../storage-js/test}/vector-data-api.spec.ts | 0 packages/core/storage-js/tsconfig.json | 6 +- packages/core/storage-js/tsconfig.lib.json | 6 +- packages/core/supabase-js/package.json | 12 -- .../core/supabase-js/test/deno/setup-deps.js | 24 +-- .../storage-vectors-js/.eslintrc.json | 30 ---- .../storage-vectors-js/.spec.swcrc | 22 --- .../integrations/storage-vectors-js/README.md | 169 ------------------ .../storage-vectors-js/jest.config.ts | 20 --- .../storage-vectors-js/package.json | 43 ----- .../storage-vectors-js/project.json | 18 -- .../storage-vectors-js/src/lib/constants.ts | 8 - .../storage-vectors-js/tsconfig.json | 13 -- .../storage-vectors-js/tsconfig.lib.json | 22 --- .../storage-vectors-js/tsconfig.module.json | 7 - .../storage-vectors-js/tsconfig.spec.json | 30 ---- tsconfig.json | 3 - 39 files changed, 26 insertions(+), 511 deletions(-) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/StorageVectorsClient.ts (100%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/VectorBucketApi.ts (99%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/VectorDataApi.ts (99%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/VectorIndexApi.ts (99%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/errors.ts (100%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/fetch.ts (100%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/helpers.ts (100%) rename packages/{integrations/storage-vectors-js/src => core/storage-js/src/lib/vectors}/index.ts (61%) rename packages/{integrations/storage-vectors-js/src/lib => core/storage-js/src/lib/vectors}/types.ts (100%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/bucket-api.spec.ts (100%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/e2e-workflow.spec.ts (100%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/helpers.ts (98%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/index-api.spec.ts (100%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/mock-server.ts (99%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/setup.ts (100%) rename packages/{integrations/storage-vectors-js/src/__tests__ => core/storage-js/test}/vector-data-api.spec.ts (100%) delete mode 100644 packages/integrations/storage-vectors-js/.eslintrc.json delete mode 100644 packages/integrations/storage-vectors-js/.spec.swcrc delete mode 100644 packages/integrations/storage-vectors-js/README.md delete mode 100644 packages/integrations/storage-vectors-js/jest.config.ts delete mode 100644 packages/integrations/storage-vectors-js/package.json delete mode 100644 packages/integrations/storage-vectors-js/project.json delete mode 100644 packages/integrations/storage-vectors-js/src/lib/constants.ts delete mode 100644 packages/integrations/storage-vectors-js/tsconfig.json delete mode 100644 packages/integrations/storage-vectors-js/tsconfig.lib.json delete mode 100644 
packages/integrations/storage-vectors-js/tsconfig.module.json delete mode 100644 packages/integrations/storage-vectors-js/tsconfig.spec.json diff --git a/deno.lock b/deno.lock index 47112df27..9c6d7c036 100644 --- a/deno.lock +++ b/deno.lock @@ -237,15 +237,6 @@ ] } }, - "packages/integrations/storage-vectors-js": { - "packageJson": { - "dependencies": [ - "npm:@supabase/node-fetch@^2.6.13", - "npm:@types/node@^24.7.2", - "npm:tslib@^2.8.1" - ] - } - } } } } diff --git a/package-lock.json b/package-lock.json index 620d9c0aa..5ebb6815e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6576,10 +6576,6 @@ "resolved": "packages/core/storage-js", "link": true }, - "node_modules/@supabase/storage-vectors-js": { - "resolved": "packages/integrations/storage-vectors-js", - "link": true - }, "node_modules/@supabase/supabase-js": { "resolved": "packages/core/supabase-js", "link": true @@ -37214,34 +37210,6 @@ "version": "4.0.0", "dev": true, "license": "ISC" - }, - "packages/integrations/storage-vectors-js": { - "name": "@supabase/storage-vectors-js", - "version": "0.0.0", - "dependencies": { - "@supabase/node-fetch": "^2.6.13" - }, - "devDependencies": { - "@types/node": "^24.7.2", - "tslib": "^2.8.1" - } - }, - "packages/integrations/storage-vectors-js/node_modules/@types/node": { - "version": "24.7.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.2.tgz", - "integrity": "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~7.14.0" - } - }, - "packages/integrations/storage-vectors-js/node_modules/undici-types": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", - "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", - "dev": true, - "license": "MIT" } } } diff --git a/packages/core/storage-js/README.md b/packages/core/storage-js/README.md index f974be1ff..a144a3982 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -27,6 +27,7 @@ [![Package](https://img.shields.io/npm/v/@supabase/storage-js)](https://www.npmjs.com/package/@supabase/storage-js) [![License: MIT](https://img.shields.io/npm/l/@supabase/supabase-js)](#license) [![pkg.pr.new](https://pkg.pr.new/badge/supabase/storage-js)](https://pkg.pr.new/~/supabase/storage-js) + ## Features @@ -276,6 +277,7 @@ const bucket = vectorClient.from('embeddings-prod') ``` > **When to use each pattern:** +> > - Use `storageClient.vectors` when working with both files and vectors in the same application > - Use `new StorageVectorsClient()` for applications that only need vector operations without file storage diff --git a/packages/core/storage-js/package.json b/packages/core/storage-js/package.json index 3753f6fe1..40c3c3073 100644 --- a/packages/core/storage-js/package.json +++ b/packages/core/storage-js/package.json @@ -59,20 +59,5 @@ "unpkg": "dist/umd/supabase.js", "publishConfig": { "access": "public" - }, - "nx": { - "targets": { - "build:module": { - "dependsOn": [ - "^build:module", - { - "projects": [ - "storage-vectors-js" - ], - "target": "build" - } - ] - } - } } } diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index 2127187b8..eb54528fa 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -2,7 +2,7 @@ import StorageFileApi from 
'./packages/StorageFileApi' import StorageBucketApi from './packages/StorageBucketApi' import StorageAnalyticsApi from './packages/StorageAnalyticsApi' import { Fetch } from './lib/fetch' -import { StorageVectorsClient } from '@supabase/storage-vectors-js' +import { StorageVectorsClient } from './lib/vectors' export interface StorageClientOptions { useNewHostname?: boolean diff --git a/packages/core/storage-js/src/index.ts b/packages/core/storage-js/src/index.ts index b6b86b6b7..15794f0e4 100644 --- a/packages/core/storage-js/src/index.ts +++ b/packages/core/storage-js/src/index.ts @@ -3,4 +3,4 @@ export type { StorageClientOptions } from './StorageClient' export { default as StorageAnalyticsApi } from './packages/StorageAnalyticsApi' export * from './lib/types' export * from './lib/errors' -export * from '@supabase/storage-vectors-js' +export * from './lib/vectors' diff --git a/packages/core/storage-js/src/lib/constants.ts b/packages/core/storage-js/src/lib/constants.ts index dc81a349c..2cc479ce3 100644 --- a/packages/core/storage-js/src/lib/constants.ts +++ b/packages/core/storage-js/src/lib/constants.ts @@ -1,2 +1,5 @@ import { version } from './version' -export const DEFAULT_HEADERS = { 'X-Client-Info': `storage-js/${version}` } +export const DEFAULT_HEADERS = { + 'X-Client-Info': `storage-js/${version}`, + 'Content-Type': 'application/json', +} diff --git a/packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts b/packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/lib/StorageVectorsClient.ts rename to packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts similarity index 99% rename from packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts rename to packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts index 4700c7ddc..cd5665bad 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorBucketApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from './constants' +import { DEFAULT_HEADERS } from '../constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts b/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts similarity index 99% rename from packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts rename to packages/core/storage-js/src/lib/vectors/VectorDataApi.ts index 99d6f33e3..fd2cac8f9 100644 --- a/packages/integrations/storage-vectors-js/src/lib/VectorDataApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from './constants' +import { DEFAULT_HEADERS } from '../constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts b/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts similarity index 99% rename from packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts rename to packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts index 8f9bca0f6..c8efe7cf9 100644 --- 
a/packages/integrations/storage-vectors-js/src/lib/VectorIndexApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from './constants' +import { DEFAULT_HEADERS } from '../constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/integrations/storage-vectors-js/src/lib/errors.ts b/packages/core/storage-js/src/lib/vectors/errors.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/lib/errors.ts rename to packages/core/storage-js/src/lib/vectors/errors.ts diff --git a/packages/integrations/storage-vectors-js/src/lib/fetch.ts b/packages/core/storage-js/src/lib/vectors/fetch.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/lib/fetch.ts rename to packages/core/storage-js/src/lib/vectors/fetch.ts diff --git a/packages/integrations/storage-vectors-js/src/lib/helpers.ts b/packages/core/storage-js/src/lib/vectors/helpers.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/lib/helpers.ts rename to packages/core/storage-js/src/lib/vectors/helpers.ts diff --git a/packages/integrations/storage-vectors-js/src/index.ts b/packages/core/storage-js/src/lib/vectors/index.ts similarity index 61% rename from packages/integrations/storage-vectors-js/src/index.ts rename to packages/core/storage-js/src/lib/vectors/index.ts index e5ac2a4fe..c6d301e29 100644 --- a/packages/integrations/storage-vectors-js/src/index.ts +++ b/packages/core/storage-js/src/lib/vectors/index.ts @@ -1,16 +1,12 @@ // Main client -export { - StorageVectorsClient, - VectorBucketScope, - VectorIndexScope, -} from './lib/StorageVectorsClient' -export type { StorageVectorsClientOptions } from './lib/StorageVectorsClient' +export { StorageVectorsClient, VectorBucketScope, VectorIndexScope } from './StorageVectorsClient' +export type { StorageVectorsClientOptions } from './StorageVectorsClient' // API classes (for advanced usage) -export { default as VectorBucketApi } from './lib/VectorBucketApi' -export { default as VectorIndexApi } from './lib/VectorIndexApi' -export { default as VectorDataApi } from './lib/VectorDataApi' -export type { CreateIndexOptions } from './lib/VectorIndexApi' +export { default as VectorBucketApi } from './VectorBucketApi' +export { default as VectorIndexApi } from './VectorIndexApi' +export { default as VectorDataApi } from './VectorDataApi' +export type { CreateIndexOptions } from './VectorIndexApi' // Types export type { @@ -46,7 +42,7 @@ export type { SuccessResponse, ErrorResponse, VectorFetchParameters, -} from './lib/types' +} from './types' // Errors export { @@ -55,10 +51,10 @@ export { StorageVectorsUnknownError, StorageVectorsErrorCode, isStorageVectorsError, -} from './lib/errors' +} from './errors' // Fetch utilities (for custom implementations) -export type { Fetch, FetchOptions, RequestMethodType } from './lib/fetch' +export type { Fetch, FetchOptions, RequestMethodType } from './fetch' // Helper utilities export { @@ -67,7 +63,4 @@ export { isPlainObject, normalizeToFloat32, validateVectorDimension, -} from './lib/helpers' - -// Constants -export { DEFAULT_HEADERS } from './lib/constants' +} from './helpers' diff --git a/packages/integrations/storage-vectors-js/src/lib/types.ts b/packages/core/storage-js/src/lib/vectors/types.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/lib/types.ts rename to 
packages/core/storage-js/src/lib/vectors/types.ts diff --git a/packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts b/packages/core/storage-js/test/bucket-api.spec.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/__tests__/bucket-api.spec.ts rename to packages/core/storage-js/test/bucket-api.spec.ts diff --git a/packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts b/packages/core/storage-js/test/e2e-workflow.spec.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/__tests__/e2e-workflow.spec.ts rename to packages/core/storage-js/test/e2e-workflow.spec.ts diff --git a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts b/packages/core/storage-js/test/helpers.ts similarity index 98% rename from packages/integrations/storage-vectors-js/src/__tests__/helpers.ts rename to packages/core/storage-js/test/helpers.ts index 22b96b081..901e9c94a 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/helpers.ts +++ b/packages/core/storage-js/test/helpers.ts @@ -4,7 +4,7 @@ /// -import { StorageVectorsClient } from '../lib/StorageVectorsClient' +import { StorageVectorsClient } from '../src/lib/vectors' import { createMockFetch, resetMockStorage } from './mock-server' import { getTestConfig } from './setup' diff --git a/packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts b/packages/core/storage-js/test/index-api.spec.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/__tests__/index-api.spec.ts rename to packages/core/storage-js/test/index-api.spec.ts diff --git a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts b/packages/core/storage-js/test/mock-server.ts similarity index 99% rename from packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts rename to packages/core/storage-js/test/mock-server.ts index 4c1f23c48..89a78a2a8 100644 --- a/packages/integrations/storage-vectors-js/src/__tests__/mock-server.ts +++ b/packages/core/storage-js/test/mock-server.ts @@ -6,7 +6,7 @@ /// import { testData } from './setup' -import type { Fetch } from '../lib/fetch' +import type { Fetch } from '../src/lib/vectors' interface MockResponse { status: number diff --git a/packages/integrations/storage-vectors-js/src/__tests__/setup.ts b/packages/core/storage-js/test/setup.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/__tests__/setup.ts rename to packages/core/storage-js/test/setup.ts diff --git a/packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts b/packages/core/storage-js/test/vector-data-api.spec.ts similarity index 100% rename from packages/integrations/storage-vectors-js/src/__tests__/vector-data-api.spec.ts rename to packages/core/storage-js/test/vector-data-api.spec.ts diff --git a/packages/core/storage-js/tsconfig.json b/packages/core/storage-js/tsconfig.json index 348331e6a..97c0f7abd 100644 --- a/packages/core/storage-js/tsconfig.json +++ b/packages/core/storage-js/tsconfig.json @@ -23,9 +23,5 @@ "noImplicitOverride": false, "isolatedModules": false }, - "references": [ - { - "path": "../../integrations/storage-vectors-js" - } - ] + "references": [] } diff --git a/packages/core/storage-js/tsconfig.lib.json b/packages/core/storage-js/tsconfig.lib.json index d2201f3eb..8bce1b756 100644 --- a/packages/core/storage-js/tsconfig.lib.json +++ b/packages/core/storage-js/tsconfig.lib.json @@ -16,9 +16,5 @@ }, "include": ["src/**/*.ts"], 
"exclude": ["src/**/*.spec.ts", "src/**/*.test.ts"], - "references": [ - { - "path": "../../integrations/storage-vectors-js/tsconfig.lib.json" - } - ] + "references": [] } diff --git a/packages/core/supabase-js/package.json b/packages/core/supabase-js/package.json index 0fbdf18a8..cdf213ee5 100644 --- a/packages/core/supabase-js/package.json +++ b/packages/core/supabase-js/package.json @@ -78,12 +78,6 @@ "targets": { "test:integration:browser": { "dependsOn": [ - { - "projects": [ - "storage-vectors-js" - ], - "target": "build" - }, { "projects": [ "storage-js" @@ -94,12 +88,6 @@ }, "test:edge-functions": { "dependsOn": [ - { - "projects": [ - "storage-vectors-js" - ], - "target": "build" - }, { "projects": [ "storage-js" diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index e9e4b533e..acc0813ac 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -45,38 +45,16 @@ try { console.warn('Warning: Could not read existing deno.json, creating new one') } -// Paths to workspace packages (not published to npm or need local builds) -const storageVectorsPath = path.join(monorepoRoot, 'packages/integrations/storage-vectors-js/dist/module/index.js') -const storageVectorsUrl = `file://${storageVectorsPath}` - -const storageJsPath = path.join(monorepoRoot, 'packages/core/storage-js/dist/module/index.js') -const storageJsUrl = `file://${storageJsPath}` - -// Determine storage-js entry point based on environment variable -// For Deno 1.x (uses npm package): set STORAGE_JS_ENTRY=main -// For Deno 2.x (uses local build): don't set or set to 'module' -const useLocalStorageJs = process.env.STORAGE_JS_ENTRY !== 'main' -const storageJsImport = useLocalStorageJs - ? storageJsUrl - : process.env.STORAGE_JS_ENTRY === 'main' - ? 
`npm:@supabase/storage-js@${versions.storage}/dist/main/index.js` - : `npm:@supabase/storage-js@${versions.storage}/dist/module/index.js` - // Update imports in deno.json denoJson.imports = { '@supabase/realtime-js': `npm:@supabase/realtime-js@${versions.realtime}`, '@supabase/functions-js': `npm:@supabase/functions-js@${versions.functions}`, '@supabase/postgrest-js': `npm:@supabase/postgrest-js@${versions.postgrest}`, '@supabase/auth-js': `npm:@supabase/auth-js@${versions.auth}`, - '@supabase/storage-js': storageJsImport, + '@supabase/storage-js': `npm:@supabase/auth-js@${versions.storage}`, '@supabase/node-fetch': `npm:@supabase/node-fetch@${versions.node_fetch}`, } -// Only add storage-vectors-js when using local storage-js build -if (useLocalStorageJs) { - denoJson.imports['@supabase/storage-vectors-js'] = storageVectorsUrl -} - // Ensure Node types are available for Deno type-checking of .d.ts files denoJson.compilerOptions = denoJson.compilerOptions || {} denoJson.compilerOptions.types = Array.isArray(denoJson.compilerOptions.types) diff --git a/packages/integrations/storage-vectors-js/.eslintrc.json b/packages/integrations/storage-vectors-js/.eslintrc.json deleted file mode 100644 index 4f6cb6e9c..000000000 --- a/packages/integrations/storage-vectors-js/.eslintrc.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "extends": ["../../../.eslintrc.json"], - "ignorePatterns": ["!**/*", "**/out-tsc"], - "overrides": [ - { - "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], - "rules": {} - }, - { - "files": ["*.ts", "*.tsx"], - "rules": {} - }, - { - "files": ["*.js", "*.jsx"], - "rules": {} - }, - { - "files": ["*.json"], - "parser": "jsonc-eslint-parser", - "rules": { - "@nx/dependency-checks": [ - "error", - { - "ignoredFiles": ["{projectRoot}/eslint.config.{js,cjs,mjs,ts,cts,mts}"] - } - ] - } - } - ] -} diff --git a/packages/integrations/storage-vectors-js/.spec.swcrc b/packages/integrations/storage-vectors-js/.spec.swcrc deleted file mode 100644 index 3b52a5376..000000000 --- a/packages/integrations/storage-vectors-js/.spec.swcrc +++ /dev/null @@ -1,22 +0,0 @@ -{ - "jsc": { - "target": "es2017", - "parser": { - "syntax": "typescript", - "decorators": true, - "dynamicImport": true - }, - "transform": { - "decoratorMetadata": true, - "legacyDecorator": true - }, - "keepClassNames": true, - "externalHelpers": true, - "loose": true - }, - "module": { - "type": "es6" - }, - "sourceMaps": true, - "exclude": [] -} diff --git a/packages/integrations/storage-vectors-js/README.md b/packages/integrations/storage-vectors-js/README.md deleted file mode 100644 index 2d3ad52f9..000000000 --- a/packages/integrations/storage-vectors-js/README.md +++ /dev/null @@ -1,169 +0,0 @@ -# @supabase/storage-vectors-js (Internal Package) - -> **⚠️ Note for Users:** This is an internal implementation package. For user-facing documentation on vector embeddings, see the [Vector Embeddings section in @supabase/storage-js](../../core/storage-js/README.md#vector-embeddings). - -## Overview - -`@supabase/storage-vectors-js` is a private, internal package within the Supabase JS monorepo that provides vector embeddings functionality for Supabase Storage. This package is **not published to npm separately** - instead, it is bundled with and re-exported through `@supabase/storage-js`. 
- -## Architecture - -### Integration Pattern - -The integration follows a **re-export pattern**: - -```typescript -// In packages/core/storage-js/src/index.ts -export * from '@supabase/storage-vectors-js' -``` - -This means: -- All exports from `storage-vectors-js` are transparently available through `storage-js` -- Users import from `@supabase/storage-js` and get everything, including vector types and classes -- The internal package structure is hidden from end users - -### Package Configuration - -**Build coordination in storage-js:** - -```json -{ - "nx": { - "targets": { - "build:module": { - "dependsOn": [ - "^build:module", - { - "projects": ["storage-vectors-js"], - "target": "build" - } - ] - } - } - } -} -``` - -This ensures that when storage-js is built, storage-vectors-js is built first. - -### Module Structure - -``` -packages/integrations/storage-vectors-js/ -├── src/ -│ ├── index.ts # Main export file -│ └── lib/ -│ ├── StorageVectorsClient.ts # Main client class -│ ├── VectorBucketApi.ts # Bucket operations -│ ├── VectorIndexApi.ts # Index operations -│ ├── VectorDataApi.ts # Vector data operations -│ ├── types.ts # Type definitions -│ ├── errors.ts # Error handling -│ ├── fetch.ts # Fetch utilities -│ ├── helpers.ts # Helper functions -│ └── constants.ts # Constants -├── package.json # Package configuration -├── tsconfig.json # Root TypeScript config -├── tsconfig.lib.json # Library build config -├── tsconfig.spec.json # Test config -├── jest.config.ts # Jest configuration -└── README.md # This file (contributor guide) -``` - -## Development - -### Building - -```bash -# From monorepo root -npx nx build storage-vectors-js - -# Watch mode for development -npx nx build storage-vectors-js --watch -``` - -### Testing - -```bash -# Run tests -npx nx test storage-vectors-js - -# Watch mode -npx nx test storage-vectors-js --watch - -# With coverage -npx nx test storage-vectors-js --coverage -``` - -### Integration with Storage-JS - -When developing features that affect storage-js: - -1. Make changes in `storage-vectors-js/src/` -2. Build `storage-vectors-js`: `npx nx build storage-vectors-js` -3. `storage-js` automatically picks up the changes via TypeScript project references -4. Test in `storage-js`: `npx nx test storage-js` -5. Run affected tests: `npx nx affected --target=test` - -### Adding New Exports - -When adding new types, classes, or functions: - -1. Add them to the appropriate file in `src/lib/` -2. Export them from `src/index.ts` -3. 
They will automatically be available in `@supabase/storage-js` via the re-export - -Example: - -```typescript -// In src/lib/new-feature.ts -export class NewVectorFeature { - // Implementation -} - -// In src/index.ts -export { NewVectorFeature } from './lib/new-feature' - -// Now automatically available in storage-js -import { NewVectorFeature } from '@supabase/storage-js' -``` - -## Key Exports - -### Client Classes - -- `StorageVectorsClient` - Main client for vector operations -- `VectorBucketScope` - Scoped operations for a bucket -- `VectorIndexScope` - Scoped operations for an index -- `VectorBucketApi` - Bucket management methods -- `VectorIndexApi` - Index management methods -- `VectorDataApi` - Vector data operations - -### Types - -- `VectorBucket` - Vector bucket metadata -- `VectorIndex` - Vector index configuration -- `VectorData` - Vector data representation -- `VectorObject` - Individual vector with metadata -- `VectorMatch` - Query result with similarity score -- `VectorMetadata` - Arbitrary JSON metadata -- `DistanceMetric` - 'cosine' | 'euclidean' | 'dotproduct' - -### Error Handling - -- `StorageVectorsError` - Base error class -- `StorageVectorsApiError` - API-specific errors -- `isStorageVectorsError()` - Type guard function - -### Utilities - -- `resolveFetch()` - Fetch implementation resolver -- `normalizeToFloat32()` - Vector normalization helper -- `validateVectorDimension()` - Dimension validation - -## Related Documentation - -- **User Documentation**: [storage-js README](../../core/storage-js/README.md#vector-embeddings) -- **Contributing Guide**: [CONTRIBUTING.md](../../../CONTRIBUTING.md) -- **Testing Guide**: [TESTING.md](../../../docs/TESTING.md) -- **Monorepo Architecture**: [CLAUDE.md](../../../CLAUDE.md) diff --git a/packages/integrations/storage-vectors-js/jest.config.ts b/packages/integrations/storage-vectors-js/jest.config.ts deleted file mode 100644 index fb15bbd1a..000000000 --- a/packages/integrations/storage-vectors-js/jest.config.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* eslint-disable */ -const { readFileSync } = require('fs') -const { join } = require('path') - -// Reading the SWC compilation config for the spec files -const swcJestConfig = JSON.parse(readFileSync(join(__dirname, '.spec.swcrc'), 'utf-8')) - -// Disable .swcrc look-up by SWC core because we're passing in swcJestConfig ourselves -swcJestConfig.swcrc = false - -module.exports = { - displayName: 'storage-vectors-js', - preset: '../../../jest.preset.js', - testEnvironment: 'node', - transform: { - '^.+\\.[tj]s$': ['@swc/jest', swcJestConfig], - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: 'test-output/jest/coverage', -} diff --git a/packages/integrations/storage-vectors-js/package.json b/packages/integrations/storage-vectors-js/package.json deleted file mode 100644 index 121244cb2..000000000 --- a/packages/integrations/storage-vectors-js/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@supabase/storage-vectors-js", - "version": "0.0.0", - "private": true, - "main": "./dist/main/index.js", - "module": "./dist/module/index.js", - "types": "./dist/module/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "@supabase-js/source": "./src/index.ts", - "types": "./dist/module/index.d.ts", - "require": "./dist/main/index.js", - "import": "./dist/module/index.js", - "default": "./dist/module/index.js" - } - }, - "files": [ - "dist", - "!**/*.tsbuildinfo" - ], - "scripts": { - "clean": "rimraf dist", - "build": "npm run clean && npm run 
build:main && npm run build:module", - "build:main": "tsc -p tsconfig.lib.json", - "build:module": "tsc -p tsconfig.module.json", - "test": "jest", - "test:watch": "jest --watch", - "test:coverage": "jest --coverage", - "test:real": "USE_MOCK_SERVER=false jest", - "test:mock": "USE_MOCK_SERVER=true jest" - }, - "nx": { - "name": "storage-vectors-js" - }, - "dependencies": { - "@supabase/node-fetch": "^2.6.13" - }, - "devDependencies": { - "@types/node": "^24.7.2", - "tslib": "^2.8.1" - } -} diff --git a/packages/integrations/storage-vectors-js/project.json b/packages/integrations/storage-vectors-js/project.json deleted file mode 100644 index 3d7258673..000000000 --- a/packages/integrations/storage-vectors-js/project.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "storage-vectors-js", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/integrations/storage-vectors-js/src", - "projectType": "library", - "targets": { - "build": { - "executor": "nx:run-script", - "options": { - "script": "build" - }, - "dependsOn": ["^build"], - "inputs": ["production", "^production"], - "outputs": ["{projectRoot}/dist"], - "cache": true - } - } -} diff --git a/packages/integrations/storage-vectors-js/src/lib/constants.ts b/packages/integrations/storage-vectors-js/src/lib/constants.ts deleted file mode 100644 index a462277ab..000000000 --- a/packages/integrations/storage-vectors-js/src/lib/constants.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Default HTTP headers for all requests - * Includes client identification for analytics and debugging - */ -export const DEFAULT_HEADERS = { - 'X-Client-Info': `storage-vectors-js/0.0.1`, - 'Content-Type': 'application/json', -} diff --git a/packages/integrations/storage-vectors-js/tsconfig.json b/packages/integrations/storage-vectors-js/tsconfig.json deleted file mode 100644 index 667a3463d..000000000 --- a/packages/integrations/storage-vectors-js/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../../../tsconfig.base.json", - "files": [], - "include": [], - "references": [ - { - "path": "./tsconfig.lib.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/packages/integrations/storage-vectors-js/tsconfig.lib.json b/packages/integrations/storage-vectors-js/tsconfig.lib.json deleted file mode 100644 index 607f647fe..000000000 --- a/packages/integrations/storage-vectors-js/tsconfig.lib.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "extends": "../../../tsconfig.base.json", - "compilerOptions": { - "baseUrl": ".", - "rootDir": "src", - "outDir": "dist/main", - "target": "ES2017", - "module": "CommonJS", - "moduleResolution": "Node", - "lib": ["ES2022", "dom"], - "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", - "emitDeclarationOnly": false, - "forceConsistentCasingInFileNames": true, - "importHelpers": false, - "esModuleInterop": true, - "strict": true, - "types": ["node"] - }, - "include": ["src/**/*.ts"], - "references": [], - "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts", "src/**/__tests__/**/*"] -} diff --git a/packages/integrations/storage-vectors-js/tsconfig.module.json b/packages/integrations/storage-vectors-js/tsconfig.module.json deleted file mode 100644 index 0755dd2ed..000000000 --- a/packages/integrations/storage-vectors-js/tsconfig.module.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "./tsconfig.lib.json", - "compilerOptions": { - "module": "ES2020", - "outDir": "dist/module" - } -} diff --git a/packages/integrations/storage-vectors-js/tsconfig.spec.json 
b/packages/integrations/storage-vectors-js/tsconfig.spec.json deleted file mode 100644 index 4ed97de50..000000000 --- a/packages/integrations/storage-vectors-js/tsconfig.spec.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "compilerOptions": { - "outDir": "./out-tsc/jest", - "types": ["jest", "node"], - "module": "commonjs", - "moduleResolution": "node", - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "resolveJsonModule": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "composite": false, - "declaration": false, - "declarationMap": false, - "emitDeclarationOnly": false, - "noEmit": true, - "target": "es2022", - "lib": ["es2022"] - }, - "include": [ - "jest.config.ts", - "jest.config.cjs", - "src/**/*.test.ts", - "src/**/*.spec.ts", - "src/**/*.d.ts", - "src/__tests__/**/*.ts" - ], - "exclude": ["node_modules", "dist"] -} diff --git a/tsconfig.json b/tsconfig.json index c7b31066f..69433af3e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -17,9 +17,6 @@ }, { "path": "./packages/core/auth-js" - }, - { - "path": "./packages/integrations/storage-vectors-js" } ] } From a3a7700c7a2e1c34aee71ae6040ff872168e6d67 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 13:07:42 +0300 Subject: [PATCH 31/36] chore(ci): restore settings --- package-lock.json | 3 +-- package.json | 3 +-- packages/core/supabase-js/test/deno/setup-deps.js | 8 +++++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5ebb6815e..fd5c568a7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,8 +15,7 @@ "packages/core/postgrest-js", "packages/core/realtime-js", "packages/core/storage-js", - "packages/core/supabase-js", - "packages/integrations/*" + "packages/core/supabase-js" ], "devDependencies": { "@commitlint/cli": "^19.8.1", diff --git a/package.json b/package.json index fea23590c..ffa5d9d97 100644 --- a/package.json +++ b/package.json @@ -75,8 +75,7 @@ "packages/core/postgrest-js", "packages/core/realtime-js", "packages/core/storage-js", - "packages/core/supabase-js", - "packages/integrations/*" + "packages/core/supabase-js" ], "nx": { "includedScripts": [], diff --git a/packages/core/supabase-js/test/deno/setup-deps.js b/packages/core/supabase-js/test/deno/setup-deps.js index acc0813ac..d8f1a436b 100755 --- a/packages/core/supabase-js/test/deno/setup-deps.js +++ b/packages/core/supabase-js/test/deno/setup-deps.js @@ -6,7 +6,6 @@ const path = require('node:path') // Get the directory of the script const scriptDir = __dirname const projectRoot = path.dirname(path.dirname(scriptDir)) -const monorepoRoot = path.dirname(path.dirname(path.dirname(path.dirname(path.dirname(scriptDir))))) // Read package.json from main project const packageJsonPath = path.join(projectRoot, 'package.json') @@ -51,7 +50,10 @@ denoJson.imports = { '@supabase/functions-js': `npm:@supabase/functions-js@${versions.functions}`, '@supabase/postgrest-js': `npm:@supabase/postgrest-js@${versions.postgrest}`, '@supabase/auth-js': `npm:@supabase/auth-js@${versions.auth}`, - '@supabase/storage-js': `npm:@supabase/auth-js@${versions.storage}`, + '@supabase/storage-js': + process.env.STORAGE_JS_ENTRY === 'main' + ? 
`npm:@supabase/storage-js@${versions.storage}/dist/main/index.js` + : `npm:@supabase/storage-js@${versions.storage}/dist/module/index.js`, '@supabase/node-fetch': `npm:@supabase/node-fetch@${versions.node_fetch}`, } @@ -65,4 +67,4 @@ denoJson.compilerOptions.types = Array.isArray(denoJson.compilerOptions.types) fs.writeFileSync(denoJsonPath, JSON.stringify(denoJson, null, 2) + '\n') console.log('Updated deno.json with versions from package.json') -console.log('Versions used:', versions) +console.log('Versions used:', versions) \ No newline at end of file From 20104622bac6f914622584202cc319185bc67f0c Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 16:07:27 +0300 Subject: [PATCH 32/36] chore(ci): deno lock --- deno.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deno.lock b/deno.lock index 9c6d7c036..6e42c9b5d 100644 --- a/deno.lock +++ b/deno.lock @@ -236,7 +236,7 @@ "npm:webpack@^5.69.1" ] } - }, + } } } } From ea92e50f68ed64a3651ab8e5e1d07373b4ba2044 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 16:21:06 +0300 Subject: [PATCH 33/36] chore(repo): update some readmes --- packages/core/auth-js/README.md | 8 +------- packages/core/postgrest-js/README.md | 8 +------- packages/core/storage-js/README.md | 8 +------- packages/core/supabase-js/README.md | 9 +++++---- 4 files changed, 8 insertions(+), 25 deletions(-) diff --git a/packages/core/auth-js/README.md b/packages/core/auth-js/README.md index 88d63419e..90c45de70 100644 --- a/packages/core/auth-js/README.md +++ b/packages/core/auth-js/README.md @@ -155,10 +155,4 @@ The Docker setup includes: We welcome contributions! Please see our [Contributing Guide](../../../CONTRIBUTING.md) for details on how to get started. -For major changes or if you're unsure about something, please open an issue first to discuss your proposed changes. - -## Sponsors - -We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don't exist we build them and open source them ourselves. - -[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) +For major changes or if you're unsure about something, please open an issue first to discuss your proposed changes. \ No newline at end of file diff --git a/packages/core/postgrest-js/README.md b/packages/core/postgrest-js/README.md index 130d12d64..df9b1467a 100644 --- a/packages/core/postgrest-js/README.md +++ b/packages/core/postgrest-js/README.md @@ -196,10 +196,4 @@ For major changes or if you're unsure about something, please open an issue firs ## License -This repo is licensed under MIT License. - -## Sponsors - -We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone. - -[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) +This repo is licensed under MIT License. 
\ No newline at end of file diff --git a/packages/core/storage-js/README.md b/packages/core/storage-js/README.md index a144a3982..306109585 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -875,10 +875,4 @@ The test infrastructure (`infra/docker-compose.yml`) includes: We welcome contributions! Please see our [Contributing Guide](../../../CONTRIBUTING.md) for details on how to get started. -For major changes or if you're unsure about something, please open an issue first to discuss your proposed changes. - -## Sponsors - -We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone. - -[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) +For major changes or if you're unsure about something, please open an issue first to discuss your proposed changes. \ No newline at end of file diff --git a/packages/core/supabase-js/README.md b/packages/core/supabase-js/README.md index 6c7d6eeda..9bf7a7549 100644 --- a/packages/core/supabase-js/README.md +++ b/packages/core/supabase-js/README.md @@ -185,11 +185,11 @@ cd ../../.. | `test:integration:browser` | Browser tests using Deno + Puppeteer | Supabase running + Deno installed | | `test:edge-functions` | Edge Functions tests | Supabase running + Deno installed | | `test:types` | TypeScript type checking + JSR validation | None | -| `test:deno` | Deno runtime compatibility tests | Supabase running + Deno installed | | `test:bun` | Bun runtime compatibility tests | Supabase running + Bun installed | +| `test:node:playwright` | WebSocket browser tests | Supabase running + Playwright | +| Deno (see section below) | Deno runtime compatibility tests | Supabase running + Deno installed | | Expo (see section below) | React Native/Expo tests | Supabase running + dependencies updated | | Next.js (see below) | Next.js SSR tests | Supabase running + dependencies updated | -| `test:node:playwright` | WebSocket browser tests | Supabase running + Playwright | #### Unit Testing @@ -249,10 +249,11 @@ npx nx test:all supabase-js # Prerequisites: # 1. Supabase must be running (see Prerequisites) # 2. Update test dependencies and pack current build -npx nx update:test-deps:expo supabase-js +cd packages/core/supabase-js +npm run update:test-deps:expo # Run Expo tests from the Expo test project -cd packages/core/supabase-js/test/integration/expo +cd test/integration/expo npm install npm test cd ../../.. 
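The `@supabase/storage-js` mapping restored in the patch above is driven by the `STORAGE_JS_ENTRY` environment variable; below is a minimal sketch of that selection, using a hypothetical version string in place of the value the script reads from package.json.

```typescript
// Sketch of the entry-point selection restored in setup-deps.js.
// `storageVersion` is a hypothetical placeholder; the real script reads it from package.json.
const storageVersion = '2.0.0'

const storageJsImport =
  process.env.STORAGE_JS_ENTRY === 'main'
    ? `npm:@supabase/storage-js@${storageVersion}/dist/main/index.js` // CommonJS entry (Deno 1.x)
    : `npm:@supabase/storage-js@${storageVersion}/dist/module/index.js` // ESM entry (Deno 2.x)

console.log(storageJsImport)
```
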
From fd4f198f573c104122f283d89186fcace435b7b4 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 16:56:42 +0300 Subject: [PATCH 34/36] fix(storage): fixed the vector tests --- packages/core/storage-js/src/lib/constants.ts | 1 - .../src/lib/vectors/VectorBucketApi.ts | 2 +- .../src/lib/vectors/VectorDataApi.ts | 2 +- .../src/lib/vectors/VectorIndexApi.ts | 2 +- .../storage-js/src/lib/vectors/constants.ts | 5 ++ .../core/storage-js/src/lib/vectors/fetch.ts | 46 ++++++++++++----- .../core/storage-js/test/bucket-api.spec.ts | 4 +- packages/core/storage-js/test/mock-server.ts | 50 ++++++++++++++----- 8 files changed, 80 insertions(+), 32 deletions(-) create mode 100644 packages/core/storage-js/src/lib/vectors/constants.ts diff --git a/packages/core/storage-js/src/lib/constants.ts b/packages/core/storage-js/src/lib/constants.ts index 2cc479ce3..6c8b41ae1 100644 --- a/packages/core/storage-js/src/lib/constants.ts +++ b/packages/core/storage-js/src/lib/constants.ts @@ -1,5 +1,4 @@ import { version } from './version' export const DEFAULT_HEADERS = { 'X-Client-Info': `storage-js/${version}`, - 'Content-Type': 'application/json', } diff --git a/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts index cd5665bad..4700c7ddc 100644 --- a/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from '../constants' +import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts b/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts index fd2cac8f9..99d6f33e3 100644 --- a/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorDataApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from '../constants' +import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts b/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts index c8efe7cf9..8f9bca0f6 100644 --- a/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorIndexApi.ts @@ -1,4 +1,4 @@ -import { DEFAULT_HEADERS } from '../constants' +import { DEFAULT_HEADERS } from './constants' import { isStorageVectorsError } from './errors' import { Fetch, post } from './fetch' import { resolveFetch } from './helpers' diff --git a/packages/core/storage-js/src/lib/vectors/constants.ts b/packages/core/storage-js/src/lib/vectors/constants.ts new file mode 100644 index 000000000..7273379b9 --- /dev/null +++ b/packages/core/storage-js/src/lib/vectors/constants.ts @@ -0,0 +1,5 @@ +import { version } from '../version' +export const DEFAULT_HEADERS = { + 'X-Client-Info': `storage-js/${version}`, + 'Content-Type': 'application/json', +} diff --git a/packages/core/storage-js/src/lib/vectors/fetch.ts b/packages/core/storage-js/src/lib/vectors/fetch.ts index 29a7b8589..c18d2901f 100644 --- a/packages/core/storage-js/src/lib/vectors/fetch.ts +++ b/packages/core/storage-js/src/lib/vectors/fetch.ts @@ -40,19 +40,39 @@ const handleError = async ( reject: (reason?: any) => void, options?: FetchOptions ) 
=> { - const Res = await resolveResponse() - - if (error instanceof Res && !options?.noResolveJson) { - error - .json() - .then((err: any) => { - const status = error.status || 500 - const statusCode = err?.statusCode || err?.code || status + '' - reject(new StorageVectorsApiError(_getErrorMessage(err), status, statusCode)) - }) - .catch((err: any) => { - reject(new StorageVectorsUnknownError(_getErrorMessage(err), err)) - }) + // Check if error is a Response-like object (has status and ok properties) + // This is more reliable than instanceof which can fail across realms + const isResponseLike = + error && + typeof error === 'object' && + 'status' in error && + 'ok' in error && + typeof (error as any).status === 'number' + + if (isResponseLike && !options?.noResolveJson) { + const status = (error as any).status || 500 + const responseError = error as any + + // Try to parse JSON body if available + if (typeof responseError.json === 'function') { + responseError + .json() + .then((err: any) => { + const statusCode = err?.statusCode || err?.code || status + '' + reject(new StorageVectorsApiError(_getErrorMessage(err), status, statusCode)) + }) + .catch(() => { + // If JSON parsing fails, create an ApiError with the HTTP status code + const statusCode = status + '' + const message = responseError.statusText || `HTTP ${status} error` + reject(new StorageVectorsApiError(message, status, statusCode)) + }) + } else { + // No json() method available, create error from status + const statusCode = status + '' + const message = responseError.statusText || `HTTP ${status} error` + reject(new StorageVectorsApiError(message, status, statusCode)) + } } else { reject(new StorageVectorsUnknownError(_getErrorMessage(error), error)) } diff --git a/packages/core/storage-js/test/bucket-api.spec.ts b/packages/core/storage-js/test/bucket-api.spec.ts index 44399d643..03498fe56 100644 --- a/packages/core/storage-js/test/bucket-api.spec.ts +++ b/packages/core/storage-js/test/bucket-api.spec.ts @@ -184,7 +184,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.deleteVectorBucket('non-existent-bucket') const error = assertErrorResponse(response) - assertErrorCode(error, 409) + assertErrorCode(error, 404) }) it('should return error when bucket is not empty', async () => { @@ -205,7 +205,7 @@ describe('VectorBucketApi Integration Tests', () => { const response = await client.deleteVectorBucket(bucketName) const error = assertErrorResponse(response) - assertErrorCode(error, 409) + assertErrorCode(error, 400) expect(error.message).toContain('not empty') }) diff --git a/packages/core/storage-js/test/mock-server.ts b/packages/core/storage-js/test/mock-server.ts index 89a78a2a8..bd7b0eb0e 100644 --- a/packages/core/storage-js/test/mock-server.ts +++ b/packages/core/storage-js/test/mock-server.ts @@ -172,7 +172,13 @@ const storage = new MockStorage() */ export function createMockFetch(): Fetch { return async (input: string | URL | Request, init?: RequestInit): Promise => { - const url = input instanceof Request ? input.url : input + // Handle different input types safely without assuming Request constructor exists + const url = + typeof input === 'string' + ? input + : input instanceof URL + ? input.toString() + : (input as any).url || String(input) const urlStr = url.toString() const endpoint = urlStr.split('/').pop() || '' const body = init?.body ? 
JSON.parse(init.body as string) : {} @@ -195,12 +201,30 @@ export function createMockFetch(): Fetch { // Create mock Response object const responseBody = JSON.stringify(response.error || response.data || {}) - return new Response(responseBody, { + + // Check if Response constructor is available (Node 18+, modern browsers) + if (typeof Response !== 'undefined') { + return new Response(responseBody, { + status: response.status, + headers: { + 'Content-Type': 'application/json', + }, + }) as any + } + + // Fallback: Create a minimal Response-like object for older environments + const mockResponse: any = { + ok: response.status >= 200 && response.status < 300, status: response.status, + statusText: response.status === 200 ? 'OK' : 'Error', headers: { - 'Content-Type': 'application/json', + get: (key: string) => (key.toLowerCase() === 'content-type' ? 'application/json' : null), }, - }) as any + json: async () => JSON.parse(responseBody), + text: async () => responseBody, + } + + return mockResponse as Response } } @@ -324,7 +348,7 @@ function handleDeleteBucket(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -336,7 +360,7 @@ function handleDeleteBucket(body: any): MockResponse { return { status: 400, error: { - statusCode: 409, + statusCode: 400, error: 'Bad Request', message: `Bucket '${vectorBucketName}' is not empty`, }, @@ -355,7 +379,7 @@ function handleCreateIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -388,7 +412,7 @@ function handleGetIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -400,7 +424,7 @@ function handleGetIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -420,7 +444,7 @@ function handleListIndexes(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -445,7 +469,7 @@ function handleDeleteIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, @@ -456,7 +480,7 @@ function handleDeleteIndex(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Index '${indexName}' not found`, }, @@ -608,7 +632,7 @@ function handleQueryVectors(body: any): MockResponse { return { status: 404, error: { - statusCode: 409, + statusCode: 404, error: 'Not Found', message: `Bucket '${vectorBucketName}' not found`, }, From 17a0dd59de4d20f4a099bf1d2fd2f346cd5a411e Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 17:50:40 +0300 Subject: [PATCH 35/36] chore(storage): add typedoc everywhere --- packages/core/storage-js/README.md | 471 +++++++++++++++++- packages/core/storage-js/src/StorageClient.ts | 2 +- packages/core/storage-js/src/lib/index.ts | 1 + packages/core/storage-js/src/lib/types.ts | 14 + .../src/packages/StorageAnalyticsApi.ts | 6 + 5 files changed, 478 insertions(+), 16 deletions(-) diff --git a/packages/core/storage-js/README.md 
b/packages/core/storage-js/README.md index 306109585..185ad95be 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -37,6 +37,7 @@ - **Signed URLs**: Generate time-limited URLs for secure file access - **Image Transformations**: On-the-fly image resizing and optimization - **Vector Embeddings**: Store and query high-dimensional embeddings with similarity search +- **Analytics Buckets**: Iceberg table-based buckets optimized for analytical queries and data processing ## Quick Start Guide @@ -48,6 +49,33 @@ npm install @supabase/storage-js ### Connecting to the storage backend +There are two ways to use the Storage SDK: + +#### Option 1: Via Supabase Client (Recommended) + +If you're already using `@supabase/supabase-js`, access storage through the client: + +```js +import { createClient } from '@supabase/supabase-js' + +const supabase = createClient( + 'https://.supabase.co', + '' +) + +// Access storage +const storage = supabase.storage + +// Access different bucket types +const regularBucket = storage.from('my-bucket') +const vectorBucket = storage.vectors.from('embeddings-bucket') +const analyticsBucket = storage.analytics // Analytics API +``` + +#### Option 2: Standalone StorageClient + +For applications that only need storage functionality: + ```js import { StorageClient } from '@supabase/storage-js' @@ -58,8 +86,79 @@ const storageClient = new StorageClient(STORAGE_URL, { apikey: SERVICE_KEY, Authorization: `Bearer ${SERVICE_KEY}`, }) + +// Access different bucket types +const regularBucket = storageClient.from('my-bucket') +const vectorBucket = storageClient.vectors.from('embeddings-bucket') +const analyticsBucket = storageClient.analytics // Analytics API ``` +> **When to use each approach:** +> - Use `supabase.storage` when working with other Supabase features (auth, database, etc.) +> - Use `new StorageClient()` for storage-only applications or when you need fine-grained control + +### Understanding Bucket Types + +Supabase Storage supports three types of buckets, each optimized for different use cases: + +#### 1. Regular Storage Buckets (File Storage) + +Standard buckets for storing files, images, videos, and other assets. + +```js +// Create regular storage bucket +const { data, error } = await storageClient.createBucket('my-files', { + public: false +}) + +// Upload files +await storageClient.from('my-files').upload('avatar.png', file) +``` + +**Use cases:** User uploads, media assets, documents, backups + +#### 2. Vector Buckets (Embeddings Storage) + +Specialized buckets for storing and querying high-dimensional vector embeddings. + +```js +// Create vector bucket +await storageClient.vectors.createVectorBucket('embeddings-prod') + +// Create index and insert vectors +const bucket = storageClient.vectors.from('embeddings-prod') +await bucket.createIndex({ + indexName: 'documents', + dimension: 1536, + distanceMetric: 'cosine' +}) +``` + +**Use cases:** Semantic search, AI-powered recommendations, similarity matching + +**[See full Vector Embeddings documentation below](#vector-embeddings)** + +#### 3. Analytics Buckets + +Specialized buckets using Apache Iceberg table format, optimized for analytical queries and large-scale data processing. 
+ +```js +// Create analytics bucket +await storageClient.analytics.createBucket('analytics-data') + +// List analytics buckets +const { data, error } = await storageClient.analytics.listBuckets() + +// Delete analytics bucket +await storageClient.analytics.deleteBucket('analytics-data') +``` + +**Use cases:** Time-series data, analytical queries, data lakes, large-scale data processing, business intelligence + +**[See full Analytics Buckets documentation below](#analytics-buckets)** + +--- + ### Handling resources #### Handling Storage Buckets @@ -184,6 +283,324 @@ const storageClient = new StorageClient(STORAGE_URL, { const { data, error } = await storageClient.from('public-bucket').getPublicUrl('path/to/file') ``` +## Analytics Buckets + +Supabase Storage provides specialized analytics buckets using Apache Iceberg table format, optimized for analytical workloads and large-scale data processing. These buckets are designed for data lake architectures, time-series data, and business intelligence applications. + +### What are Analytics Buckets? + +Analytics buckets use the Apache Iceberg open table format, providing: +- **ACID transactions** for data consistency +- **Schema evolution** without data rewrites +- **Time travel** to query historical data +- **Efficient metadata management** for large datasets +- **Optimized for analytical queries** rather than individual file operations + +### When to Use Analytics Buckets + +**Use analytics buckets for:** +- Time-series data (logs, metrics, events) +- Data lake architectures +- Business intelligence and reporting +- Large-scale batch processing +- Analytical workloads requiring ACID guarantees + +**Use regular storage buckets for:** +- User file uploads (images, documents, videos) +- Individual file management +- Content delivery +- Simple object storage needs + +### Quick Start + +You can access analytics functionality through the `analytics` property on your storage client: + +#### Via Supabase Client + +```typescript +import { createClient } from '@supabase/supabase-js' + +const supabase = createClient( + 'https://your-project.supabase.co', + 'your-anon-key' +) + +// Access analytics operations +const analytics = supabase.storage.analytics + +// Create an analytics bucket +const { data, error } = await analytics.createBucket('analytics-data') +if (error) { + console.error('Failed to create analytics bucket:', error.message) +} else { + console.log('Created bucket:', data.name) +} +``` + +#### Via StorageClient + +```typescript +import { StorageClient } from '@supabase/storage-js' + +const storageClient = new StorageClient('https://your-project.supabase.co/storage/v1', { + apikey: 'YOUR_API_KEY', + Authorization: 'Bearer YOUR_TOKEN', +}) + +// Access analytics operations +const analytics = storageClient.analytics + +// Create an analytics bucket +await analytics.createBucket('analytics-data') +``` + +### API Reference + +#### Create Analytics Bucket + +Creates a new analytics bucket using Iceberg table format: + +```typescript +const { data, error } = await analytics.createBucket('my-analytics-bucket') + +if (error) { + console.error('Error:', error.message) +} else { + console.log('Created bucket:', data) +} +``` + +**Returns:** +```typescript +{ + data: { + id: string + type: 'ANALYTICS' + format: string + created_at: string + updated_at: string + } | null + error: StorageError | null +} +``` + +#### List Analytics Buckets + +Retrieves all analytics buckets in your project with optional filtering and pagination: + +```typescript +const { 
data, error } = await analytics.listBuckets({ + limit: 10, + offset: 0, + sortColumn: 'created_at', + sortOrder: 'desc', + search: 'prod' +}) + +if (data) { + console.log(`Found ${data.length} analytics buckets`) + data.forEach(bucket => { + console.log(`- ${bucket.id} (created: ${bucket.created_at})`) + }) +} +``` + +**Parameters:** +- `limit?: number` - Maximum number of buckets to return +- `offset?: number` - Number of buckets to skip (for pagination) +- `sortColumn?: 'id' | 'name' | 'created_at' | 'updated_at'` - Column to sort by +- `sortOrder?: 'asc' | 'desc'` - Sort direction +- `search?: string` - Search term to filter bucket names + +**Returns:** +```typescript +{ + data: AnalyticBucket[] | null + error: StorageError | null +} +``` + +**Example with Pagination:** + +```typescript +// Fetch first page +const firstPage = await analytics.listBuckets({ + limit: 100, + offset: 0, + sortColumn: 'created_at', + sortOrder: 'desc' +}) + +// Fetch second page +const secondPage = await analytics.listBuckets({ + limit: 100, + offset: 100, + sortColumn: 'created_at', + sortOrder: 'desc' +}) +``` + +#### Delete Analytics Bucket + +Deletes an analytics bucket. The bucket must be empty before deletion. + +```typescript +const { data, error } = await analytics.deleteBucket('old-analytics-bucket') + +if (error) { + console.error('Failed to delete:', error.message) +} else { + console.log('Bucket deleted:', data.message) +} +``` + +**Returns:** +```typescript +{ + data: { message: string } | null + error: StorageError | null +} +``` + +> **Note:** A bucket cannot be deleted if it contains data. You must empty the bucket first. + +### Error Handling + +Analytics buckets use the same error handling pattern as the rest of the Storage SDK: + +```typescript +const { data, error } = await analytics.createBucket('my-bucket') + +if (error) { + console.error('Error:', error.message) + console.error('Status:', error.status) + console.error('Status Code:', error.statusCode) + // Handle error appropriately +} +``` + +#### Throwing Errors + +You can configure the client to throw errors instead of returning them: + +```typescript +const analytics = storageClient.analytics +analytics.throwOnError() + +try { + const { data } = await analytics.createBucket('my-bucket') + // data is guaranteed to be present + console.log('Success:', data) +} catch (error) { + if (error instanceof StorageApiError) { + console.error('API Error:', error.statusCode, error.message) + } +} +``` + +### TypeScript Types + +The library exports TypeScript types for analytics buckets: + +```typescript +import type { + AnalyticBucket, + BucketType, + StorageError, +} from '@supabase/storage-js' + +// AnalyticBucket type +interface AnalyticBucket { + id: string + type: 'ANALYTICS' + format: string + created_at: string + updated_at: string +} +``` + +### Common Patterns + +#### Checking if a Bucket Exists + +```typescript +async function bucketExists(bucketName: string): Promise { + const { data, error } = await analytics.listBuckets({ + search: bucketName + }) + + if (error) { + console.error('Error checking bucket:', error.message) + return false + } + + return data?.some(bucket => bucket.id === bucketName) ?? 
false +} +``` + +#### Creating Bucket with Error Handling + +```typescript +async function ensureAnalyticsBucket(bucketName: string) { + // Try to create the bucket + const { data, error } = await analytics.createBucket(bucketName) + + if (error) { + // Check if bucket already exists (conflict error) + if (error.statusCode === '409') { + console.log(`Bucket '${bucketName}' already exists`) + return { success: true, created: false } + } + + // Other error occurred + console.error('Failed to create bucket:', error.message) + return { success: false, error } + } + + console.log(`Created new bucket: '${bucketName}'`) + return { success: true, created: true, data } +} +``` + +#### Listing All Buckets with Pagination + +```typescript +async function getAllAnalyticsBuckets() { + const allBuckets: AnalyticBucket[] = [] + let offset = 0 + const limit = 100 + + while (true) { + const { data, error } = await analytics.listBuckets({ + limit, + offset, + sortColumn: 'created_at', + sortOrder: 'desc' + }) + + if (error) { + console.error('Error fetching buckets:', error.message) + break + } + + if (!data || data.length === 0) { + break + } + + allBuckets.push(...data) + + // If we got fewer results than the limit, we've reached the end + if (data.length < limit) { + break + } + + offset += limit + } + + return allBuckets +} +``` + ## Vector Embeddings Supabase Storage provides built-in support for storing and querying high-dimensional vector embeddings, powered by S3 Vectors. This enables semantic search, similarity matching, and AI-powered applications without needing a separate vector database. @@ -203,20 +620,22 @@ Supabase Storage provides built-in support for storing and querying high-dimensi ### Quick Start -#### Using with StorageClient (Recommended) +You can access vector functionality in three ways, depending on your use case: -If you already have a `StorageClient` instance for regular file operations, access vector functionality through the `vectors` property: +#### Option 1: Via Supabase Client (Most Common) + +If you're using the full Supabase client: ```typescript -import { StorageClient } from '@supabase/storage-js' +import { createClient } from '@supabase/supabase-js' -const storageClient = new StorageClient('https://your-project.supabase.co/storage/v1', { - apikey: 'YOUR_API_KEY', - Authorization: 'Bearer YOUR_TOKEN', -}) +const supabase = createClient( + 'https://your-project.supabase.co', + 'your-anon-key' +) -// Access vector operations -const vectors = storageClient.vectors +// Access vector operations through storage +const vectors = supabase.storage.vectors // Create a vector bucket await vectors.createVectorBucket('embeddings-prod') @@ -258,9 +677,30 @@ if (data) { } ``` -#### Standalone Usage +#### Option 2: Via StorageClient + +If you're using the standalone `StorageClient` for storage operations, access vectors through the `vectors` property: + +```typescript +import { StorageClient } from '@supabase/storage-js' + +const storageClient = new StorageClient('https://your-project.supabase.co/storage/v1', { + apikey: 'YOUR_API_KEY', + Authorization: 'Bearer YOUR_TOKEN', +}) + +// Access vector operations +const vectors = storageClient.vectors + +// Use the same API as shown in Option 1 +await vectors.createVectorBucket('embeddings-prod') +const bucket = vectors.from('embeddings-prod') +// ... 
rest of operations +``` + +#### Option 3: Standalone Vector Client -For vector-only applications that don't need regular file storage operations, you can create a dedicated vector client: +For vector-only applications that don't need regular file storage operations: ```typescript import { StorageVectorsClient } from '@supabase/storage-js' @@ -270,16 +710,17 @@ const vectorClient = new StorageVectorsClient('https://your-project.supabase.co/ headers: { Authorization: 'Bearer YOUR_TOKEN' }, }) -// Use the same API as shown above +// Use the same API as shown in Option 1 await vectorClient.createVectorBucket('embeddings-prod') const bucket = vectorClient.from('embeddings-prod') // ... rest of operations ``` -> **When to use each pattern:** +> **When to use each approach:** > -> - Use `storageClient.vectors` when working with both files and vectors in the same application -> - Use `new StorageVectorsClient()` for applications that only need vector operations without file storage +> - **Option 1**: When using other Supabase features (auth, database, realtime) +> - **Option 2**: When working with both file storage and vectors +> - **Option 3**: For dedicated vector-only applications without file storage ### API Reference diff --git a/packages/core/storage-js/src/StorageClient.ts b/packages/core/storage-js/src/StorageClient.ts index eb54528fa..25da7fc07 100644 --- a/packages/core/storage-js/src/StorageClient.ts +++ b/packages/core/storage-js/src/StorageClient.ts @@ -49,7 +49,7 @@ export class StorageClient extends StorageBucketApi { * const analytics = client.storage.analytics * * // Create an analytics bucket - * await analytics.createBucket('my-analytics-bucket', { public: false }) + * await analytics.createBucket('my-analytics-bucket') * * // List all analytics buckets * const { data: buckets } = await analytics.listBuckets() diff --git a/packages/core/storage-js/src/lib/index.ts b/packages/core/storage-js/src/lib/index.ts index fb0c99760..98399fae5 100644 --- a/packages/core/storage-js/src/lib/index.ts +++ b/packages/core/storage-js/src/lib/index.ts @@ -2,3 +2,4 @@ export * from '../packages/StorageBucketApi' export * from '../packages/StorageFileApi' export * from './types' export * from './constants' +export * from './vectors' diff --git a/packages/core/storage-js/src/lib/types.ts b/packages/core/storage-js/src/lib/types.ts index 2905c6858..99a2bd8fc 100644 --- a/packages/core/storage-js/src/lib/types.ts +++ b/packages/core/storage-js/src/lib/types.ts @@ -1,5 +1,10 @@ import { StorageError } from './errors' +/** + * Type of storage bucket + * - STANDARD: Regular file storage buckets + * - ANALYTICS: Iceberg table-based buckets for analytical workloads + */ export type BucketType = 'STANDARD' | 'ANALYTICS' export interface Bucket { @@ -14,11 +19,20 @@ export interface Bucket { public: boolean } +/** + * Represents an Analytics Bucket using Apache Iceberg table format. + * Analytics buckets are optimized for analytical queries and data processing. 
+ */ export interface AnalyticBucket { + /** Unique identifier for the bucket */ id: string + /** Bucket type - always 'ANALYTICS' for analytics buckets */ type: 'ANALYTICS' + /** Storage format used (e.g., 'iceberg') */ format: string + /** ISO 8601 timestamp of bucket creation */ created_at: string + /** ISO 8601 timestamp of last update */ updated_at: string } diff --git a/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts index d01dd6206..cd23f6877 100644 --- a/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts +++ b/packages/core/storage-js/src/packages/StorageAnalyticsApi.ts @@ -14,6 +14,12 @@ export default class StorageAnalyticsApi { protected fetch: Fetch protected shouldThrowOnError = false + /** + * Creates a new StorageAnalyticsApi instance + * @param url - The base URL for the storage API + * @param headers - HTTP headers to include in requests + * @param fetch - Optional custom fetch implementation + */ constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { this.url = url.replace(/\/$/, '') this.headers = { ...DEFAULT_HEADERS, ...headers } From 06c217831141fad33c789e0365395b310f3c83d3 Mon Sep 17 00:00:00 2001 From: Katerina Skroumpelou Date: Fri, 17 Oct 2025 18:06:09 +0300 Subject: [PATCH 36/36] chore(storage): update names --- packages/core/storage-js/README.md | 22 +++--- .../src/lib/vectors/StorageVectorsClient.ts | 8 +-- .../src/lib/vectors/VectorBucketApi.ts | 26 ++++--- .../core/storage-js/test/bucket-api.spec.ts | 70 +++++++++---------- .../core/storage-js/test/e2e-workflow.spec.ts | 22 +++--- .../core/storage-js/test/index-api.spec.ts | 4 +- .../storage-js/test/vector-data-api.spec.ts | 2 +- 7 files changed, 80 insertions(+), 74 deletions(-) diff --git a/packages/core/storage-js/README.md b/packages/core/storage-js/README.md index 185ad95be..46a23bc80 100644 --- a/packages/core/storage-js/README.md +++ b/packages/core/storage-js/README.md @@ -123,7 +123,7 @@ Specialized buckets for storing and querying high-dimensional vector embeddings. ```js // Create vector bucket -await storageClient.vectors.createVectorBucket('embeddings-prod') +await storageClient.vectors.createBucket('embeddings-prod') // Create index and insert vectors const bucket = storageClient.vectors.from('embeddings-prod') @@ -638,7 +638,7 @@ const supabase = createClient( const vectors = supabase.storage.vectors // Create a vector bucket -await vectors.createVectorBucket('embeddings-prod') +await vectors.createBucket('embeddings-prod') // Create an index const bucket = vectors.from('embeddings-prod') @@ -693,7 +693,7 @@ const storageClient = new StorageClient('https://your-project.supabase.co/storag const vectors = storageClient.vectors // Use the same API as shown in Option 1 -await vectors.createVectorBucket('embeddings-prod') +await vectors.createBucket('embeddings-prod') const bucket = vectors.from('embeddings-prod') // ... rest of operations ``` @@ -711,7 +711,7 @@ const vectorClient = new StorageVectorsClient('https://your-project.supabase.co/ }) // Use the same API as shown in Option 1 -await vectorClient.createVectorBucket('embeddings-prod') +await vectorClient.createBucket('embeddings-prod') const bucket = vectorClient.from('embeddings-prod') // ... rest of operations ``` @@ -742,27 +742,27 @@ Vector buckets are top-level containers for organizing vector indexes. 
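Before the per-operation reference, here is a minimal end-to-end sketch of the bucket lifecycle. It assumes the `vectorClient` instance from the Quick Start above; the bucket name, index name, and the 1536 dimension are placeholders, and a bucket can only be deleted once all of its indexes have been removed:

```typescript
// Sketch only: assumes `vectorClient` is the StorageVectorsClient (or
// `storageClient.vectors` / `supabase.storage.vectors`) created in the Quick Start.
async function bucketLifecycle() {
  // 1. Create the bucket
  const { error: createError } = await vectorClient.createBucket('embeddings-staging')
  if (createError) {
    console.error('Could not create bucket:', createError.message)
    return
  }

  // 2. Scope to the bucket and create an index inside it
  const bucket = vectorClient.from('embeddings-staging')
  await bucket.createIndex({
    indexName: 'documents',
    dataType: 'float32',
    dimension: 1536, // placeholder: must match your embedding model's output size
    distanceMetric: 'cosine',
  })

  // ... insert and query vectors via bucket.index('documents') ...

  // 3. Tear down: remove every index first, then the (now empty) bucket
  await bucket.deleteIndex('documents')
  const { error: deleteError } = await vectorClient.deleteBucket('embeddings-staging')
  if (deleteError) {
    console.error('Bucket delete failed (is it empty?):', deleteError.message)
  }
}
```

Each of these calls is documented individually in the subsections that follow.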
##### Create Bucket ```typescript -const { data, error } = await vectorClient.createVectorBucket('my-bucket') +const { data, error } = await vectorClient.createBucket('my-bucket') ``` ##### Get Bucket ```typescript -const { data, error } = await vectorClient.getVectorBucket('my-bucket') +const { data, error } = await vectorClient.getBucket('my-bucket') console.log('Created at:', new Date(data.vectorBucket.creationTime! * 1000)) ``` ##### List Buckets ```typescript -const { data, error } = await vectorClient.listVectorBuckets({ +const { data, error } = await vectorClient.listBuckets({ prefix: 'prod-', maxResults: 100, }) // Pagination if (data?.nextToken) { - const next = await vectorClient.listVectorBuckets({ nextToken: data.nextToken }) + const next = await vectorClient.listBuckets({ nextToken: data.nextToken }) } ``` @@ -770,7 +770,7 @@ if (data?.nextToken) { ```typescript // Bucket must be empty (all indexes deleted first) -const { error } = await vectorClient.deleteVectorBucket('my-bucket') +const { error } = await vectorClient.deleteBucket('my-bucket') ``` #### Vector Indexes @@ -947,7 +947,7 @@ await index.deleteVectors({ The library uses a consistent error handling pattern: ```typescript -const { data, error } = await vectorClient.createVectorBucket('my-bucket') +const { data, error } = await vectorClient.createBucket('my-bucket') if (error) { console.error('Error:', error.message) @@ -976,7 +976,7 @@ const vectorClient = new StorageVectorsClient(url, options) vectorClient.throwOnError() try { - const { data } = await vectorClient.createVectorBucket('my-bucket') + const { data } = await vectorClient.createBucket('my-bucket') // data is guaranteed to be present } catch (error) { if (error instanceof StorageVectorsApiError) { diff --git a/packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts b/packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts index 060f310c1..9ce309b50 100644 --- a/packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts +++ b/packages/core/storage-js/src/lib/vectors/StorageVectorsClient.ts @@ -40,7 +40,7 @@ export interface StorageVectorsClientOptions { * const vectors = storageClient.vectors * * // Use vector operations - * await vectors.createVectorBucket('embeddings-prod') + * await vectors.createBucket('embeddings-prod') * const bucket = vectors.from('embeddings-prod') * await bucket.createIndex({ ... 
}) * ``` @@ -49,15 +49,15 @@ export interface StorageVectorsClientOptions { * ```typescript * import { StorageVectorsClient } from '@supabase/storage-js' * - * const client = new StorageVectorsClient('https://api.example.com', { + * const vectorsClient = new StorageVectorsClient('https://api.example.com', { * headers: { 'Authorization': 'Bearer token' } * }) * * // Access bucket operations - * await client.createVectorBucket('embeddings-prod') + * await vectorsClient.createBucket('embeddings-prod') * * // Access index operations via buckets - * const bucket = client.from('embeddings-prod') + * const bucket = vectorsClient.from('embeddings-prod') * await bucket.createIndex({ * indexName: 'documents', * dataType: 'float32', diff --git a/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts index 4700c7ddc..c4a49d2ce 100644 --- a/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts +++ b/packages/core/storage-js/src/lib/vectors/VectorBucketApi.ts @@ -19,6 +19,12 @@ export default class VectorBucketApi { protected fetch: Fetch protected shouldThrowOnError = false + /** + * Creates a new VectorBucketApi instance + * @param url - The base URL for the storage vectors API + * @param headers - HTTP headers to include in requests + * @param fetch - Optional custom fetch implementation + */ constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) { this.url = url.replace(/\/$/, '') this.headers = { ...DEFAULT_HEADERS, ...headers } @@ -34,7 +40,7 @@ export default class VectorBucketApi { * ```typescript * const client = new VectorBucketApi(url, headers) * client.throwOnError() - * const { data } = await client.createVectorBucket('my-bucket') // throws on error + * const { data } = await client.createBucket('my-bucket') // throws on error * ``` */ public throwOnError(): this { @@ -56,13 +62,13 @@ export default class VectorBucketApi { * * @example * ```typescript - * const { data, error } = await client.createVectorBucket('embeddings-prod') + * const { data, error } = await client.createBucket('embeddings-prod') * if (error) { * console.error('Failed to create bucket:', error.message) * } * ``` */ - async createVectorBucket(vectorBucketName: string): Promise> { + async createBucket(vectorBucketName: string): Promise> { try { const data = await post( this.fetch, @@ -95,13 +101,13 @@ export default class VectorBucketApi { * * @example * ```typescript - * const { data, error } = await client.getVectorBucket('embeddings-prod') + * const { data, error } = await client.getBucket('embeddings-prod') * if (data) { * console.log('Bucket created at:', new Date(data.vectorBucket.creationTime! 
* 1000)) * } * ``` */ - async getVectorBucket( + async getBucket( vectorBucketName: string ): Promise> { try { @@ -139,17 +145,17 @@ export default class VectorBucketApi { * @example * ```typescript * // List all buckets with prefix 'prod-' - * const { data, error } = await client.listVectorBuckets({ prefix: 'prod-' }) + * const { data, error } = await client.listBuckets({ prefix: 'prod-' }) * if (data) { * console.log('Found buckets:', data.buckets.length) * // Fetch next page if available * if (data.nextToken) { - * const next = await client.listVectorBuckets({ nextToken: data.nextToken }) + * const next = await client.listBuckets({ nextToken: data.nextToken }) * } * } * ``` */ - async listVectorBuckets( + async listBuckets( options: ListVectorBucketsOptions = {} ): Promise> { try { @@ -183,13 +189,13 @@ export default class VectorBucketApi { * @example * ```typescript * // Delete all indexes first, then delete bucket - * const { error } = await client.deleteVectorBucket('old-bucket') + * const { error } = await client.deleteBucket('old-bucket') * if (error?.statusCode === 'S3VectorBucketNotEmpty') { * console.error('Must delete all indexes first') * } * ``` */ - async deleteVectorBucket(vectorBucketName: string): Promise> { + async deleteBucket(vectorBucketName: string): Promise> { try { const data = await post( this.fetch, diff --git a/packages/core/storage-js/test/bucket-api.spec.ts b/packages/core/storage-js/test/bucket-api.spec.ts index 03498fe56..deb51964b 100644 --- a/packages/core/storage-js/test/bucket-api.spec.ts +++ b/packages/core/storage-js/test/bucket-api.spec.ts @@ -20,11 +20,11 @@ describe('VectorBucketApi Integration Tests', () => { client = createTestClient() }) - describe('createVectorBucket', () => { + describe('createBucket', () => { it('should create a new vector bucket successfully', async () => { const bucketName = generateTestName('test-bucket') - const response = await client.createVectorBucket(bucketName) + const response = await client.createBucket(bucketName) assertSuccessResponse(response) expect(response.data).toEqual({}) @@ -34,10 +34,10 @@ describe('VectorBucketApi Integration Tests', () => { const bucketName = generateTestName('test-bucket') // Create bucket first time - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) // Try to create again - const response = await client.createVectorBucket(bucketName) + const response = await client.createBucket(bucketName) const error = assertErrorResponse(response) assertErrorCode(error, 409) @@ -48,23 +48,23 @@ describe('VectorBucketApi Integration Tests', () => { const bucket1 = generateTestName('test-bucket-1') const bucket2 = generateTestName('test-bucket-2') - const response1 = await client.createVectorBucket(bucket1) - const response2 = await client.createVectorBucket(bucket2) + const response1 = await client.createBucket(bucket1) + const response2 = await client.createBucket(bucket2) assertSuccessResponse(response1) assertSuccessResponse(response2) }) }) - describe('getVectorBucket', () => { + describe('getBucket', () => { it('should retrieve an existing bucket', async () => { const bucketName = generateTestName('test-bucket') // Create bucket - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) // Retrieve bucket - const response = await client.getVectorBucket(bucketName) + const response = await client.getBucket(bucketName) const data = assertSuccessResponse(response) expect(data.vectorBucket).toBeDefined() @@ -74,7 +74,7 @@ 
describe('VectorBucketApi Integration Tests', () => { }) it('should return not found error for non-existent bucket', async () => { - const response = await client.getVectorBucket('non-existent-bucket') + const response = await client.getBucket('non-existent-bucket') const error = assertErrorResponse(response) assertErrorCode(error, 404) @@ -84,8 +84,8 @@ describe('VectorBucketApi Integration Tests', () => { it('should return bucket with encryption configuration if set', async () => { const bucketName = generateTestName('test-bucket') - await client.createVectorBucket(bucketName) - const response = await client.getVectorBucket(bucketName) + await client.createBucket(bucketName) + const response = await client.getBucket(bucketName) const data = assertSuccessResponse(response) // Encryption configuration is optional @@ -95,15 +95,15 @@ describe('VectorBucketApi Integration Tests', () => { }) }) - describe('listVectorBuckets', () => { + describe('listBuckets', () => { it('should list all buckets', async () => { const bucket1 = generateTestName('test-bucket-1') const bucket2 = generateTestName('test-bucket-2') - await client.createVectorBucket(bucket1) - await client.createVectorBucket(bucket2) + await client.createBucket(bucket1) + await client.createBucket(bucket2) - const response = await client.listVectorBuckets() + const response = await client.listBuckets() const data = assertSuccessResponse(response) expect(data.buckets).toBeDefined() @@ -121,11 +121,11 @@ describe('VectorBucketApi Integration Tests', () => { const bucket2 = `${prefix}-bucket-2` const bucket3 = generateTestName('other-bucket') - await client.createVectorBucket(bucket1) - await client.createVectorBucket(bucket2) - await client.createVectorBucket(bucket3) + await client.createBucket(bucket1) + await client.createBucket(bucket2) + await client.createBucket(bucket3) - const response = await client.listVectorBuckets({ prefix }) + const response = await client.listBuckets({ prefix }) const data = assertSuccessResponse(response) expect(data.buckets.length).toBeGreaterThanOrEqual(2) @@ -142,7 +142,7 @@ describe('VectorBucketApi Integration Tests', () => { }) it('should support pagination with maxResults', async () => { - const response = await client.listVectorBuckets({ maxResults: 1 }) + const response = await client.listBuckets({ maxResults: 1 }) const data = assertSuccessResponse(response) expect(data.buckets.length).toBeLessThanOrEqual(1) @@ -154,7 +154,7 @@ describe('VectorBucketApi Integration Tests', () => { }) it('should return empty array when no buckets match prefix', async () => { - const response = await client.listVectorBuckets({ + const response = await client.listBuckets({ prefix: 'non-existent-prefix-' + Date.now(), }) @@ -164,24 +164,24 @@ describe('VectorBucketApi Integration Tests', () => { }) }) - describe('deleteVectorBucket', () => { + describe('deleteBucket', () => { it('should delete an empty bucket successfully', async () => { const bucketName = generateTestName('test-bucket') - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) - const response = await client.deleteVectorBucket(bucketName) + const response = await client.deleteBucket(bucketName) assertSuccessResponse(response) expect(response.data).toEqual({}) // Verify bucket is deleted - const getResponse = await client.getVectorBucket(bucketName) + const getResponse = await client.getBucket(bucketName) assertErrorResponse(getResponse) }) it('should return not found error for non-existent bucket', async () => { - const 
response = await client.deleteVectorBucket('non-existent-bucket') + const response = await client.deleteBucket('non-existent-bucket') const error = assertErrorResponse(response) assertErrorCode(error, 404) @@ -190,7 +190,7 @@ describe('VectorBucketApi Integration Tests', () => { it('should return error when bucket is not empty', async () => { const bucketName = generateTestName('test-bucket') - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) // Create an index in the bucket const bucket = client.from(bucketName) @@ -202,7 +202,7 @@ describe('VectorBucketApi Integration Tests', () => { }) // Try to delete bucket with index - const response = await client.deleteVectorBucket(bucketName) + const response = await client.deleteBucket(bucketName) const error = assertErrorResponse(response) assertErrorCode(error, 400) @@ -212,7 +212,7 @@ describe('VectorBucketApi Integration Tests', () => { it('should successfully delete bucket after removing all indexes', async () => { const bucketName = generateTestName('test-bucket') - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucket = client.from(bucketName) await bucket.createIndex({ @@ -226,7 +226,7 @@ describe('VectorBucketApi Integration Tests', () => { await bucket.deleteIndex('test-index') // Now delete the bucket - const response = await client.deleteVectorBucket(bucketName) + const response = await client.deleteBucket(bucketName) assertSuccessResponse(response) }) @@ -236,15 +236,15 @@ describe('VectorBucketApi Integration Tests', () => { it('should throw error instead of returning error response', async () => { client.throwOnError() - await expect(client.getVectorBucket('non-existent-bucket')).rejects.toThrow() + await expect(client.getBucket('non-existent-bucket')).rejects.toThrow() }) it('should still return data on success', async () => { const bucketName = generateTestName('test-bucket') client.throwOnError() - await client.createVectorBucket(bucketName) - const response = await client.getVectorBucket(bucketName) + await client.createBucket(bucketName) + const response = await client.getBucket(bucketName) expect(response.data).toBeDefined() expect(response.error).toBeNull() @@ -255,7 +255,7 @@ describe('VectorBucketApi Integration Tests', () => { it('should create a bucket scope successfully', async () => { const bucketName = generateTestName('test-bucket') - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucketScope = client.from(bucketName) diff --git a/packages/core/storage-js/test/e2e-workflow.spec.ts b/packages/core/storage-js/test/e2e-workflow.spec.ts index fc3bbd31e..1fe239788 100644 --- a/packages/core/storage-js/test/e2e-workflow.spec.ts +++ b/packages/core/storage-js/test/e2e-workflow.spec.ts @@ -25,11 +25,11 @@ describe('End-to-End Workflow Tests', () => { const indexName = generateTestName('e2e-index') // Step 1: Create bucket - const createBucketResponse = await client.createVectorBucket(bucketName) + const createBucketResponse = await client.createBucket(bucketName) assertSuccessResponse(createBucketResponse) // Step 2: Verify bucket exists - const getBucketResponse = await client.getVectorBucket(bucketName) + const getBucketResponse = await client.getBucket(bucketName) const bucketData = assertSuccessResponse(getBucketResponse) expect(bucketData.vectorBucket.vectorBucketName).toBe(bucketName) @@ -169,7 +169,7 @@ describe('End-to-End Workflow Tests', () => { assertSuccessResponse(deleteIndexResponse) // Step 
14: Delete bucket - const deleteBucketResponse = await client.deleteVectorBucket(bucketName) + const deleteBucketResponse = await client.deleteBucket(bucketName) assertSuccessResponse(deleteBucketResponse) }) }) @@ -179,7 +179,7 @@ describe('End-to-End Workflow Tests', () => { const bucketName = generateTestName('multi-index-bucket') // Create bucket - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucket = client.from(bucketName) // Create multiple indexes with different configurations @@ -249,7 +249,7 @@ describe('End-to-End Workflow Tests', () => { } // Delete bucket - await client.deleteVectorBucket(bucketName) + await client.deleteBucket(bucketName) }) }) @@ -259,7 +259,7 @@ describe('End-to-End Workflow Tests', () => { const indexName = generateTestName('semantic-index') // Setup - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucket = client.from(bucketName) await bucket.createIndex({ indexName, @@ -365,7 +365,7 @@ describe('End-to-End Workflow Tests', () => { // Cleanup await bucket.deleteIndex(indexName) - await client.deleteVectorBucket(bucketName) + await client.deleteBucket(bucketName) }) }) @@ -375,7 +375,7 @@ describe('End-to-End Workflow Tests', () => { const indexName = generateTestName('batch-index') // Setup - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucket = client.from(bucketName) await bucket.createIndex({ indexName, @@ -434,7 +434,7 @@ describe('End-to-End Workflow Tests', () => { // Cleanup await bucket.deleteIndex(indexName) - await client.deleteVectorBucket(bucketName) + await client.deleteBucket(bucketName) }) }) @@ -444,7 +444,7 @@ describe('End-to-End Workflow Tests', () => { const indexName = generateTestName('error-index') // Create bucket - await client.createVectorBucket(bucketName) + await client.createBucket(bucketName) const bucket = client.from(bucketName) // Try to create index in non-existent bucket (error) @@ -478,7 +478,7 @@ describe('End-to-End Workflow Tests', () => { // Cleanup await bucket.deleteIndex(indexName) - await client.deleteVectorBucket(bucketName) + await client.deleteBucket(bucketName) }) }) }) diff --git a/packages/core/storage-js/test/index-api.spec.ts b/packages/core/storage-js/test/index-api.spec.ts index 5bf975f2b..010f1eaca 100644 --- a/packages/core/storage-js/test/index-api.spec.ts +++ b/packages/core/storage-js/test/index-api.spec.ts @@ -20,7 +20,7 @@ describe('VectorIndexApi Integration Tests', () => { setupTest() client = createTestClient() testBucket = generateTestName('test-bucket') - await client.createVectorBucket(testBucket) + await client.createBucket(testBucket) }) describe('createIndex', () => { @@ -323,7 +323,7 @@ describe('VectorIndexApi Integration Tests', () => { it('should return empty array when no indexes exist', async () => { const emptyBucket = generateTestName('empty-bucket') - await client.createVectorBucket(emptyBucket) + await client.createBucket(emptyBucket) const bucket = client.from(emptyBucket) const response = await bucket.listIndexes() diff --git a/packages/core/storage-js/test/vector-data-api.spec.ts b/packages/core/storage-js/test/vector-data-api.spec.ts index 854b67c3b..20cbdbaf3 100644 --- a/packages/core/storage-js/test/vector-data-api.spec.ts +++ b/packages/core/storage-js/test/vector-data-api.spec.ts @@ -24,7 +24,7 @@ describe('VectorDataApi Integration Tests', () => { testBucket = generateTestName('test-bucket') testIndex = generateTestName('test-index') - 
await client.createVectorBucket(testBucket) + await client.createBucket(testBucket) const bucket = client.from(testBucket) await bucket.createIndex({ indexName: testIndex,