diff --git a/.changeset/dark-items-dig.md b/.changeset/dark-items-dig.md new file mode 100644 index 000000000..d11921c5e --- /dev/null +++ b/.changeset/dark-items-dig.md @@ -0,0 +1,5 @@ +--- +"@tanstack/powersync-db-collection": minor +--- + +Initial Release diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md new file mode 100644 index 000000000..0274a838c --- /dev/null +++ b/docs/collections/powersync-collection.md @@ -0,0 +1,202 @@ +--- +title: PowerSync Collection +--- + +# PowerSync Collection + +PowerSync collections provide seamless integration between TanStack DB and [PowerSync](https://powersync.com), enabling automatic synchronization between your in-memory TanStack DB collections and PowerSync's SQLite database. This gives you offline-ready persistence, real-time sync capabilities, and powerful conflict resolution. + +## Overview + +The `@tanstack/powersync-db-collection` package allows you to create collections that: + +- Automatically mirror the state of an underlying PowerSync SQLite database +- Reactively update when PowerSync records change +- Support optimistic mutations with rollback on error +- Provide persistence handlers to keep PowerSync in sync with TanStack DB transactions +- Use PowerSync's efficient SQLite-based storage engine +- Work with PowerSync's real-time sync features for offline-first scenarios +- Leverage PowerSync's built-in conflict resolution and data consistency guarantees +- Enable real-time synchronization with PostgreSQL, MongoDB and MySQL backends + +## 1. Installation + +Install the PowerSync collection package along with your preferred framework integration. +PowerSync currently works with Web, React Native and Node.js. The examples below use the Web SDK. +See the PowerSync quickstart [docs](https://docs.powersync.com/installation/quickstart-guide) for more details. + +```bash +npm install @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +### 2. 
Create a PowerSync Database and Schema + +```ts +import { Schema, Table, column } from "@powersync/web" + +// Define your schema +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + content: column.text, + created_at: column.text, + updated_at: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)["types"]["documents"] + +// Initialize PowerSync database +const db = new PowerSyncDatabase({ + database: { + dbFilename: "app.sqlite", + }, + schema: APP_SCHEMA, +}) +``` + +### 3. (optional) Configure Sync with a Backend + +```ts +import { + AbstractPowerSyncDatabase, + PowerSyncBackendConnector, + PowerSyncCredentials, +} from "@powersync/web" + +// TODO implement your logic here +class Connector implements PowerSyncBackendConnector { + fetchCredentials: () => Promise + + /** Upload local changes to the app backend. + * + * Use {@link AbstractPowerSyncDatabase.getCrudBatch} to get a batch of changes to upload. + * + * Any thrown errors will result in a retry after the configured wait period (default: 5 seconds). + */ + uploadData: (database: AbstractPowerSyncDatabase) => Promise +} + +// Configure the client to connect to a PowerSync service and your backend +db.connect(new Connector()) +``` + +### 4. Create a TanStack DB Collection + +There are two ways to create a collection: using type inference or using schema validation. 
+ +#### Option 1: Using Type Inference + +```ts +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + }) +) +``` + +#### Option 2: Using Schema Validation + +```ts +import { createCollection } from "@tanstack/react-db" +import { + powerSyncCollectionOptions, + convertPowerSyncSchemaToSpecs, +} from "@tanstack/powersync-db-collection" + +// Convert PowerSync schema to TanStack DB schema +const schemas = convertPowerSyncSchemaToSpecs(APP_SCHEMA) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + schema: schemas.documents, // Use schema for runtime type validation + }) +) +``` + +With schema validation, the collection will validate all inputs at runtime to ensure they match the PowerSync schema types. This provides an extra layer of type safety beyond TypeScript's compile-time checks. + +## Features + +### Offline-First + +PowerSync collections are offline-first by default. All data is stored locally in a SQLite database, allowing your app to work without an internet connection. Changes are automatically synced when connectivity is restored. + +### Real-Time Sync + +When connected to a PowerSync backend, changes are automatically synchronized in real-time across all connected clients. The sync process handles: + +- Bi-directional sync with the server +- Conflict resolution +- Queue management for offline changes +- Automatic retries on connection loss + +### Optimistic Updates + +Updates to the collection are applied optimistically to the local state first, then synchronized with PowerSync and the backend. If an error occurs during sync, the changes are automatically rolled back. 
+ +## Configuration Options + +The `powerSyncCollectionOptions` function accepts the following options: + +```ts +interface PowerSyncCollectionConfig { + database: PowerSyncDatabase // PowerSync database instance + tableName: string // Name of the table in PowerSync + schema?: Schema // Optional schema for validation +} +``` + +## Advanced Transactions + +When you need more control over transaction handling, such as batching multiple operations or handling complex transaction scenarios, you can use PowerSync's transaction system directly with TanStack DB transactions. + +```ts +import { createTransaction } from "@tanstack/react-db" +import { PowerSyncTransactor } from "@tanstack/powersync-db-collection" + +// Create a transaction that won't auto-commit +const batchTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + // Use PowerSyncTransactor to apply the transaction to PowerSync + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, +}) + +// Perform multiple operations in the transaction +batchTx.mutate(() => { + // Add multiple documents in a single transaction + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ + id: crypto.randomUUID(), + name: `Document ${i}`, + content: `Content ${i}`, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }) + } +}) + +// Commit the transaction +await batchTx.commit() + +// Wait for the changes to be persisted +await batchTx.isPersisted.promise +``` + +This approach allows you to: + +- Batch multiple operations into a single transaction +- Control when the transaction is committed +- Ensure all operations are atomic +- Wait for persistence confirmation +- Handle complex transaction scenarios diff --git a/packages/powersync-db-collection/CHANGELOG.md b/packages/powersync-db-collection/CHANGELOG.md new file mode 100644 index 000000000..1c804e7e2 --- /dev/null +++ b/packages/powersync-db-collection/CHANGELOG.md @@ 
-0,0 +1 @@ +# @tanstack/powersync-db-collection diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json new file mode 100644 index 000000000..9e1e0213e --- /dev/null +++ b/packages/powersync-db-collection/package.json @@ -0,0 +1,71 @@ +{ + "name": "@tanstack/powersync-db-collection", + "description": "PowerSync collection for TanStack DB", + "version": "0.0.0", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@tanstack/db": "workspace:*", + "@tanstack/store": "^0.7.7", + "debug": "^4.4.3", + "p-defer": "^4.0.1" + }, + "peerDependencies": { + "@powersync/common": "^1.39.0" + }, + "devDependencies": { + "@powersync/common": "0.0.0-dev-20251003085035", + "@powersync/node": "0.0.0-dev-20251003085035", + "@types/debug": "^4.1.12", + "@vitest/coverage-istanbul": "^3.2.4" + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "src" + ], + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "packageManager": "pnpm@10.17.0", + "author": "JOURNEYAPPS", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "https://github.com/TanStack/db.git", + "directory": "packages/powersync-db-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "powersync", + "realtime", + "local-first", + "sync-engine", + "sync", + "replication", + "opfs", + "indexeddb", + "localstorage", + "optimistic", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . 
--fix", + "test": "npx vitest --run" + }, + "sideEffects": false, + "type": "module", + "types": "dist/esm/index.d.ts" +} diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts new file mode 100644 index 000000000..c804067b3 --- /dev/null +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -0,0 +1,54 @@ +import pDefer from "p-defer" +import type { DiffTriggerOperation } from "@powersync/common" +import type { DeferredPromise } from "p-defer" + +export type PendingOperation = { + tableName: string + operation: DiffTriggerOperation + id: string + timestamp: string +} + +/** + * Optimistic mutations have their optimistic state discarded once transactions have + * been applied. + * We need to ensure that an applied transaction has been observed by the sync diff trigger + * before resolving the transaction application call. + * This store allows registering a wait for a pending operation to have been observed. + */ +export class PendingOperationStore { + private pendingOperations = new Map>() + + /** + * Globally accessible PendingOperationStore + */ + static GLOBAL = new PendingOperationStore() + + /** + * @returns A promise which will resolve once the specified operation has been seen. + */ + waitFor(operation: PendingOperation): Promise { + const managedPromise = pDefer() + this.pendingOperations.set(operation, managedPromise) + return managedPromise.promise + } + + /** + * Marks a set of operations as seen. This will resolve any pending promises.
+ */ + resolvePendingFor(operations: Array) { + for (const operation of operations) { + for (const [pendingOp, deferred] of this.pendingOperations.entries()) { + if ( + pendingOp.tableName == operation.tableName && + pendingOp.operation == operation.operation && + pendingOp.id == operation.id && + pendingOp.timestamp == operation.timestamp + ) { + deferred.resolve() + this.pendingOperations.delete(pendingOp) + } + } + } + } +} diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts new file mode 100644 index 000000000..1c8b7445a --- /dev/null +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -0,0 +1,268 @@ +import { sanitizeSQL } from "@powersync/common" +import DebugModule from "debug" +import { PendingOperationStore } from "./PendingOperationStore" +import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" +import type { PendingMutation, Transaction } from "@tanstack/db" +import type { PendingOperation } from "./PendingOperationStore" +import type { EnhancedPowerSyncCollectionConfig } from "./definitions" + +const debug = DebugModule.debug(`ts/db:powersync`) + +export type TransactorOptions = { + database: AbstractPowerSyncDatabase +} + +/** + * Applies mutations to the PowerSync database. This method is called automatically by the collection's + * insert, update, and delete operations. You typically don't need to call this directly unless you + * have special transaction requirements. 
+ * + * @example + * ```typescript + * // Create a collection + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * + * const addTx = createTransaction({ + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await new PowerSyncTransactor({database: db}).applyTransaction(transaction) + * }, + * }) + * + * addTx.mutate(() => { + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } + * }) + * + * await addTx.commit() + * await addTx.isPersisted.promise + * ``` + * + * @param transaction - The transaction containing mutations to apply + * @returns A promise that resolves when the mutations have been persisted to PowerSync + */ +export class PowerSyncTransactor> { + database: AbstractPowerSyncDatabase + pendingOperationStore: PendingOperationStore + + constructor(options: TransactorOptions) { + this.database = options.database + this.pendingOperationStore = PendingOperationStore.GLOBAL + } + + /** + * Persists a {@link Transaction} to PowerSync's SQLite DB. + */ + async applyTransaction(transaction: Transaction) { + const { mutations } = transaction + + if (mutations.length == 0) { + return + } + /** + * The transaction might contain ops for different collections. + * We can do some optimizations for single collection transactions. + */ + const mutationsCollectionIds = mutations.map( + (mutation) => mutation.collection.id + ) + const collectionIds = Array.from(new Set(mutationsCollectionIds)) + const lastCollectionMutationIndexes = new Map() + const allCollections = collectionIds + .map((id) => mutations.find((mutation) => mutation.collection.id == id)!) 
+ .map((mutation) => mutation.collection) + for (const collectionId of collectionIds) { + lastCollectionMutationIndexes.set( + collectionId, + mutationsCollectionIds.lastIndexOf(collectionId) + ) + } + + // Check all the observers are ready before taking a lock + await Promise.all( + allCollections.map(async (collection) => { + if (collection.isReady()) { + return + } + await new Promise((resolve) => collection.onFirstReady(resolve)) + }) + ) + + // Persist to PowerSync + const { whenComplete } = await this.database.writeTransaction( + async (tx) => { + const pendingOperations: Array = [] + + for (const [index, mutation] of mutations.entries()) { + /** + * Each collection processes events independently. We need to make sure the + * last operation for each collection has been seen. + */ + const shouldWait = + index == lastCollectionMutationIndexes.get(mutation.collection.id) + switch (mutation.type) { + case `insert`: + pendingOperations.push( + await this.handleInsert(mutation, tx, shouldWait) + ) + break + case `update`: + pendingOperations.push( + await this.handleUpdate(mutation, tx, shouldWait) + ) + break + case `delete`: + pendingOperations.push( + await this.handleDelete(mutation, tx, shouldWait) + ) + break + } + } + + /** + * Return a promise from the writeTransaction, without awaiting it. + * This promise will resolve once the entire transaction has been + * observed via the diff triggers. + * We return without awaiting in order to free the writeLock. 
+ */ + return { + whenComplete: Promise.all( + pendingOperations + .filter((op) => !!op) + .map((op) => this.pendingOperationStore.waitFor(op)) + ), + } + } + ) + + // Wait for the change to be observed via the diff trigger + await whenComplete + } + + protected async handleInsert( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { + debug(`insert`, mutation) + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + + await context.execute( + ` + INSERT into ${tableName} + (${keys.join(`, `)}) + VALUES + (${keys.map((_) => `?`).join(`, `)}) + `, + Object.values(mutation.modified) + ) + } + ) + } + + protected async handleUpdate( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { + debug(`update`, mutation) + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + await context.execute( + ` + UPDATE ${tableName} + SET ${keys.map((key) => `${key} = ?`).join(`, `)} + WHERE id = ? + `, + [ + ...Object.values(mutation.modified), + asPowerSyncRecord(mutation.modified).id, + ] + ) + } + ) + } + + protected async handleDelete( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { + debug(`delete`, mutation) + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + await context.execute( + ` + DELETE FROM ${tableName} WHERE id = ?
+ `, + [asPowerSyncRecord(mutation.original).id] + ) + } + ) + } + + /** + * Helper function which wraps a persistence operation by: + * - Fetching the mutation's collection's SQLite table details + * - Executing the mutation + * - Returning the last pending diff op if required + */ + protected async handleOperationWithCompletion( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean, + handler: (tableName: string, mutation: PendingMutation) => Promise + ): Promise { + const { tableName, trackedTableName } = ( + mutation.collection.config as EnhancedPowerSyncCollectionConfig + ).utils.getMeta() + + if (!tableName) { + throw new Error(`Could not get tableName from mutation's collection config. + The provided mutation might not have originated from PowerSync.`) + } + + await handler(sanitizeSQL`${tableName}`, mutation) + + if (!waitForCompletion) { + return null + } + + // Need to get the operation in order to wait for it + const diffOperation = await context.get<{ id: string; timestamp: string }>( + sanitizeSQL`SELECT id, timestamp FROM ${trackedTableName} ORDER BY timestamp DESC LIMIT 1` + ) + return { + tableName, + id: diffOperation.id, + operation: mapOperationToPowerSync(mutation.type), + timestamp: diffOperation.timestamp, + } + } +} diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts new file mode 100644 index 000000000..69f3b243f --- /dev/null +++ b/packages/powersync-db-collection/src/definitions.ts @@ -0,0 +1,91 @@ +import type { AbstractPowerSyncDatabase } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" + +/** + * Configuration interface for PowerSync collection options + * @template T - The type of items in the collection + * @template TSchema - The schema type for validation + */ +/** + * Configuration options for creating a PowerSync collection. 
+ * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export type PowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The name of the table in PowerSync database */ + tableName: string + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase + /** + * The maximum number of documents to read from the SQLite table + * in a single batch during the initial sync between PowerSync and the + * in-memory TanStack DB collection. + * + * @remarks + * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified. + * - Larger values reduce the number of round trips to the storage + * engine but increase memory usage per batch. + * - Smaller values may lower memory usage and allow earlier + * streaming of initial results, at the cost of more query calls. + */ + syncBatchSize?: number +} + +export type PowerSyncCollectionMeta = { + /** + * The SQLite table representing the collection. + */ + tableName: string + /** + * The internal table used to track diff for the collection. 
+ */ + trackedTableName: string +} + +export type EnhancedPowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema +} + +export type PowerSyncCollectionUtils = { + getMeta: () => PowerSyncCollectionMeta +} + +/** + * Default value for {@link PowerSyncCollectionConfig#syncBatchSize} + */ +export const DEFAULT_BATCH_SIZE = 1000 diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts new file mode 100644 index 000000000..13ace1fb6 --- /dev/null +++ b/packages/powersync-db-collection/src/helpers.ts @@ -0,0 +1,46 @@ +import { DiffTriggerOperation } from "@powersync/common" + +/** + * All PowerSync table records have a uuid `id` column. + */ +export type PowerSyncRecord = { + id: string + [key: string]: unknown +} + +export function asPowerSyncRecord(record: any): PowerSyncRecord { + if (typeof record.id !== `string`) { + throw new Error(`Record must have a string id field`) + } + return record as PowerSyncRecord +} + +/** + * Maps {@link DiffTriggerOperation} to TanstackDB operations + */ +export function mapOperation(operation: DiffTriggerOperation) { + switch (operation) { + case DiffTriggerOperation.INSERT: + return `insert` + case DiffTriggerOperation.UPDATE: + return `update` + case DiffTriggerOperation.DELETE: + return `delete` + } +} + +/** + * Maps TanstackDB operations to {@link DiffTriggerOperation} + */ +export function mapOperationToPowerSync(operation: string) { + switch (operation) { + case `insert`: + return DiffTriggerOperation.INSERT + case `update`: + return DiffTriggerOperation.UPDATE + case `delete`: + return DiffTriggerOperation.DELETE + default: + throw new Error(`Unknown operation ${operation} received`) + } +} diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts new file mode 100644 index 
000000000..152f09076 --- /dev/null +++ b/packages/powersync-db-collection/src/index.ts @@ -0,0 +1,4 @@ +export * from "./definitions" +export * from "./powersync" +export * from "./PowerSyncTransactor" +export * from "./schema" diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts new file mode 100644 index 000000000..ed24ba369 --- /dev/null +++ b/packages/powersync-db-collection/src/powersync.ts @@ -0,0 +1,299 @@ +import { DiffTriggerOperation, sanitizeSQL } from "@powersync/common" +import { DEFAULT_BATCH_SIZE } from "./definitions" +import { asPowerSyncRecord, mapOperation } from "./helpers" +import { PendingOperationStore } from "./PendingOperationStore" +import { PowerSyncTransactor } from "./PowerSyncTransactor" +import type { TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" +import type { + EnhancedPowerSyncCollectionConfig, + PowerSyncCollectionConfig, + PowerSyncCollectionUtils, +} from "./definitions" +import type { PendingOperation } from "./PendingOperationStore" + +/** + * Creates PowerSync collection options for use with a standard Collection + * + * @template TExplicit - The explicit type of items in the collection (highest priority) + * @template TSchema - The schema type for validation and type inference (second priority) + * @param config - Configuration options for the PowerSync collection + * @returns Collection options with utilities + */ + +// Overload for when schema is provided +/** + * Creates a PowerSync collection configuration with schema validation. 
+ * + * @example + * ```typescript + * // With schema validation + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * schema: APP_SCHEMA, + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig, T> +): CollectionConfig, string, T> & { + schema: T + utils: PowerSyncCollectionUtils +} + +/** + * Creates a PowerSync collection configuration without schema validation. + * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig & { + schema?: never + } +): CollectionConfig & { + schema?: never + utils: PowerSyncCollectionUtils +} + +/** + * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. + */ +export function powerSyncCollectionOptions< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +>( + config: PowerSyncCollectionConfig +): EnhancedPowerSyncCollectionConfig { + const { + database, + tableName, + syncBatchSize = DEFAULT_BATCH_SIZE, + ...restConfig + } = config + + /** + * The onInsert, onUpdate, onDelete handlers should only return + * after we have written the changes to Tanstack DB. + * We currently only write to Tanstack DB from a diff trigger. + * We wait for the diff trigger to observe the change, + * and only then return from the on[X] handlers. 
+ * This ensures that when the transaction is reported as + * complete to the caller, the in-memory state is already + * consistent with the database. + */ + const pendingOperationStore = PendingOperationStore.GLOBAL + // Keep the tracked table unique in case of multiple tabs. + const trackedTableName = `__${tableName}_tracking_${Math.floor( + Math.random() * 0xffffffff + ) + .toString(16) + .padStart(8, `0`)}` + + const transactor = new PowerSyncTransactor({ + database, + }) + + /** + * "sync" + * Notice that this describes the Sync between the local SQLite table + * and the in-memory tanstack-db collection. + */ + const sync: SyncConfig = { + sync: (params) => { + const { begin, write, commit, markReady } = params + const abortController = new AbortController() + + // The sync function needs to be synchronous + async function start() { + database.logger.info( + `Sync is starting for ${tableName} into ${trackedTableName}` + ) + database.onChangeWithCallback( + { + onChange: async () => { + await database + .writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] + + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? 
{ id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + tableName, + }) + } + + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) + + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + .catch((error) => { + database.logger.error( + `An error has been detected in the sync handler`, + error + ) + }) + }, + }, + { + signal: abortController.signal, + triggerImmediate: false, + tables: [trackedTableName], + } + ) + + const disposeTracking = await database.triggers.createDiffTrigger({ + source: tableName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + hooks: { + beforeCreate: async (context) => { + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + const batchItems = await context.getAll( + sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? 
OFFSET ?`, + [syncBatchSize, cursor] + ) + currentBatchCount = batchItems.length + cursor += currentBatchCount + for (const row of batchItems) { + write({ + type: `insert`, + value: row, + }) + } + commit() + } + markReady() + database.logger.info( + `Sync is ready for ${tableName} into ${trackedTableName}` + ) + }, + }, + }) + + // If the abort controller was aborted while processing the request above + if (abortController.signal.aborted) { + await disposeTracking() + } else { + abortController.signal.addEventListener( + `abort`, + () => { + disposeTracking() + }, + { once: true } + ) + } + } + + start().catch((error) => + database.logger.error( + `Could not start syncing process for ${tableName} into ${trackedTableName}`, + error + ) + ) + + return () => { + database.logger.info( + `Sync has been stopped for ${tableName} into ${trackedTableName}` + ) + abortController.abort() + } + }, + // Expose the getSyncMetadata function + getSyncMetadata: undefined, + } + + const getKey = (record: T) => asPowerSyncRecord(record).id + + const outputConfig: EnhancedPowerSyncCollectionConfig = { + ...restConfig, + getKey, + // Syncing should start immediately since we need to monitor the changes for mutations + startSync: true, + sync, + onInsert: async (params) => { + // The transaction here should only ever contain a single insert mutation + return await transactor.applyTransaction(params.transaction) + }, + onUpdate: async (params) => { + // The transaction here should only ever contain a single update mutation + return await transactor.applyTransaction(params.transaction) + }, + onDelete: async (params) => { + // The transaction here should only ever contain a single delete mutation + return await transactor.applyTransaction(params.transaction) + }, + utils: { + getMeta: () => ({ + tableName, + trackedTableName, + }), + }, + } + return outputConfig +} diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts new file mode 
100644 index 000000000..dc894bf58 --- /dev/null +++ b/packages/powersync-db-collection/src/schema.ts @@ -0,0 +1,197 @@ +import { ColumnType } from "@powersync/common" +import type { + ColumnsType, + ExtractColumnValueType, + Schema, + Table, +} from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +/** + * Utility type that extracts the typed structure of a table based on its column definitions. + * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. + * + * @template Columns - The ColumnsType definition containing column configurations + * @example + * ```typescript + * const table = new Table({ + * name: column.text, + * age: column.integer + * }) + * type TableType = ExtractedTable + * // Results in: { name: string | null, age: number | null } + * ``` + */ +type ExtractedTable = { + [K in keyof Columns]: ExtractColumnValueType +} & { + id: string +} + +/** + * Converts a PowerSync Table instance to a StandardSchemaV1 schema. + * Creates a schema that validates the structure and types of table records + * according to the PowerSync table definition. 
+ * + * @template Columns - The ColumnsType definition containing column configurations + * @param table - The PowerSync Table instance to convert + * @returns A StandardSchemaV1 compatible schema with proper type validation + * + * @example + * ```typescript + * const usersTable = new Table({ + * name: column.text, + * age: column.integer + * }) + * + * const schema = convertTableToSchema(usersTable) + * // Now you can use this schema with powerSyncCollectionOptions + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: schema + * }) + * ) + * ``` + */ +export function convertTableToSchema( + table: Table +): StandardSchemaV1> { + // Create validate function that checks types according to column definitions + const validate = ( + value: unknown + ): + | StandardSchemaV1.SuccessResult> + | StandardSchemaV1.FailureResult => { + if (typeof value != `object` || value == null) { + return { + issues: [ + { + message: `Value must be an object`, + }, + ], + } + } + + const issues: Array = [] + + // Check id field + if (!(`id` in value) || typeof (value as any).id != `string`) { + issues.push({ + message: `id field must be a string`, + path: [`id`], + }) + } + + // Check each column + for (const column of table.columns) { + const val = (value as ExtractedTable)[column.name] + + if (val == null) { + continue + } + + switch (column.type) { + case ColumnType.TEXT: + if (typeof val != `string`) { + issues.push({ + message: `${column.name} must be a string or null`, + path: [column.name], + }) + } + break + case ColumnType.INTEGER: + case ColumnType.REAL: + if (typeof val != `number`) { + issues.push({ + message: `${column.name} must be a number or null`, + path: [column.name], + }) + } + break + } + } + + if (issues.length > 0) { + return { issues } + } + + return { value: { ...value } as ExtractedTable } + } + + return { + "~standard": { + version: 1, + vendor: `powersync`, + validate, + types: { + 
input: {} as ExtractedTable, + output: {} as ExtractedTable, + }, + }, + } +} + +/** + * Converts an entire PowerSync Schema (containing multiple tables) into a collection of StandardSchemaV1 schemas. + * Each table in the schema is converted to its own StandardSchemaV1 schema while preserving all type information. + * + * @template Tables - A record type mapping table names to their Table definitions + * @param schema - The PowerSync Schema containing multiple table definitions + * @returns An object where each key is a table name and each value is that table's StandardSchemaV1 schema + * + * @example + * ```typescript + * const mySchema = new Schema({ + * users: new Table({ + * name: column.text, + * age: column.integer + * }), + * posts: new Table({ + * title: column.text, + * views: column.integer + * }) + * }) + * + * const standardizedSchemas = convertSchemaToSpecs(mySchema) + * // Result has type: + * // { + * // users: StandardSchemaV1<{ name: string | null, age: number | null }>, + * // posts: StandardSchemaV1<{ title: string | null, views: number | null }> + * // } + * + * // Can be used with collections: + * const usersCollection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: standardizedSchemas.users + * }) + * ) + * ``` + */ +export function convertPowerSyncSchemaToSpecs< + Tables extends Record>, +>( + schema: Schema +): { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > +} { + // Create a map to store the standardized schemas + const standardizedSchemas = {} as { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > + } + + // Iterate through each table in the schema + schema.tables.forEach((table) => { + // Convert each table to a StandardSchemaV1 and store it in the result map + ;(standardizedSchemas as any)[table.name] = convertTableToSchema(table) + }) + + return standardizedSchemas +} diff --git 
a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts new file mode 100644 index 000000000..78dcca83b --- /dev/null +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -0,0 +1,567 @@ +import { randomUUID } from "node:crypto" +import { tmpdir } from "node:os" +import { + CrudEntry, + PowerSyncDatabase, + Schema, + Table, + column, +} from "@powersync/node" +import { + SchemaValidationError, + createCollection, + createTransaction, + eq, + liveQueryCollectionOptions, +} from "@tanstack/db" +import { describe, expect, it, onTestFinished, vi } from "vitest" +import { powerSyncCollectionOptions } from "../src" +import { PowerSyncTransactor } from "../src/PowerSyncTransactor" +import { convertPowerSyncSchemaToSpecs } from "../src/schema" +import type { AbstractPowerSyncDatabase } from "@powersync/node" + +const APP_SCHEMA = new Schema({ + users: new Table({ + name: column.text, + }), + documents: new Table({ + name: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)[`types`][`documents`] +type User = (typeof APP_SCHEMA)[`types`][`users`] + +describe(`PowerSync Integration`, () => { + async function createDatabase() { + const db = new PowerSyncDatabase({ + database: { + dbFilename: `test.sqlite`, + dbLocation: tmpdir(), + implementation: { type: `node:sqlite` }, + }, + schema: APP_SCHEMA, + }) + onTestFinished(async () => { + await db.disconnectAndClear() + await db.close() + }) + // Initial clear in case a test might have failed + await db.disconnectAndClear() + return db + } + + async function createTestData(db: AbstractPowerSyncDatabase) { + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'one'), + (uuid(), 'two'), + (uuid(), 'three') + `) + } + + describe(`schema`, () => { + it(`should accept a schema`, async () => { + const db = await createDatabase() + + // the collection should infer types and validate with the schema + const collection = 
createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + schema: convertPowerSyncSchemaToSpecs(APP_SCHEMA).documents, + }) + ) + + collection.insert({ + id: randomUUID(), + name: `aname`, + }) + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(2) + + // should validate inputs + try { + collection.insert({} as any) + console.log(`failed`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(`id field must be a string`) + } + } + }) + }) + + describe(`sync`, () => { + it(`should initialize and fetch initial data`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + ]) + }) + + it(`should update when data syncs`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + // Make an update, simulates a sync from another client + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'four') + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(4) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + 
DELETE from documents + WHERE name = 'two' + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + UPDATE documents + SET name = 'updated' + WHERE name = 'one' + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `updated`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + }) + + it(`should propagate collection mutations to PowerSync`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + const id = randomUUID() + const tx = collection.insert({ + id, + name: `new`, + }) + + // The insert should optimistically update the collection + const newDoc = collection.get(id) + expect(newDoc).toBeDefined() + expect(newDoc!.name).toBe(`new`) + + await tx.isPersisted.promise + // The item should now be present in PowerSync + // We should also have patched it back in to Tanstack DB (removing the optimistic state) + + // Now do an update + await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted + .promise + + const updatedDoc = collection.get(id) + expect(updatedDoc).toBeDefined() + expect(updatedDoc!.name).toBe(`updatedNew`) + + await collection.delete(id).isPersisted.promise + + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + expect(crudEntries.length).toBe(6) + 
// We can only group transactions for similar operations + expect(crudEntries.map((e) => e.op)).toEqual([ + `PUT`, + `PUT`, + `PUT`, + `PUT`, + `PATCH`, + `DELETE`, + ]) + }) + + it(`should handle transactions`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + expect(collection.size).toBe(3) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + collection.insert({ id: randomUUID(), name: `tx-${i}` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(collection.size).toBe(8) + + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. 
+ */ + expect( + crudEntries + .reverse() + .slice(0, 5) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) + + it(`should handle transactions with multiple collections`, async () => { + const db = await createDatabase() + await createTestData(db) + + const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => documentsCollection.cleanup()) + + const usersCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `users`, + }) + ) + onTestFinished(() => usersCollection.cleanup()) + + await documentsCollection.stateWhenReady() + await usersCollection.stateWhenReady() + + expect(documentsCollection.size).toBe(3) + expect(usersCollection.size).toBe(0) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) + usersCollection.insert({ id: randomUUID(), name: `user` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(documentsCollection.size).toBe(8) + expect(usersCollection.size).toBe(5) + + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. 
+ */ + expect( + crudEntries + .reverse() + .slice(0, 10) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) + }) + + describe(`General use`, () => { + it(`should rollback transactions on error`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + expect(collection.size).eq(0) + const id = randomUUID() + // Attempt to insert invalid data + // We can only do this since we aren't using schema validation here + addTx.mutate(() => { + collection.insert({ + id, + name: new Error() as unknown as string, // This will cause a SQL error eventually + }) + }) + + // This should be present in the optimisic state, but should be reverted when attempting to persist + expect(collection.size).eq(1) + expect((collection.get(id)?.name as any) instanceof Error).true + + try { + await addTx.commit() + await addTx.isPersisted.promise + expect.fail(`Should have thrown an error`) + } catch (error) { + expect(error).toBeDefined() + // The collection should be in a clean state + expect(collection.size).toBe(0) + } + }) + + it(`should work with live queries`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + const liveDocuments = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ document: collection }) + .where(({ document }) => eq(document.name, `book`)) + .select(({ 
document }) => ({ + id: document.id, + name: document.name, + })), + }) + ) + + expect(liveDocuments.size).eq(0) + + const bookNames = new Set() + + liveDocuments.subscribeChanges((changes) => { + changes + .map((change) => change.value.name) + .forEach((change) => bookNames.add(change!)) + }) + + await collection.insert({ + id: randomUUID(), + name: `notabook`, + }).isPersisted.promise + await collection.insert({ + id: randomUUID(), + name: `book`, + }).isPersisted.promise + + expect(collection.size).eq(2) + await vi.waitFor( + () => { + expect(Array.from(bookNames)).deep.equals([`book`]) + }, + { timeout: 1000 } + ) + }) + }) + + describe(`Multiple Clients`, async () => { + it(`should sync updates between multiple clients`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collectionA = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionA.cleanup()) + await collectionA.stateWhenReady() + + const collectionB = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionB.cleanup()) + await collectionB.stateWhenReady() + + await createTestData(db) + + // Both collections should have the data present after insertion + await vi.waitFor( + () => { + expect(collectionA.size).eq(3) + expect(collectionB.size).eq(3) + }, + { timeout: 1000 } + ) + }) + }) + + describe(`Lifecycle`, async () => { + it(`should cleanup resources`, async () => { + const db = await createDatabase() + const collectionOptions = powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + + const meta = collectionOptions.utils.getMeta() + + const tableExists = async (): Promise => { + const result = await db.writeLock(async (tx) => { + return tx.get<{ count: number }>( + ` + SELECT COUNT(*) as count + FROM sqlite_temp_master + WHERE type='table' AND name = ? 
+ `, + [meta.trackedTableName] + ) + }) + return result.count > 0 + } + + const collection = createCollection(collectionOptions) + await collection.stateWhenReady() + expect(await tableExists()).true + + await collection.cleanup() + + // It seems that even though `cleanup` is async, the sync disposer cannot be async + // We wait for the table to be deleted + await vi.waitFor( + async () => { + expect(await tableExists()).false + }, + { timeout: 1000 } + ) + }) + }) +}) diff --git a/packages/powersync-db-collection/tests/schema.test.ts b/packages/powersync-db-collection/tests/schema.test.ts new file mode 100644 index 000000000..62c562b76 --- /dev/null +++ b/packages/powersync-db-collection/tests/schema.test.ts @@ -0,0 +1,198 @@ +import { Schema, Table, column } from "@powersync/common" +import { describe, expect, it } from "vitest" +import { + convertPowerSyncSchemaToSpecs, + convertTableToSchema, +} from "../src/schema" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +describe(`Schema Conversion`, () => { + describe(`convertTableToSchema`, () => { + it(`should convert a simple table with text and integer columns`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test schema structure + expect(schema).toHaveProperty(`~standard`) + expect(schema[`~standard`].version).toBe(1) + expect(schema[`~standard`].vendor).toBe(`powersync`) + expect(schema[`~standard`].validate).toBeTypeOf(`function`) + + // Test validation with valid data + const validResult = schema[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(validResult.issues).toBeUndefined() + expect(validResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test validation with invalid data + const invalidResult = schema[`~standard`].validate({ + id: `123`, + name: 123, // wrong type + age: `25`, // wrong type + }) as 
StandardSchemaV1.FailureResult + + expect(invalidResult.issues).toHaveLength(2) + expect(invalidResult.issues[0]?.message).toContain(`must be a string`) + expect(invalidResult.issues[1]?.message).toContain(`must be a number`) + }) + + it(`should handle null values correctly`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test validation with null values + const result = schema[`~standard`].validate({ + id: `123`, + name: null, + age: null, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + name: null, + age: null, + }) + }) + + it(`should require id field`, () => { + const table = new Table({ + name: column.text, + }) + + const schema = convertTableToSchema(table) + + // Test validation without id + const result = schema[`~standard`].validate({ + name: `John`, + }) as StandardSchemaV1.FailureResult + + expect(result.issues).toHaveLength(1) + expect(result.issues[0]?.message).toContain(`id field must be a string`) + }) + + it(`should handle all column types`, () => { + const table = new Table({ + text_col: column.text, + int_col: column.integer, + real_col: column.real, + }) + + const schema = convertTableToSchema(table) + + // Test validation with all types + const result = schema[`~standard`].validate({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) + }) + }) + + describe(`convertPowerSyncSchemaToSpecs`, () => { + it(`should convert multiple tables in a schema`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + age: column.integer, + }), + posts: new Table({ + title: column.text, + views: column.integer, + }), + }) + + const result = 
convertPowerSyncSchemaToSpecs(schema) + + // Test structure + expect(result).toHaveProperty(`users`) + expect(result).toHaveProperty(`posts`) + + // Test users table schema + const userValidResult = result.users[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(userValidResult.issues).toBeUndefined() + expect(userValidResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test posts table schema + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + title: `Hello`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + expect(postValidResult.issues).toBeUndefined() + expect(postValidResult.value).toEqual({ + id: `456`, + title: `Hello`, + views: 100, + }) + }) + + it(`should handle empty schema`, () => { + const schema = new Schema({}) + const result = convertPowerSyncSchemaToSpecs(schema) + expect(result).toEqual({}) + }) + + it(`should validate each table independently`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + }), + posts: new Table({ + views: column.integer, + }), + }) + + const result = convertPowerSyncSchemaToSpecs(schema) + + // Test that invalid data in one table doesn't affect the other + const userInvalidResult = result.users[`~standard`].validate({ + id: `123`, + name: 42, // wrong type + }) as StandardSchemaV1.FailureResult + + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + expect(userInvalidResult.issues).toHaveLength(1) + expect(postValidResult.issues).toBeUndefined() + }) + }) +}) diff --git a/packages/powersync-db-collection/tsconfig.docs.json b/packages/powersync-db-collection/tsconfig.docs.json new file mode 100644 index 000000000..5a73feb02 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.docs.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@tanstack/db": 
["../db/src"] + } + }, + "include": ["src"] +} diff --git a/packages/powersync-db-collection/tsconfig.json b/packages/powersync-db-collection/tsconfig.json new file mode 100644 index 000000000..7e586bab3 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react", + "paths": { + "@tanstack/store": ["../store/src"] + } + }, + "include": ["src", "tests", "vite.config.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/powersync-db-collection/vite.config.ts b/packages/powersync-db-collection/vite.config.ts new file mode 100644 index 000000000..af20d0da3 --- /dev/null +++ b/packages/powersync-db-collection/vite.config.ts @@ -0,0 +1,21 @@ +import { tanstackViteConfig } from "@tanstack/config/vite" +import { defineConfig, mergeConfig } from "vitest/config" +import packageJson from "./package.json" + +const config = defineConfig({ + test: { + name: packageJson.name, + dir: `./tests`, + environment: `node`, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { enabled: true }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: `./src/index.ts`, + srcDir: `./src`, + }) +) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ee0d19ba2..c0815337d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -478,7 +478,7 @@ importers: version: 0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) + version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) express: specifier: 
^4.21.2 version: 4.21.2 @@ -650,6 +650,37 @@ importers: specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + packages/powersync-db-collection: + dependencies: + '@standard-schema/spec': + specifier: ^1.0.0 + version: 1.0.0 + '@tanstack/db': + specifier: workspace:* + version: link:../db + '@tanstack/store': + specifier: ^0.7.7 + version: 0.7.7 + debug: + specifier: ^4.4.3 + version: 4.4.3 + p-defer: + specifier: ^4.0.1 + version: 4.0.1 + devDependencies: + '@powersync/common': + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035 + '@powersync/node': + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035) + '@types/debug': + specifier: ^4.1.12 + version: 4.1.12 + '@vitest/coverage-istanbul': + specifier: ^3.2.4 + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + packages/query-db-collection: dependencies: '@standard-schema/spec': @@ -757,7 +788,7 @@ importers: version: 1.9.9 vite-plugin-solid: specifier: ^2.11.8 - version: 2.11.8(@testing-library/jest-dom@6.9.0)(solid-js@1.9.9)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 2.11.8(@testing-library/jest-dom@6.9.0)(solid-js@1.9.9)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) vitest: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) @@ -773,7 +804,7 @@ importers: version: 2.5.4(svelte@5.39.7)(typescript@5.9.3) '@sveltejs/vite-plugin-svelte': specifier: ^6.2.1 - version: 
6.2.1(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 6.2.1(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) @@ -826,7 +857,7 @@ importers: version: 1.0.10 '@vitejs/plugin-vue': specifier: ^5.2.4 - version: 5.2.4(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3)) + version: 5.2.4(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) @@ -2826,6 +2857,18 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@powersync/common@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-k69aY8onIM4eXvj/obFkCadGmKgqMKSgk90Sih8lKF9BrGPGpQU/MtB6673LmhavURQnaS340FpBsL/4p/gk0g==} + + '@powersync/node@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-PbmpmiaHxWNDLQw4bNBakezQwPXCxqeef8E6uzxUH+baYmsIfXx5OY+lI6XtdQ+PbLjY4hFtxHnDb2qpSzJZPg==} + peerDependencies: + '@powersync/common': 0.0.0-dev-20251003085035 + better-sqlite3: 12.x + peerDependenciesMeta: + better-sqlite3: + optional: true + '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -4190,6 +4233,9 @@ packages: resolution: {integrity: 
sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} engines: {node: '>= 0.4'} + async-lock@1.4.1: + resolution: {integrity: sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==} + autoprefixer@10.4.21: resolution: {integrity: sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==} engines: {node: ^10 || ^12 || >=14} @@ -4448,6 +4494,9 @@ packages: colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + comlink@4.4.2: + resolution: {integrity: sha512-OxGdvBmJuNKSCMO4NTl1L47VRp6xn2wG4F/2hYzB6tiCb709otOxtEYCSvK80PtjODfXXZu8ds+Nw5kVCjqd2g==} + commander@11.1.0: resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} engines: {node: '>=16'} @@ -5944,6 +5993,9 @@ packages: js-base64@3.7.8: resolution: {integrity: sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==} + js-logger@1.6.1: + resolution: {integrity: sha512-yTgMCPXVjhmg28CuUH8CKjU+cIKL/G+zTu4Fn4lQxs8mRFH/03QTNvEFngcxfg/gRDiQAOoyCKmMTOm9ayOzXA==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -6696,6 +6748,10 @@ packages: oxc-resolver@11.8.4: resolution: {integrity: sha512-qpimS3tHHEf+kgESMAme+q+rj7aCzMya00u9YdKOKyX2o7q4lozjPo6d7ZTTi979KHEcVOPWdNTueAKdeNq72w==} + p-defer@4.0.1: + resolution: {integrity: sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==} + engines: {node: '>=12'} + p-filter@2.1.0: resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} engines: {node: '>=8'} @@ -10586,6 +10642,18 @@ snapshots: '@pkgr/core@0.2.9': {} + '@powersync/common@0.0.0-dev-20251003085035': + 
dependencies: + js-logger: 1.6.1 + + '@powersync/node@0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035)': + dependencies: + '@powersync/common': 0.0.0-dev-20251003085035 + async-lock: 1.4.1 + bson: 6.10.4 + comlink: 4.4.2 + undici: 7.16.0 + '@protobufjs/aspromise@1.1.2': {} '@protobufjs/base64@1.1.2': {} @@ -10993,24 +11061,24 @@ snapshots: transitivePeerDependencies: - typescript - '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) debug: 4.4.3 svelte: 5.39.7 - vite: 6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - 
'@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.39.7)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.39.7)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) debug: 4.4.3 deepmerge: 4.3.1 magic-string: 0.30.19 svelte: 5.39.7 - vite: 6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - supports-color @@ -11964,9 +12032,9 @@ snapshots: transitivePeerDependencies: - supports-color - '@vitejs/plugin-vue@5.2.4(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3))': + '@vitejs/plugin-vue@5.2.4(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3))': dependencies: - vite: 6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) vue: 3.5.22(typescript@5.9.3) 
'@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.6.1)(jiti@2.6.0)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1))': @@ -12330,6 +12398,8 @@ snapshots: async-function@1.0.0: {} + async-lock@1.4.1: {} + autoprefixer@10.4.21(postcss@8.5.6): dependencies: browserslist: 4.26.2 @@ -12648,6 +12718,8 @@ snapshots: colorette@2.0.20: {} + comlink@4.4.2: {} + commander@11.1.0: optional: true @@ -12940,11 +13012,6 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 - drizzle-zod@0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): - dependencies: - drizzle-orm: 0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7) - zod: 3.25.76 - drizzle-zod@0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): dependencies: drizzle-orm: 0.44.5(@types/pg@8.15.5)(kysely@0.28.7)(pg@8.16.3)(postgres@3.4.7) @@ -14304,6 +14371,8 @@ snapshots: js-base64@3.7.8: {} + js-logger@1.6.1: {} + js-tokens@4.0.0: {} js-tokens@9.0.1: {} @@ -15127,6 +15196,8 @@ snapshots: '@oxc-resolver/binding-win32-ia32-msvc': 11.8.4 '@oxc-resolver/binding-win32-x64-msvc': 11.8.4 + p-defer@4.0.1: {} + p-filter@2.1.0: dependencies: p-map: 2.1.0 @@ -16731,21 +16802,6 @@ snapshots: transitivePeerDependencies: - supports-color - vite-plugin-solid@2.11.8(@testing-library/jest-dom@6.9.0)(solid-js@1.9.9)(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)): - dependencies: - '@babel/core': 7.28.4 - '@types/babel__core': 7.20.5 - babel-preset-solid: 1.9.9(@babel/core@7.28.4)(solid-js@1.9.9) - merge-anything: 5.1.7 - solid-js: 1.9.9 - solid-refresh: 0.6.3(solid-js@1.9.9) - vite: 6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)) - optionalDependencies: - 
'@testing-library/jest-dom': 6.9.0 - transitivePeerDependencies: - - supports-color - vite-plugin-solid@2.11.8(@testing-library/jest-dom@6.9.0)(solid-js@1.9.9)(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: '@babel/core': 7.28.4 @@ -16760,7 +16816,6 @@ snapshots: '@testing-library/jest-dom': 6.9.0 transitivePeerDependencies: - supports-color - optional: true vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.3.6(@types/node@22.18.8)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: @@ -16801,23 +16856,6 @@ snapshots: tsx: 4.20.6 yaml: 2.8.1 - vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - esbuild: 0.25.10 - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 - postcss: 8.5.6 - rollup: 4.52.3 - tinyglobby: 0.2.15 - optionalDependencies: - '@types/node': 24.6.1 - fsevents: 2.3.3 - jiti: 2.6.0 - lightningcss: 1.30.1 - sass: 1.90.0 - tsx: 4.20.6 - yaml: 2.8.1 - vite@7.1.5(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.10 @@ -16873,10 +16911,6 @@ snapshots: optionalDependencies: vite: 6.3.6(@types/node@22.18.8)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu@1.1.1(vite@6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)): - optionalDependencies: - vite: 6.3.6(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu@1.1.1(vite@7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)): optionalDependencies: vite: 7.1.7(@types/node@24.6.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(tsx@4.20.6)(yaml@2.8.1)