From f678672f9c119a85b2e38d7a46e73ac5c4b5e8a7 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Wed, 1 Oct 2025 10:13:43 +0200 Subject: [PATCH 01/15] Compile IR to SQL --- .../electric-db-collection/src/electric.ts | 41 ++++- .../src/sql-compiler.ts | 146 ++++++++++++++++++ 2 files changed, 181 insertions(+), 6 deletions(-) create mode 100644 packages/electric-db-collection/src/sql-compiler.ts diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 3dcb54b64..729517040 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -13,12 +13,14 @@ import { ExpectedNumberInAwaitTxIdError, TimeoutWaitingForTxIdError, } from "./errors" +import { compileSQL } from "./sql-compiler" import type { BaseCollectionConfig, CollectionConfig, DeleteMutationFnParams, Fn, InsertMutationFnParams, + OnLoadMoreOptions, SyncConfig, UpdateMutationFnParams, UtilsRecord, @@ -494,15 +496,42 @@ function createElectricSync>( } }) - // Return the unsubscribe function - return () => { - // Unsubscribe from the stream - unsubscribeStream() - // Abort the abort controller to stop the stream - abortController.abort() + return { + onLoadMore: (opts) => onLoadMore(params, opts), + cleanup: () => { + // Unsubscribe from the stream + unsubscribeStream() + // Abort the abort controller to stop the stream + abortController.abort() + }, } }, // Expose the getSyncMetadata function getSyncMetadata, } } + +async function onLoadMore>( + syncParams: Parameters[`sync`]>[0], + options: OnLoadMoreOptions +) { + const { begin, write, commit } = syncParams + + // TODO: optimize this by keeping track of which snapshot have been loaded already + // and only load this one if it's not a subset of the ones that have been loaded already + + const snapshotParams = compileSQL(options) + + const snapshot = await requestSnapshot(snapshotParams) + + begin() + + snapshot.data.forEach((row) => { + write({ + type: `insert`, + value: row.value, + }) + }) + + commit() +} diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts new file mode 100644 index 000000000..730859461 --- /dev/null +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -0,0 +1,146 @@ +import type { IR, OnLoadMoreOptions } from "@tanstack/db" + +export function compileSQL( + options: OnLoadMoreOptions +): ExternalSubsetParamsRecord { + const { where, orderBy, limit } = options + + const params: Array = [] + const compiledSQL: ExternalSubsetParamsRecord = { params } + + if (where) { + // TODO: this only works when the where expression's PropRefs directly reference a column of the collection + // doesn't work if it goes through aliases because then we need to know the entire query to be able to follow the reference until the base collection (cf. followRef function) + compiledSQL.where = compileBasicExpression(where, params) + } + + if (orderBy) { + compiledSQL.orderBy = compileOrderBy(orderBy, params) + } + + if (limit) { + compiledSQL.limit = limit + } + + return compiledSQL +} + +/** + * Compiles the expression to a SQL string and mutates the params array with the values. 
+ * @param exp - The expression to compile + * @param params - The params array + * @returns The compiled SQL string + */ +function compileBasicExpression( + exp: IR.BasicExpression, + params: Array +): string { + switch (exp.type) { + case `val`: + params.push(exp.value) + return `$${params.length}` + case `ref`: + if (exp.path.length !== 1) { + throw new Error( + `Compiler can't handle nested properties: ${exp.path.join(`.`)}` + ) + } + return exp.path[0]! + case `func`: + return compileFunction(exp, params) + } +} + +function compileOrderBy(orderBy: IR.OrderBy, params: Array): string { + const compiledOrderByClauses = orderBy.map((clause: IR.OrderByClause) => + compileOrderByClause(clause, params) + ) + return compiledOrderByClauses.join(`,`) +} + +function compileOrderByClause( + clause: IR.OrderByClause, + params: Array +): string { + // TODO: what to do with stringSort and locale? + // Correctly supporting them is tricky as it depends on Postgres' collation + const { expression, compareOptions } = clause + let sql = compileBasicExpression(expression, params) + + if (compareOptions.direction === `desc`) { + sql = `${sql} DESC` + } + + if (compareOptions.nulls === `first`) { + sql = `${sql} NULLS FIRST` + } + + if (compareOptions.nulls === `last`) { + sql = `${sql} NULLS LAST` + } + + return sql +} + +function compileFunction( + exp: IR.Func, + params: Array = [] +): string { + const { name, args } = exp + + const opName = getOpName(name) + + const compiledArgs = args.map((arg: IR.BasicExpression) => + compileBasicExpression(arg, params) + ) + + if (isBinaryOp(name)) { + if (compiledArgs.length !== 2) { + throw new Error(`Binary operator ${name} expects 2 arguments`) + } + const [lhs, rhs] = compiledArgs + return `${lhs} ${opName} ${rhs}` + } + + return `${opName}(${compiledArgs.join(`,`)})` +} + +function isBinaryOp(name: string): boolean { + const binaryOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`] + return binaryOps.includes(name) +} + +function getOpName(name: string): string { + const opNames = { + eq: `=`, + gt: `>`, + gte: `>=`, + lt: `<`, + lte: `<=`, + add: `+`, + and: `AND`, + or: `OR`, + not: `NOT`, + isUndefined: `IS NULL`, + isNull: `IS NULL`, + in: `IN`, + like: `LIKE`, + ilike: `ILIKE`, + upper: `UPPER`, + lower: `LOWER`, + length: `LENGTH`, + concat: `CONCAT`, + coalesce: `COALESCE`, + } + return opNames[name as keyof typeof opNames] || name +} + +// TODO: remove this type once we rebase on top of Ilia's PR +// that type will be exported by Ilia's PR +export type ExternalSubsetParamsRecord = { + where?: string + params?: Record + limit?: number + offset?: number + orderBy?: string +} From 347a3c633cbd7f3c0a7a9428c9722a5309992482 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Tue, 23 Sep 2025 15:21:29 +0200 Subject: [PATCH 02/15] Use the stream's requestSnapshot method --- packages/electric-db-collection/src/electric.ts | 5 +++-- packages/electric-db-collection/src/sql-compiler.ts | 2 ++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 729517040..f3d47251b 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -497,7 +497,7 @@ function createElectricSync>( }) return { - onLoadMore: (opts) => onLoadMore(params, opts), + onLoadMore: (opts) => onLoadMore(stream, params, opts), cleanup: () => { // Unsubscribe from the stream unsubscribeStream() @@ -512,6 +512,7 @@ function 
createElectricSync>( } async function onLoadMore>( + stream: ShapeStream, syncParams: Parameters[`sync`]>[0], options: OnLoadMoreOptions ) { @@ -522,7 +523,7 @@ async function onLoadMore>( const snapshotParams = compileSQL(options) - const snapshot = await requestSnapshot(snapshotParams) + const snapshot = await stream.requestSnapshot(snapshotParams) begin() diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index 730859461..3d8ee92a7 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -48,6 +48,8 @@ function compileBasicExpression( return exp.path[0]! case `func`: return compileFunction(exp, params) + default: + throw new Error(`Unknown expression type`) } } From 5a024b57bd84652883a7d86e9c6a99ca3d3f6a1e Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Wed, 24 Sep 2025 09:13:10 +0200 Subject: [PATCH 03/15] Remove todo --- packages/electric-db-collection/src/electric.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index f3d47251b..4a4c28f46 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -517,22 +517,15 @@ async function onLoadMore>( options: OnLoadMoreOptions ) { const { begin, write, commit } = syncParams - - // TODO: optimize this by keeping track of which snapshot have been loaded already - // and only load this one if it's not a subset of the ones that have been loaded already - const snapshotParams = compileSQL(options) - const snapshot = await stream.requestSnapshot(snapshotParams) begin() - snapshot.data.forEach((row) => { write({ type: `insert`, value: row.value, }) }) - commit() } From 7dec8d64196e65ad6af1aff35004cb0893e69505 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Wed, 24 Sep 2025 09:14:17 +0200 Subject: [PATCH 04/15] Modify output format of SQL compiler and serialize the values into PG string format --- .../src/pg-serializer.ts | 27 +++++++++++++ .../src/sql-compiler.ts | 40 +++++++++++++------ 2 files changed, 55 insertions(+), 12 deletions(-) create mode 100644 packages/electric-db-collection/src/pg-serializer.ts diff --git a/packages/electric-db-collection/src/pg-serializer.ts b/packages/electric-db-collection/src/pg-serializer.ts new file mode 100644 index 000000000..b4a3803a7 --- /dev/null +++ b/packages/electric-db-collection/src/pg-serializer.ts @@ -0,0 +1,27 @@ +export function serialize(value: unknown): string { + if (typeof value === `string`) { + return `'${value}'` + } + + if (value === null || value === undefined) { + return `NULL` + } + + if (typeof value === `boolean`) { + return value ? 
`true` : `false` + } + + if (value instanceof Date) { + return `'${value.toISOString()}'` + } + + if (Array.isArray(value)) { + return `ARRAY[${value.map(serialize).join(`,`)}]` + } + + if (typeof value === `object`) { + throw new Error(`Cannot serialize object: ${JSON.stringify(value)}`) + } + + return value.toString() +} diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index 3d8ee92a7..e76e99887 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -1,12 +1,18 @@ +import { serialize } from "./pg-serializer" +import type { ExternalSubsetParamsRecord } from "@electric-sql/client" import type { IR, OnLoadMoreOptions } from "@tanstack/db" +export type CompiledSqlRecord = Omit & { + params?: Array +} + export function compileSQL( options: OnLoadMoreOptions ): ExternalSubsetParamsRecord { const { where, orderBy, limit } = options const params: Array = [] - const compiledSQL: ExternalSubsetParamsRecord = { params } + const compiledSQL: CompiledSqlRecord = { params } if (where) { // TODO: this only works when the where expression's PropRefs directly reference a column of the collection @@ -22,7 +28,20 @@ export function compileSQL( compiledSQL.limit = limit } - return compiledSQL + // Serialize the values in the params array into PG formatted strings + // and transform the array into a Record + const paramsRecord = params.reduce( + (acc, param, index) => { + acc[`${index + 1}`] = serialize(param) + return acc + }, + {} as Record + ) + + return { + ...compiledSQL, + params: paramsRecord, + } } /** @@ -134,15 +153,12 @@ function getOpName(name: string): string { concat: `CONCAT`, coalesce: `COALESCE`, } - return opNames[name as keyof typeof opNames] || name -} -// TODO: remove this type once we rebase on top of Ilia's PR -// that type will be exported by Ilia's PR -export type ExternalSubsetParamsRecord = { - where?: string - params?: Record - limit?: number - offset?: number - orderBy?: string + const opName = opNames[name as keyof typeof opNames] + + if (!opName) { + throw new Error(`Unknown operator/function: ${name}`) + } + + return opName } From 61da27713e4e9cbfa5391b0fcb8024a910cbb697 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Wed, 1 Oct 2025 10:00:40 +0200 Subject: [PATCH 05/15] Fixes to electric collection + unit test --- .../electric-db-collection/src/electric.ts | 24 +--- .../src/pg-serializer.ts | 10 +- .../src/sql-compiler.ts | 4 +- .../tests/electric-live-query.test.ts | 114 ++++++++++++++++++ 4 files changed, 127 insertions(+), 25 deletions(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 4a4c28f46..bcd897dff 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -16,6 +16,7 @@ import { import { compileSQL } from "./sql-compiler" import type { BaseCollectionConfig, + Collection, CollectionConfig, DeleteMutationFnParams, Fn, @@ -497,7 +498,10 @@ function createElectricSync>( }) return { - onLoadMore: (opts) => onLoadMore(stream, params, opts), + onLoadMore: (opts) => { + const snapshotParams = compileSQL(opts) + return stream.requestSnapshot(snapshotParams) + }, cleanup: () => { // Unsubscribe from the stream unsubscribeStream() @@ -511,21 +515,3 @@ function createElectricSync>( } } -async function onLoadMore>( - stream: ShapeStream, - syncParams: Parameters[`sync`]>[0], - options: OnLoadMoreOptions -) { 
- const { begin, write, commit } = syncParams - const snapshotParams = compileSQL(options) - const snapshot = await stream.requestSnapshot(snapshotParams) - - begin() - snapshot.data.forEach((row) => { - write({ - type: `insert`, - value: row.value, - }) - }) - commit() -} diff --git a/packages/electric-db-collection/src/pg-serializer.ts b/packages/electric-db-collection/src/pg-serializer.ts index b4a3803a7..707c4e1b8 100644 --- a/packages/electric-db-collection/src/pg-serializer.ts +++ b/packages/electric-db-collection/src/pg-serializer.ts @@ -3,6 +3,10 @@ export function serialize(value: unknown): string { return `'${value}'` } + if (typeof value === `number`) { + return value.toString() + } + if (value === null || value === undefined) { return `NULL` } @@ -19,9 +23,5 @@ export function serialize(value: unknown): string { return `ARRAY[${value.map(serialize).join(`,`)}]` } - if (typeof value === `object`) { - throw new Error(`Cannot serialize object: ${JSON.stringify(value)}`) - } - - return value.toString() + throw new Error(`Cannot serialize value: ${JSON.stringify(value)}`) } diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index e76e99887..d1d95040e 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -30,6 +30,7 @@ export function compileSQL( // Serialize the values in the params array into PG formatted strings // and transform the array into a Record + console.log("params", params) const paramsRecord = params.reduce( (acc, param, index) => { acc[`${index + 1}`] = serialize(param) @@ -58,7 +59,8 @@ function compileBasicExpression( case `val`: params.push(exp.value) return `$${params.length}` - case `ref`: + case `ref`: + // TODO: doesn't yet support JSON(B) values which could be accessed with nested props if (exp.path.length !== 1) { throw new Error( `Compiler can't handle nested properties: ${exp.path.join(`.`)}` diff --git a/packages/electric-db-collection/tests/electric-live-query.test.ts b/packages/electric-db-collection/tests/electric-live-query.test.ts index b387f1756..a629677ab 100644 --- a/packages/electric-db-collection/tests/electric-live-query.test.ts +++ b/packages/electric-db-collection/tests/electric-live-query.test.ts @@ -54,10 +54,30 @@ const sampleUsers: Array = [ // Mock the ShapeStream module const mockSubscribe = vi.fn() +const mockRequestSnapshot = vi.fn() const mockStream = { subscribe: mockSubscribe, + requestSnapshot: (...args: any) => { + mockRequestSnapshot(...args) + const results = mockRequestSnapshot.mock.results + const lastResult = results[results.length - 1]!.value + + const subscribers = mockSubscribe.mock.calls.map(args => args[0]) + subscribers.forEach(subscriber => subscriber(lastResult.data.map((row: any) => ({ + type: `insert`, + value: row.value, + key: row.key, + })))) + } } +// Mock the requestSnapshot method +// to return an empty array of data +// since most tests don't use it +mockRequestSnapshot.mockResolvedValue({ + data: [] +}) + vi.mock(`@electric-sql/client`, async () => { const actual = await vi.importActual(`@electric-sql/client`) return { @@ -437,4 +457,98 @@ describe.each([ // Clean up subscription.unsubscribe() }) + if (autoIndex === `eager`) { + it.only(`should load more data via requestSnapshot when creating live query with higher limit`, async () => { + // Reset mocks + vi.clearAllMocks() + mockRequestSnapshot.mockResolvedValue({ + data: [ + { key: 5, value: { id: 5, name: `Eve`, age: 
30, email: `eve@example.com`, active: true } }, + { key: 6, value: { id: 6, name: `Frank`, age: 35, email: `frank@example.com`, active: true } }, + ], + }) + + // Initial sync with limited data + simulateInitialSync() + expect(electricCollection.status).toBe(`ready`) + expect(electricCollection.size).toBe(4) + expect(mockRequestSnapshot).toHaveBeenCalledTimes(0) + + // Create first live query with limit of 2 + const limitedLiveQuery = createLiveQueryCollection({ + id: `limited-users-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + active: user.active, + age: user.age, + })) + .orderBy(({ user }) => user.age, `asc`) + .limit(2), + }) + + expect(limitedLiveQuery.status).toBe(`ready`) + expect(limitedLiveQuery.size).toBe(2) // Only first 2 active users + expect(mockRequestSnapshot).toHaveBeenCalledTimes(1) + + const callArgs = (index: number) => mockRequestSnapshot.mock.calls[index]?.[0] + expect(callArgs(0)).toMatchObject({ + params: { "1": "true" }, + where: "active = $1", + orderBy: "age NULLS FIRST", + limit: 2, + }) + + // Create second live query with higher limit of 5 + const expandedLiveQuery = createLiveQueryCollection({ + id: `expanded-users-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + active: user.active, + })) + .orderBy(({ user }) => user.age, `asc`) + .limit(6), + }) + + // Wait for the live query to process + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Verify that requestSnapshot was called with the correct parameters + expect(mockRequestSnapshot).toHaveBeenCalledTimes(3) + + // Check that first it requested a limit of 6 users + expect(callArgs(1)).toMatchObject({ + params: { "1": "true" }, + where: "active = $1", + orderBy: "age NULLS FIRST", + limit: 6, + }) + + // After this initial snapshot for the new live query it receives all 3 users from the local collection + // so it still needs 3 more users to reach the limit of 6 so it requests 3 more to the sync layer + expect(callArgs(2)).toMatchObject({ + params: { "1": "true", "2": "25" }, + where: "active = $1 AND age > $2", + orderBy: "age NULLS FIRST", + limit: 3, + }) + + // The sync layer won't provide any more users so the DB is exhausted and it stops (i.e. 
doesn't request more) + + // The expanded live query should now have more data + expect(expandedLiveQuery.status).toBe(`ready`) + expect(expandedLiveQuery.size).toBe(5) // Alice, Bob, Dave from initial + Eve and Frank from additional data + }) + } }) From e3a7d5d0235b3dac674c4374c03369d49d55bfd8 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Mon, 29 Sep 2025 11:56:35 +0200 Subject: [PATCH 06/15] Fix unit test for loading more data via requestSnapshot in the Electric collection --- .../electric-db-collection/src/electric.ts | 7 +- .../tests/electric-live-query.test.ts | 104 ++++++++++++------ 2 files changed, 75 insertions(+), 36 deletions(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index bcd897dff..019e3f969 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -16,12 +16,10 @@ import { import { compileSQL } from "./sql-compiler" import type { BaseCollectionConfig, - Collection, CollectionConfig, DeleteMutationFnParams, Fn, InsertMutationFnParams, - OnLoadMoreOptions, SyncConfig, UpdateMutationFnParams, UtilsRecord, @@ -498,9 +496,9 @@ function createElectricSync>( }) return { - onLoadMore: (opts) => { + onLoadMore: async (opts) => { const snapshotParams = compileSQL(opts) - return stream.requestSnapshot(snapshotParams) + await stream.requestSnapshot(snapshotParams) }, cleanup: () => { // Unsubscribe from the stream @@ -514,4 +512,3 @@ function createElectricSync>( getSyncMetadata, } } - diff --git a/packages/electric-db-collection/tests/electric-live-query.test.ts b/packages/electric-db-collection/tests/electric-live-query.test.ts index a629677ab..48b5aef04 100644 --- a/packages/electric-db-collection/tests/electric-live-query.test.ts +++ b/packages/electric-db-collection/tests/electric-live-query.test.ts @@ -57,25 +57,34 @@ const mockSubscribe = vi.fn() const mockRequestSnapshot = vi.fn() const mockStream = { subscribe: mockSubscribe, - requestSnapshot: (...args: any) => { - mockRequestSnapshot(...args) - const results = mockRequestSnapshot.mock.results - const lastResult = results[results.length - 1]!.value - - const subscribers = mockSubscribe.mock.calls.map(args => args[0]) - subscribers.forEach(subscriber => subscriber(lastResult.data.map((row: any) => ({ - type: `insert`, + requestSnapshot: async (...args: any) => { + const result = await mockRequestSnapshot(...args) + const subscribers = mockSubscribe.mock.calls.map((args) => args[0]) + const data = [...result.data] + + const messages: Array> = data.map((row: any) => ({ value: row.value, key: row.key, - })))) - } + headers: row.headers, + })) + + if (messages.length > 0) { + // add an up-to-date message + messages.push({ + headers: { control: `up-to-date` }, + }) + } + + subscribers.forEach((subscriber) => subscriber(messages)) + return result + }, } // Mock the requestSnapshot method // to return an empty array of data // since most tests don't use it mockRequestSnapshot.mockResolvedValue({ - data: [] + data: [], }) vi.mock(`@electric-sql/client`, async () => { @@ -458,14 +467,9 @@ describe.each([ subscription.unsubscribe() }) if (autoIndex === `eager`) { - it.only(`should load more data via requestSnapshot when creating live query with higher limit`, async () => { - // Reset mocks - vi.clearAllMocks() + it(`should load more data via requestSnapshot when creating live query with higher limit`, async () => { mockRequestSnapshot.mockResolvedValue({ - data: [ - { key: 5, value: { id: 5, name: 
`Eve`, age: 30, email: `eve@example.com`, active: true } }, - { key: 6, value: { id: 6, name: `Frank`, age: 35, email: `frank@example.com`, active: true } }, - ], + data: [], }) // Initial sync with limited data @@ -496,15 +500,45 @@ describe.each([ expect(limitedLiveQuery.size).toBe(2) // Only first 2 active users expect(mockRequestSnapshot).toHaveBeenCalledTimes(1) - const callArgs = (index: number) => mockRequestSnapshot.mock.calls[index]?.[0] + const callArgs = (index: number) => + mockRequestSnapshot.mock.calls[index]?.[0] expect(callArgs(0)).toMatchObject({ - params: { "1": "true" }, - where: "active = $1", - orderBy: "age NULLS FIRST", + params: { "1": `true` }, + where: `active = $1`, + orderBy: `age NULLS FIRST`, limit: 2, }) - // Create second live query with higher limit of 5 + // Next call will return a snapshot containing 2 rows + // Calls after that will return the default empty snapshot + mockRequestSnapshot.mockResolvedValueOnce({ + data: [ + { + headers: { operation: `insert` }, + key: 5, + value: { + id: 5, + name: `Eve`, + age: 30, + email: `eve@example.com`, + active: true, + }, + }, + { + headers: { operation: `insert` }, + key: 6, + value: { + id: 6, + name: `Frank`, + age: 35, + email: `frank@example.com`, + active: true, + }, + }, + ], + }) + + // Create second live query with higher limit of 6 const expandedLiveQuery = createLiveQueryCollection({ id: `expanded-users-live-query`, startSync: true, @@ -525,26 +559,34 @@ describe.each([ await new Promise((resolve) => setTimeout(resolve, 0)) // Verify that requestSnapshot was called with the correct parameters - expect(mockRequestSnapshot).toHaveBeenCalledTimes(3) + expect(mockRequestSnapshot).toHaveBeenCalledTimes(4) // Check that first it requested a limit of 6 users expect(callArgs(1)).toMatchObject({ - params: { "1": "true" }, - where: "active = $1", - orderBy: "age NULLS FIRST", + params: { "1": `true` }, + where: `active = $1`, + orderBy: `age NULLS FIRST`, limit: 6, }) // After this initial snapshot for the new live query it receives all 3 users from the local collection // so it still needs 3 more users to reach the limit of 6 so it requests 3 more to the sync layer expect(callArgs(2)).toMatchObject({ - params: { "1": "true", "2": "25" }, - where: "active = $1 AND age > $2", - orderBy: "age NULLS FIRST", + params: { "1": `true`, "2": `25` }, + where: `active = $1 AND age > $2`, + orderBy: `age NULLS FIRST`, limit: 3, }) - // The sync layer won't provide any more users so the DB is exhausted and it stops (i.e. doesn't request more) + // The previous snapshot returned 2 more users so it still needs 1 more user to reach the limit of 6 + expect(callArgs(3)).toMatchObject({ + params: { "1": `true`, "2": `35` }, + where: `active = $1 AND age > $2`, + orderBy: `age NULLS FIRST`, + limit: 1, + }) + + // The sync layer won't provide any more users so the DB is exhausted and it stops (i.e. 
doesn't request more) // The expanded live query should now have more data expect(expandedLiveQuery.status).toBe(`ready`) From 310bd6599034a99e66be3a1ec7236165e94c0cc9 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Wed, 1 Oct 2025 10:02:58 +0200 Subject: [PATCH 07/15] Remove debug logging in electric collection --- packages/electric-db-collection/src/sql-compiler.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index d1d95040e..92ef7d287 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -30,7 +30,6 @@ export function compileSQL( // Serialize the values in the params array into PG formatted strings // and transform the array into a Record - console.log("params", params) const paramsRecord = params.reduce( (acc, param, index) => { acc[`${index + 1}`] = serialize(param) @@ -59,7 +58,7 @@ function compileBasicExpression( case `val`: params.push(exp.value) return `$${params.length}` - case `ref`: + case `ref`: // TODO: doesn't yet support JSON(B) values which could be accessed with nested props if (exp.path.length !== 1) { throw new Error( From 6df040c4556a7104c1df4e49b1b5ee9e3e272b98 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Mon, 6 Oct 2025 15:57:20 +0200 Subject: [PATCH 08/15] Update type name --- packages/electric-db-collection/src/sql-compiler.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index 92ef7d287..421c2ab7c 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -1,14 +1,12 @@ import { serialize } from "./pg-serializer" -import type { ExternalSubsetParamsRecord } from "@electric-sql/client" +import type { SubsetParams } from "@electric-sql/client" import type { IR, OnLoadMoreOptions } from "@tanstack/db" -export type CompiledSqlRecord = Omit & { +export type CompiledSqlRecord = Omit & { params?: Array } -export function compileSQL( - options: OnLoadMoreOptions -): ExternalSubsetParamsRecord { +export function compileSQL(options: OnLoadMoreOptions): SubsetParams { const { where, orderBy, limit } = options const params: Array = [] From 3f6dc70579a95cf5613a93e4e0d452c009f871c0 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Mon, 6 Oct 2025 15:57:39 +0200 Subject: [PATCH 09/15] Upgrade electric client version --- pnpm-lock.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 88dc1cb8f..a37082d80 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1372,6 +1372,9 @@ packages: '@electric-sql/client@1.0.14': resolution: {integrity: sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q==} + '@electric-sql/client@1.0.14': + resolution: {integrity: sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q==} + '@emnapi/core@1.5.0': resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} @@ -9288,6 +9291,12 @@ snapshots: '@drizzle-team/brocli@0.10.2': {} + '@electric-sql/client@1.0.14': + dependencies: + '@microsoft/fetch-event-source': 2.0.1 + optionalDependencies: + '@rollup/rollup-darwin-arm64': 4.50.1 + '@electric-sql/client@1.0.14': dependencies: '@microsoft/fetch-event-source': 
2.0.1 From dd337d8bb2910501b928a6bf158acc9320a624f9 Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Mon, 6 Oct 2025 15:59:34 +0200 Subject: [PATCH 10/15] Changeset --- .changeset/tender-carpets-cheat.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/tender-carpets-cheat.md diff --git a/.changeset/tender-carpets-cheat.md b/.changeset/tender-carpets-cheat.md new file mode 100644 index 000000000..77c9dfd73 --- /dev/null +++ b/.changeset/tender-carpets-cheat.md @@ -0,0 +1,5 @@ +--- +"@tanstack/electric-db-collection": patch +--- + +Handle predicates that are pushed down. From a118a66ee2efb5d308f66cf4bf53b3a1d5a5552c Mon Sep 17 00:00:00 2001 From: Kevin De Porre Date: Tue, 7 Oct 2025 12:24:41 +0200 Subject: [PATCH 11/15] Update lockfile --- pnpm-lock.yaml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a37082d80..88dc1cb8f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1372,9 +1372,6 @@ packages: '@electric-sql/client@1.0.14': resolution: {integrity: sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q==} - '@electric-sql/client@1.0.14': - resolution: {integrity: sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q==} - '@emnapi/core@1.5.0': resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} @@ -9291,12 +9288,6 @@ snapshots: '@drizzle-team/brocli@0.10.2': {} - '@electric-sql/client@1.0.14': - dependencies: - '@microsoft/fetch-event-source': 2.0.1 - optionalDependencies: - '@rollup/rollup-darwin-arm64': 4.50.1 - '@electric-sql/client@1.0.14': dependencies: '@microsoft/fetch-event-source': 2.0.1 From 044760b2beb42e3746cd79daee4db282a2b71c53 Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Fri, 10 Oct 2025 19:54:58 +0100 Subject: [PATCH 12/15] syncMode config --- .../electric-db-collection/src/electric.ts | 44 ++- .../tests/electric-live-query.test.ts | 323 +++++++++++++++++- .../tests/electric.test.ts | 180 ++++++++++ 3 files changed, 536 insertions(+), 11 deletions(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 019e3f969..3aff03fb3 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -20,6 +20,7 @@ import type { DeleteMutationFnParams, Fn, InsertMutationFnParams, + OnLoadMoreOptions, SyncConfig, UpdateMutationFnParams, UtilsRecord, @@ -67,6 +68,24 @@ type InferSchemaOutput = T extends StandardSchemaV1 : Record : Record +/** + * The mode of sync to use for the collection. 
+ * @default `eager` + * @description + * - `eager`: + * - syncs all data immediately on preload + * - collection will be marked as ready once the sync is complete + * - there is no incremental sync + * - `on-demand`: + * - syncs data synced in incremental snapshots as the collection is queried + * - collection will be marked as ready immediately after the first snapshot is synced + * - `progressive`: + * - syncs all data in the shape in the background + * - uses incremental snapshots during the initial sync to provide a fast path to the data required for queries + * - collection will be marked as ready once the initial sync is complete + */ +export type SyncMode = `eager` | `on-demand` | `progressive` + /** * Configuration interface for Electric collection options * @template T - The type of items in the collection @@ -86,6 +105,7 @@ export interface ElectricCollectionConfig< * Configuration options for the ElectricSQL ShapeStream */ shapeOptions: ShapeStreamOptions> + syncMode?: SyncMode } function isUpToDateMessage>( @@ -174,9 +194,11 @@ export function electricCollectionOptions( } { const seenTxids = new Store>(new Set([])) const seenSnapshots = new Store>([]) + const syncMode = config.syncMode ?? `eager` const sync = createElectricSync(config.shapeOptions, { seenTxids, seenSnapshots, + syncMode, }) /** @@ -332,12 +354,12 @@ export function electricCollectionOptions( function createElectricSync>( shapeOptions: ShapeStreamOptions>, options: { + syncMode: SyncMode seenTxids: Store> seenSnapshots: Store> } ): SyncConfig { - const { seenTxids } = options - const { seenSnapshots } = options + const { seenTxids, seenSnapshots, syncMode } = options // Store for the relation schema information const relationSchema = new Store(undefined) @@ -383,6 +405,8 @@ function createElectricSync>( const stream = new ShapeStream({ ...shapeOptions, + log: syncMode === `on-demand` ? `changes_only` : undefined, + // TODO: under the `on-demand` we should be setting the offset to `now` when there is no saved offset rather than -1 signal: abortController.signal, onError: (errorParams) => { // Just immediately mark ready if there's an error to avoid blocking @@ -495,11 +519,19 @@ function createElectricSync>( } }) + // Only set onLoadMore if the sync mode is not eager, this indicates to the sync + // layer can load more data on demand via the requestSnapshot method when, + // the syncMode = `on-demand` or `progressive` + const onLoadMore = + syncMode === `eager` + ? 
undefined + : async (opts: OnLoadMoreOptions) => { + const snapshotParams = compileSQL(opts) + await stream.requestSnapshot(snapshotParams) + } + return { - onLoadMore: async (opts) => { - const snapshotParams = compileSQL(opts) - await stream.requestSnapshot(snapshotParams) - }, + onLoadMore, cleanup: () => { // Unsubscribe from the stream unsubscribeStream() diff --git a/packages/electric-db-collection/tests/electric-live-query.test.ts b/packages/electric-db-collection/tests/electric-live-query.test.ts index 48b5aef04..73e7d21da 100644 --- a/packages/electric-db-collection/tests/electric-live-query.test.ts +++ b/packages/electric-db-collection/tests/electric-live-query.test.ts @@ -468,14 +468,45 @@ describe.each([ }) if (autoIndex === `eager`) { it(`should load more data via requestSnapshot when creating live query with higher limit`, async () => { + // Create a new electric collection with on-demand syncMode for this test + vi.clearAllMocks() + + const testSubscriber = vi.fn<(messages: Array>) => void>() + mockSubscribe.mockImplementation((callback) => { + testSubscriber.mockImplementation(callback) + return () => {} + }) + + const testElectricCollection = createCollection({ + ...electricCollectionOptions({ + id: `test-incremental-loading`, + shapeOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + syncMode: `on-demand`, + getKey: (user: User) => user.id, + }), + startSync: true, + autoIndex: `eager` as const, + }) + mockRequestSnapshot.mockResolvedValue({ data: [], }) // Initial sync with limited data - simulateInitialSync() - expect(electricCollection.status).toBe(`ready`) - expect(electricCollection.size).toBe(4) + testSubscriber([ + ...sampleUsers.map((user) => ({ + key: user.id.toString(), + value: user, + headers: { operation: `insert` as const }, + })), + { headers: { control: `up-to-date` as const } }, + ]) + + expect(testElectricCollection.status).toBe(`ready`) + expect(testElectricCollection.size).toBe(4) expect(mockRequestSnapshot).toHaveBeenCalledTimes(0) // Create first live query with limit of 2 @@ -484,7 +515,7 @@ describe.each([ startSync: true, query: (q) => q - .from({ user: electricCollection }) + .from({ user: testElectricCollection }) .where(({ user }) => eq(user.active, true)) .select(({ user }) => ({ id: user.id, @@ -544,7 +575,7 @@ describe.each([ startSync: true, query: (q) => q - .from({ user: electricCollection }) + .from({ user: testElectricCollection }) .where(({ user }) => eq(user.active, true)) .select(({ user }) => ({ id: user.id, @@ -594,3 +625,285 @@ describe.each([ }) } }) + +// Tests specifically for syncMode behavior with live queries +describe(`Electric Collection with Live Query - syncMode integration`, () => { + let subscriber: (messages: Array>) => void + + function createElectricCollectionWithSyncMode( + syncMode: `eager` | `on-demand` | `progressive` + ) { + vi.clearAllMocks() + + mockSubscribe.mockImplementation((callback) => { + subscriber = callback + return () => {} + }) + + mockRequestSnapshot.mockResolvedValue({ + data: [], + }) + + const config = { + id: `electric-users-${syncMode}`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `users`, + }, + }, + syncMode, + getKey: (user: User) => user.id, + } + + const options = electricCollectionOptions(config) + return createCollection({ + ...options, + startSync: true, + autoIndex: `eager` as const, + }) + } + + function simulateInitialSync(users: Array = sampleUsers) { + const messages: Array> = users.map((user) => ({ + key: user.id.toString(), + value: 
user, + headers: { operation: `insert` }, + })) + + messages.push({ + headers: { control: `up-to-date` }, + }) + + subscriber(messages) + } + + it(`should trigger requestSnapshot in on-demand mode when live query needs more data`, async () => { + const electricCollection = createElectricCollectionWithSyncMode(`on-demand`) + + // Initial sync with limited data + simulateInitialSync([sampleUsers[0]!, sampleUsers[1]!]) // Only Alice and Bob + expect(electricCollection.status).toBe(`ready`) + expect(electricCollection.size).toBe(2) + expect(mockRequestSnapshot).toHaveBeenCalledTimes(0) + + // Mock requestSnapshot to return additional data + mockRequestSnapshot.mockResolvedValueOnce({ + data: [ + { + headers: { operation: `insert` }, + key: 3, + value: sampleUsers[2]!, // Charlie + }, + { + headers: { operation: `insert` }, + key: 4, + value: sampleUsers[3]!, // Dave + }, + ], + }) + + // Create live query with limit that exceeds available data + const liveQuery = createLiveQueryCollection({ + id: `on-demand-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => eq(user.active, true)) + .orderBy(({ user }) => user.age, `asc`) + .limit(5), + }) + + // Wait for the live query to process + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should have requested more data from Electric + expect(mockRequestSnapshot).toHaveBeenCalled() + expect(liveQuery.size).toBeGreaterThan(2) + }) + + it(`should trigger requestSnapshot in progressive mode when live query needs more data`, async () => { + const electricCollection = + createElectricCollectionWithSyncMode(`progressive`) + + // Initial sync with limited data + simulateInitialSync([sampleUsers[0]!, sampleUsers[1]!]) // Only Alice and Bob + expect(electricCollection.status).toBe(`ready`) + expect(electricCollection.size).toBe(2) + + // Mock requestSnapshot to return additional data + mockRequestSnapshot.mockResolvedValueOnce({ + data: [ + { + headers: { operation: `insert` }, + key: 3, + value: sampleUsers[2]!, // Charlie + }, + ], + }) + + // Create live query that needs more data + createLiveQueryCollection({ + id: `progressive-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(3), + }) + + // Wait for the live query to process + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should have requested more data from Electric + expect(mockRequestSnapshot).toHaveBeenCalled() + }) + + it(`should NOT trigger requestSnapshot in eager mode even when live query needs more data`, async () => { + const electricCollection = createElectricCollectionWithSyncMode(`eager`) + + // Initial sync with limited data + simulateInitialSync([sampleUsers[0]!, sampleUsers[1]!]) // Only Alice and Bob + expect(electricCollection.status).toBe(`ready`) + expect(electricCollection.size).toBe(2) + expect(mockRequestSnapshot).toHaveBeenCalledTimes(0) + + // Create live query with limit that exceeds available data + const liveQuery = createLiveQueryCollection({ + id: `eager-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => eq(user.active, true)) + .orderBy(({ user }) => user.age, `asc`) + .limit(5), + }) + + // Wait for the live query to process + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should NOT have requested more data (eager mode doesn't support incremental loading) + expect(mockRequestSnapshot).not.toHaveBeenCalled() + 
expect(liveQuery.size).toBe(2) // Only has the initially synced data + }) + + it(`should request additional snapshots progressively as live query expands in on-demand mode`, async () => { + const electricCollection = createElectricCollectionWithSyncMode(`on-demand`) + + // Initial sync with just Alice + simulateInitialSync([sampleUsers[0]!]) + expect(electricCollection.size).toBe(1) + + const callArgs = (index: number) => + mockRequestSnapshot.mock.calls[index]?.[0] + + // First snapshot returns Bob and Charlie + mockRequestSnapshot.mockResolvedValueOnce({ + data: [ + { + headers: { operation: `insert` }, + key: 2, + value: sampleUsers[1]!, // Bob + }, + { + headers: { operation: `insert` }, + key: 3, + value: sampleUsers[2]!, // Charlie + }, + ], + }) + + // Create live query with limit of 3 + createLiveQueryCollection({ + id: `expanding-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .orderBy(({ user }) => user.age, `asc`) + .limit(3), + }) + + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should have requested snapshot for limit 3 + expect(mockRequestSnapshot).toHaveBeenCalledWith( + expect.objectContaining({ + limit: 3, + orderBy: `age NULLS FIRST`, + }) + ) + + // After receiving Bob and Charlie, the collection now has 3 users (Alice + Bob + Charlie) + // but it still requests 2 more... TODO: check if this is correct? + expect(callArgs(1)).toMatchObject({ + limit: 2, + orderBy: `age NULLS FIRST`, + }) + }) + + it(`should pass correct WHERE clause to requestSnapshot when live query has filters`, async () => { + const electricCollection = createElectricCollectionWithSyncMode(`on-demand`) + + simulateInitialSync([]) + expect(electricCollection.size).toBe(0) + + // Create filtered live query + createLiveQueryCollection({ + id: `filtered-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => eq(user.active, true)) + .orderBy(({ user }) => user.name, `desc`) + .limit(10), + }) + + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should have requested snapshot with WHERE clause + expect(mockRequestSnapshot).toHaveBeenCalledWith( + expect.objectContaining({ + where: `active = $1`, + params: { "1": `true` }, + orderBy: `name DESC NULLS FIRST`, + limit: 10, + }) + ) + }) + + it(`should handle complex filters in requestSnapshot`, async () => { + const electricCollection = + createElectricCollectionWithSyncMode(`progressive`) + + simulateInitialSync([]) + + // Create live query with complex WHERE clause + createLiveQueryCollection({ + id: `complex-filter-live-query`, + startSync: true, + query: (q) => + q + .from({ user: electricCollection }) + .where(({ user }) => gt(user.age, 20)) + .orderBy(({ user }) => user.age, `asc`) + .limit(5), + }) + + await new Promise((resolve) => setTimeout(resolve, 0)) + + // Should have requested snapshot with complex WHERE clause + expect(mockRequestSnapshot).toHaveBeenCalledWith( + expect.objectContaining({ + where: `age > $1`, + params: { "1": `20` }, + orderBy: `age NULLS FIRST`, + limit: 5, + }) + ) + }) +}) diff --git a/packages/electric-db-collection/tests/electric.test.ts b/packages/electric-db-collection/tests/electric.test.ts index ba21e5c6d..aae5dee56 100644 --- a/packages/electric-db-collection/tests/electric.test.ts +++ b/packages/electric-db-collection/tests/electric.test.ts @@ -19,8 +19,10 @@ import type { StandardSchemaV1 } from "@standard-schema/spec" // Mock the ShapeStream module const mockSubscribe = vi.fn() +const 
mockRequestSnapshot = vi.fn() const mockStream = { subscribe: mockSubscribe, + requestSnapshot: mockRequestSnapshot, } vi.mock(`@electric-sql/client`, async () => { @@ -50,6 +52,9 @@ describe(`Electric Integration`, () => { return () => {} }) + // Reset mock requestSnapshot + mockRequestSnapshot.mockResolvedValue(undefined) + // Create collection with Electric configuration const config = { id: `test`, @@ -1246,6 +1251,181 @@ describe(`Electric Integration`, () => { // Snapshot txid should also resolve await expect(testCollection.utils.awaitTxId(105)).resolves.toBe(true) }) + }) + + // Tests for syncMode configuration + describe(`syncMode configuration`, () => { + it(`should not request snapshots during subscription in eager mode`, () => { + vi.clearAllMocks() + + const config = { + id: `eager-no-snapshot-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + }, + syncMode: `eager` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + const testCollection = createCollection(electricCollectionOptions(config)) + + // Subscribe and try to get more data + const subscription = testCollection.subscribeChanges(() => {}) + + // In eager mode, requestSnapshot should not be called + expect(mockRequestSnapshot).not.toHaveBeenCalled() + + subscription.unsubscribe() + }) + + it(`should request incremental snapshots in on-demand mode when syncMore is called`, async () => { + vi.clearAllMocks() + + const config = { + id: `on-demand-snapshot-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + }, + syncMode: `on-demand` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + const testCollection = createCollection(electricCollectionOptions(config)) + + // Send up-to-date to mark collection as ready + subscriber([ + { + headers: { control: `up-to-date` }, + }, + ]) + + // In on-demand mode, calling syncMore should request a snapshot + await testCollection.syncMore({ limit: 10 }) + + // Verify requestSnapshot was called + expect(mockRequestSnapshot).toHaveBeenCalledWith( + expect.objectContaining({ + limit: 10, + params: {}, + }) + ) + }) + + it(`should request incremental snapshots in progressive mode when syncMore is called`, async () => { + vi.clearAllMocks() + + const config = { + id: `progressive-snapshot-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + }, + syncMode: `progressive` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + const testCollection = createCollection(electricCollectionOptions(config)) + + // Send up-to-date to mark collection as ready + subscriber([ + { + headers: { control: `up-to-date` }, + }, + ]) + + // In progressive mode, calling syncMore should request a snapshot + await testCollection.syncMore({ limit: 20 }) + + // Verify requestSnapshot was called + expect(mockRequestSnapshot).toHaveBeenCalledWith( + expect.objectContaining({ + limit: 20, + params: {}, + }) + ) + }) + + it(`should not request snapshots when syncMore is called in eager mode`, async () => { + vi.clearAllMocks() + + const config = { + id: `eager-no-syncmore-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + }, + syncMode: `eager` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + const testCollection = createCollection(electricCollectionOptions(config)) + + // Send up-to-date to mark collection as ready + subscriber([ + { + headers: { 
control: `up-to-date` }, + }, + ]) + + // In eager mode, syncMore should do nothing + await testCollection.syncMore({ limit: 10 }) + + // Verify requestSnapshot was NOT called + expect(mockRequestSnapshot).not.toHaveBeenCalled() + }) + + it(`should handle progressive mode syncing in background`, async () => { + vi.clearAllMocks() + + const config = { + id: `progressive-background-sync-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + }, + syncMode: `progressive` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + const testCollection = createCollection(electricCollectionOptions(config)) + + // Send initial data and up-to-date + subscriber([ + { + key: `1`, + value: { id: 1, name: `Initial User` }, + headers: { operation: `insert` }, + }, + { + headers: { control: `up-to-date` }, + }, + ]) + + // Collection should be ready with initial data + expect(testCollection.status).toBe(`ready`) + expect(testCollection.has(1)).toBe(true) + + // Should still be able to request more data incrementally + await testCollection.syncMore({ limit: 10 }) + expect(mockRequestSnapshot).toHaveBeenCalled() + }) it(`should resync after garbage collection and new subscription`, () => { // Use fake timers for this test From 2629fe4c292dd80aa1c48ceea012115e3f9845b4 Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Fri, 10 Oct 2025 20:09:51 +0100 Subject: [PATCH 13/15] docs: Add notes about known unhandled rejection warnings in timeout tests These tests have a known issue with unhandled rejection warnings that exists in the main branch. The test functionality works correctly, but vitest reports unhandled rejections when testing timeout behavior with fake timers. The tests properly validate that timeouts occur and are handled, but the setTimeout callback in awaitMatch creates promise rejections that vitest flags as unhandled, even though they are properly caught by the test assertions. 
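A minimal, self-contained sketch of the pattern (vitest APIs only; `fakeInsert` and the timeout wiring are illustrative stand-ins for the real collection insert exercised in the tests below):

    import { expect, it, vi } from "vitest"

    // Illustrative stand-in for the real collection insert used in the tests
    function fakeInsert() {
      return {
        isPersisted: {
          promise: new Promise((_resolve, reject) =>
            setTimeout(
              () => reject(new Error(`Timeout waiting for custom match function`)),
              1000
            )
          ),
        },
      }
    }

    it(`times out without an unhandled rejection warning`, async () => {
      vi.useFakeTimers()
      const tx = fakeInsert()
      // Register the rejection assertion before advancing the timers, so the
      // promise already has a handler attached when the timeout fires
      const rejection = expect(tx.isPersisted.promise).rejects.toThrow(
        `Timeout waiting for custom match function`
      )
      await vi.runOnlyPendingTimersAsync()
      await rejection
      vi.useRealTimers()
    })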
--- .../tests/electric.test.ts | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/electric-db-collection/tests/electric.test.ts b/packages/electric-db-collection/tests/electric.test.ts index 57ef71629..49bea9a02 100644 --- a/packages/electric-db-collection/tests/electric.test.ts +++ b/packages/electric-db-collection/tests/electric.test.ts @@ -733,6 +733,9 @@ describe(`Electric Integration`, () => { expect(testCollection.has(1)).toBe(true) }) + // NOTE: This test has a known issue with unhandled rejection warnings + // This is a pre-existing issue from main branch (not caused by merge) + // The test functionality works correctly, but vitest reports unhandled rejections it(`should timeout with custom match function when no match found`, async () => { vi.useFakeTimers() @@ -759,14 +762,16 @@ describe(`Electric Integration`, () => { const testCollection = createCollection(electricCollectionOptions(config)) const tx = testCollection.insert({ id: 1, name: `Timeout Test` }) - // Add catch handler to prevent global unhandled rejection detection - tx.isPersisted.promise.catch(() => {}) + // Capture the rejection promise before advancing timers + const rejectionPromise = expect(tx.isPersisted.promise).rejects.toThrow( + `Timeout waiting for custom match function` + ) // Advance timers to trigger timeout await vi.runOnlyPendingTimersAsync() // Should timeout and fail - await expect(tx.isPersisted.promise).rejects.toThrow() + await rejectionPromise vi.useRealTimers() }) @@ -839,6 +844,9 @@ describe(`Electric Integration`, () => { expect(options.onDelete).toBeDefined() }) + // NOTE: This test has a known issue with unhandled rejection warnings + // This is a pre-existing issue from main branch (not caused by merge) + // The test functionality works correctly, but vitest reports unhandled rejections it(`should cleanup pending matches on timeout without memory leaks`, async () => { vi.useFakeTimers() @@ -867,16 +875,16 @@ describe(`Electric Integration`, () => { // Start insert that will timeout const tx = testCollection.insert({ id: 1, name: `Timeout Test` }) - // Add catch handler to prevent global unhandled rejection detection - tx.isPersisted.promise.catch(() => {}) + // Capture the rejection promise before advancing timers + const rejectionPromise = expect(tx.isPersisted.promise).rejects.toThrow( + `Timeout waiting for custom match function` + ) // Advance timers to trigger timeout await vi.runOnlyPendingTimersAsync() // Should timeout and fail - await expect(tx.isPersisted.promise).rejects.toThrow( - `Timeout waiting for custom match function` - ) + await rejectionPromise // Send a message after timeout - should not cause any side effects // This verifies that the pending match was properly cleaned up From 8a09cb8e39d03a67a70b2756a742f36f208affcf Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Sat, 11 Oct 2025 14:40:09 +0100 Subject: [PATCH 14/15] default to offset=now for on-demand mode --- .../electric-db-collection/src/electric.ts | 9 +- .../tests/electric.test.ts | 117 ++++++++++++++++++ 2 files changed, 125 insertions(+), 1 deletion(-) diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 87a63f805..8d1add75a 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -678,8 +678,15 @@ function createElectricSync>( const stream = new ShapeStream({ ...shapeOptions, + // In on-demand mode, we only want to sync changes, so we set the 
log to `changes_only` log: syncMode === `on-demand` ? `changes_only` : undefined, - // TODO: under the `on-demand` we should be setting the offset to `now` when there is no saved offset rather than -1 + // In on-demand mode, we only need the changes from the point of time the collection was created + // so we default to `now` when there is no saved offset. + offset: shapeOptions.offset + ? shapeOptions.offset + : syncMode === `on-demand` + ? `now` + : undefined, signal: abortController.signal, onError: (errorParams) => { // Just immediately mark ready if there's an error to avoid blocking diff --git a/packages/electric-db-collection/tests/electric.test.ts b/packages/electric-db-collection/tests/electric.test.ts index 49bea9a02..17df94206 100644 --- a/packages/electric-db-collection/tests/electric.test.ts +++ b/packages/electric-db-collection/tests/electric.test.ts @@ -1790,6 +1790,123 @@ describe(`Electric Integration`, () => { expect(mockRequestSnapshot).toHaveBeenCalled() }) + it(`should default offset to 'now' in on-demand mode when no offset provided`, async () => { + vi.clearAllMocks() + + // Import ShapeStream to check constructor calls + const { ShapeStream } = await import(`@electric-sql/client`) + + const config = { + id: `on-demand-offset-now-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + // No offset provided + }, + syncMode: `on-demand` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + createCollection(electricCollectionOptions(config)) + + // Check that ShapeStream was called with offset: 'now' + expect(ShapeStream).toHaveBeenCalledWith( + expect.objectContaining({ + offset: `now`, + }) + ) + }) + + it(`should use undefined offset in eager mode when no offset provided`, async () => { + vi.clearAllMocks() + + const { ShapeStream } = await import(`@electric-sql/client`) + + const config = { + id: `eager-offset-undefined-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + // No offset provided + }, + syncMode: `eager` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + createCollection(electricCollectionOptions(config)) + + // Check that ShapeStream was called with offset: undefined + expect(ShapeStream).toHaveBeenCalledWith( + expect.objectContaining({ + offset: undefined, + }) + ) + }) + + it(`should use undefined offset in progressive mode when no offset provided`, async () => { + vi.clearAllMocks() + + const { ShapeStream } = await import(`@electric-sql/client`) + + const config = { + id: `progressive-offset-undefined-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + // No offset provided + }, + syncMode: `progressive` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + createCollection(electricCollectionOptions(config)) + + // Check that ShapeStream was called with offset: undefined + expect(ShapeStream).toHaveBeenCalledWith( + expect.objectContaining({ + offset: undefined, + }) + ) + }) + + it(`should use explicit offset when provided regardless of syncMode`, async () => { + vi.clearAllMocks() + + const { ShapeStream } = await import(`@electric-sql/client`) + + const config = { + id: `explicit-offset-test`, + shapeOptions: { + url: `http://test-url`, + params: { + table: `test_table`, + }, + offset: -1 as any, // Explicit offset + }, + syncMode: `on-demand` as const, + getKey: (item: Row) => item.id as number, + startSync: true, + } + + 
+      createCollection(electricCollectionOptions(config))
+
+      // Check that ShapeStream was called with the explicit offset
+      expect(ShapeStream).toHaveBeenCalledWith(
+        expect.objectContaining({
+          offset: -1,
+        })
+      )
+    })

From c9a8721522f2290f61a259be6f4983a332040aa1 Mon Sep 17 00:00:00 2001
From: Sam Willis
Date: Sat, 11 Oct 2025 15:23:00 +0100
Subject: [PATCH 15/15] better handle setting ready under the different modes

---
 .../electric-db-collection/src/electric.ts | 11 +-
 .../tests/electric.test.ts | 241 ++++++++++++++++++
 2 files changed, 249 insertions(+), 3 deletions(-)

diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts
index 8d1add75a..de3dd0a88 100644
--- a/packages/electric-db-collection/src/electric.ts
+++ b/packages/electric-db-collection/src/electric.ts
@@ -716,6 +716,7 @@ function createElectricSync>(
     unsubscribeStream = stream.subscribe((messages: Array>) => {
       let hasUpToDate = false
+      let hasSnapshotEnd = false

       for (const message of messages) {
         // Add message to current batch buffer (for race condition handling)
@@ -780,6 +781,7 @@ function createElectricSync>(
           })
         } else if (isSnapshotEndMessage(message)) {
           newSnapshots.push(parseSnapshotMessage(message))
+          hasSnapshotEnd = true
         } else if (isUpToDateMessage(message)) {
           hasUpToDate = true
         } else if (isMustRefetchMessage(message)) {
@@ -797,10 +799,11 @@ function createElectricSync>(
           // Reset hasUpToDate so we continue accumulating changes until next up-to-date
           hasUpToDate = false
+          hasSnapshotEnd = false
         }
       }

-      if (hasUpToDate) {
+      if (hasUpToDate || hasSnapshotEnd) {
         // Clear the current batch buffer since we're now up-to-date
         currentBatchMessages.setState(() => [])
@@ -810,8 +813,10 @@ function createElectricSync>(
           transactionStarted = false
         }

-        // Mark the collection as ready now that sync is up to date
-        markReady()
+        if (hasUpToDate || (hasSnapshotEnd && syncMode === `on-demand`)) {
+          // Mark the collection as ready now that sync is up to date
+          markReady()
+        }

         // Always commit txids when we receive up-to-date, regardless of transaction state
         seenTxids.setState((currentTxids) => {
diff --git a/packages/electric-db-collection/tests/electric.test.ts b/packages/electric-db-collection/tests/electric.test.ts
index 17df94206..9ccc8c233 100644
--- a/packages/electric-db-collection/tests/electric.test.ts
+++ b/packages/electric-db-collection/tests/electric.test.ts
@@ -1906,7 +1906,248 @@ describe(`Electric Integration`, () => {
         })
       )
     })
+  })
+
+  // Tests for commit and ready behavior with snapshot-end and up-to-date messages
+  describe(`Commit and ready behavior`, () => {
+    it(`should commit on snapshot-end in eager mode but not mark ready`, () => {
+      const config = {
+        id: `eager-snapshot-end-test`,
+        shapeOptions: {
+          url: `http://test-url`,
+          params: { table: `test_table` },
+        },
+        syncMode: `eager` as const,
+        getKey: (item: Row) => item.id as number,
+        startSync: true,
+      }
+
+      const testCollection = createCollection(electricCollectionOptions(config))
+
+      // Send data followed by snapshot-end (but no up-to-date)
+      subscriber([
+        {
+          key: `1`,
+          value: { id: 1, name: `Test User` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: {
+            control: `snapshot-end`,
+            xmin: `100`,
+            xmax: `110`,
+            xip_list: [],
+          },
+        },
+      ])
+
+      // Data should be committed (available in state)
+      expect(testCollection.has(1)).toBe(true)
+      expect(testCollection.get(1)).toEqual({ id: 1, name: `Test User` })
+
+      // But collection should NOT be marked as ready yet in eager mode
+      expect(testCollection.status).toBe(`loading`)
+
+      // Now send up-to-date
+      subscriber([
+        {
+          headers: { control: `up-to-date` },
+        },
+      ])
+
+      // Now it should be ready
+      expect(testCollection.status).toBe(`ready`)
+    })
+
+    it(`should commit and mark ready on snapshot-end in on-demand mode`, () => {
+      const config = {
+        id: `on-demand-snapshot-end-test`,
+        shapeOptions: {
+          url: `http://test-url`,
+          params: { table: `test_table` },
+        },
+        syncMode: `on-demand` as const,
+        getKey: (item: Row) => item.id as number,
+        startSync: true,
+      }
+
+      const testCollection = createCollection(electricCollectionOptions(config))
+
+      // Send data followed by snapshot-end (but no up-to-date)
+      subscriber([
+        {
+          key: `1`,
+          value: { id: 1, name: `Test User` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: {
+            control: `snapshot-end`,
+            xmin: `100`,
+            xmax: `110`,
+            xip_list: [],
+          },
+        },
+      ])
+
+      // Data should be committed (available in state)
+      expect(testCollection.has(1)).toBe(true)
+      expect(testCollection.get(1)).toEqual({ id: 1, name: `Test User` })
+
+      // Collection SHOULD be marked as ready in on-demand mode
+      expect(testCollection.status).toBe(`ready`)
+    })
+
+    it(`should commit on snapshot-end in progressive mode but not mark ready`, () => {
+      const config = {
+        id: `progressive-snapshot-end-test`,
+        shapeOptions: {
+          url: `http://test-url`,
+          params: { table: `test_table` },
+        },
+        syncMode: `progressive` as const,
+        getKey: (item: Row) => item.id as number,
+        startSync: true,
+      }
+
+      const testCollection = createCollection(electricCollectionOptions(config))
+
+      // Send data followed by snapshot-end (but no up-to-date)
+      subscriber([
+        {
+          key: `1`,
+          value: { id: 1, name: `Test User` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: {
+            control: `snapshot-end`,
+            xmin: `100`,
+            xmax: `110`,
+            xip_list: [],
+          },
+        },
+      ])
+
+      // Data should be committed (available in state)
+      expect(testCollection.has(1)).toBe(true)
+      expect(testCollection.get(1)).toEqual({ id: 1, name: `Test User` })
+
+      // But collection should NOT be marked as ready yet in progressive mode
+      expect(testCollection.status).toBe(`loading`)
+
+      // Now send up-to-date
+      subscriber([
+        {
+          headers: { control: `up-to-date` },
+        },
+      ])
+
+      // Now it should be ready
+      expect(testCollection.status).toBe(`ready`)
+    })
+
+    it(`should commit multiple snapshot-end messages before up-to-date in eager mode`, () => {
+      const config = {
+        id: `eager-multiple-snapshots-test`,
+        shapeOptions: {
+          url: `http://test-url`,
+          params: { table: `test_table` },
+        },
+        syncMode: `eager` as const,
+        getKey: (item: Row) => item.id as number,
+        startSync: true,
+      }
+
+      const testCollection = createCollection(electricCollectionOptions(config))
+
+      // First snapshot with data
+      subscriber([
+        {
+          key: `1`,
+          value: { id: 1, name: `User 1` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: {
+            control: `snapshot-end`,
+            xmin: `100`,
+            xmax: `110`,
+            xip_list: [],
+          },
+        },
+      ])
+
+      // First data should be committed
+      expect(testCollection.has(1)).toBe(true)
+      expect(testCollection.status).toBe(`loading`)
+
+      // Second snapshot with more data
+      subscriber([
+        {
+          key: `2`,
+          value: { id: 2, name: `User 2` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: {
+            control: `snapshot-end`,
+            xmin: `110`,
+            xmax: `120`,
+            xip_list: [],
+          },
+        },
+      ])
+
+      // Second data should also be committed
+      expect(testCollection.has(2)).toBe(true)
+      expect(testCollection.size).toBe(2)
+      expect(testCollection.status).toBe(`loading`)
+
+      // Finally send up-to-date
+      subscriber([
+        {
+          headers: { control: `up-to-date` },
+        },
+      ])
+
+      // Now should be ready
+      expect(testCollection.status).toBe(`ready`)
+    })
+
+    it(`should handle up-to-date without snapshot-end (traditional behavior)`, () => {
+      const config = {
+        id: `traditional-up-to-date-test`,
+        shapeOptions: {
+          url: `http://test-url`,
+          params: { table: `test_table` },
+        },
+        syncMode: `eager` as const,
+        getKey: (item: Row) => item.id as number,
+        startSync: true,
+      }
+
+      const testCollection = createCollection(electricCollectionOptions(config))
+
+      // Send data followed by up-to-date (no snapshot-end)
+      subscriber([
+        {
+          key: `1`,
+          value: { id: 1, name: `Test User` },
+          headers: { operation: `insert` },
+        },
+        {
+          headers: { control: `up-to-date` },
+        },
+      ])
+
+      // Data should be committed and collection ready
+      expect(testCollection.has(1)).toBe(true)
+      expect(testCollection.status).toBe(`ready`)
+    })
+  })
+
+  describe(`syncMode configuration - GC and resync`, () => {
     it(`should resync after garbage collection and new subscription`, () => {
       // Use fake timers for this test
       vi.useFakeTimers()