From 4be48175ab4661dc2cdaf8016259f1fc3793d438 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Mon, 29 Sep 2025 07:22:26 -0400 Subject: [PATCH 01/15] WIP: allow relation queries, add author filter, add tests --- .../database/features/contentAccess.feature | 2 +- .../database/features/queryConcepts.feature | 66 +++++++ .../features/step-definitions/stepdefs.ts | 177 +++++++++++++----- packages/database/src/lib/queries.ts | 36 +++- 4 files changed, 225 insertions(+), 56 deletions(-) create mode 100644 packages/database/features/queryConcepts.feature diff --git a/packages/database/features/contentAccess.feature b/packages/database/features/contentAccess.feature index 9539a09ab..b071974fe 100644 --- a/packages/database/features/contentAccess.feature +++ b/packages/database/features/contentAccess.feature @@ -17,7 +17,7 @@ Feature: Content access And the user user3 opens the Roam plugin in space s1 And the user user3 opens the Roam plugin in space s2 And Document are added to the database: - | @id | _space_id | source_local_id | _author_id | created | last_modified | + | $id | _space_id | source_local_id | _author_id | created | last_modified | | d1 | s1 | abc | user1 | 2025/01/01 | 2025/01/01 | | d2 | s1 | def | user2 | 2025/01/01 | 2025/01/01 | | d3 | s2 | ghi | user3 | 2025/01/01 | 2025/01/01 | diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature new file mode 100644 index 000000000..e81cb3a9d --- /dev/null +++ b/packages/database/features/queryConcepts.feature @@ -0,0 +1,66 @@ +Feature: Concept upsert + User story: + * As a user of the Roam plugin + * Logged in through a given space's anonymous account + * With existing concepts + * I want to make various concept queries + + Acceptance criteria: + * The queries should succeed + + Background: + Given the database is blank + And the user user1 opens the Roam plugin in space s1 + And the user user2 opens the Roam plugin in space s1 + And the user user3 opens the Roam plugin in space s1 + And Document are added to the database: + | $id | source_local_id | created | last_modified | _author_id | _space_id | + | d1 | d1 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d5 | d5 | 2025/01/01 | 2025/01/01 | user2 | s1 | + And Content are added to the database: + | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | + | ct1 | ct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct5 | ct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | + And Concept are added to the database: + | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | _schema_id | literal_content | reference_content | + | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | + | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | + And Concept are added to the database: + | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | literal_content | reference_content | + | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + And Concept are added to the database: + | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | literal_content | @_reference_content | + | c6 
| opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | + + Scenario Outline: Query all nodes + And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | + | c4 | claim 3 | s1 | user3 | false | c1 | {} | + | c6 | opposes 1 | s1 | user2 | false | c5 | {"target": "c3", "source": "c2"} | + + Scenario Outline: Query node schemas + And a user logged in space s1 and querying nodes with these parameters: '{}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | literal_content | reference_content | _represented_by_id | + | c1 | Claim | s1 | user1 | true | | {} | {} | ct1 | + | c5 | Opposes | s1 | user1 | true | | {"roles": ["target", "source"]} | {} | ct5 | + + Scenario Outline: Query by node types + And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":["ct1"]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | literal_content | reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c4 | claim 3 | s1 | user3 | false | c1 | {} | {} | + + Scenario Outline: Query by author + And a user logged in space s1 and querying nodes with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | literal_content | @_reference_content | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c6 | opposes 1 | s1 | user2 | false | c5 | {} | {"target": "c3", "source": "c2"} | diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index d9056556b..100e80a07 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -3,6 +3,7 @@ import { Given, When, Then, world, type DataTable } from "@cucumber/cucumber"; import { createClient } from "@supabase/supabase-js"; import { Constants, type Database, type Enums } from "@repo/database/dbTypes"; import { getVariant, config } from "@repo/database/dbDotEnv"; +import { getNodes } from "@repo/database/lib/queries"; import { spaceAnonUserEmail, @@ -26,8 +27,8 @@ const getAnonymousClient = () => { ); } return createClient( - process.env.SUPABASE_URL!, - process.env.SUPABASE_ANON_KEY!, + process.env.SUPABASE_URL, + process.env.SUPABASE_ANON_KEY, ); }; @@ -38,8 +39,8 @@ const getServiceClient = () => { ); } return createClient( - process.env.SUPABASE_URL!, - process.env.SUPABASE_SERVICE_ROLE_KEY!, + process.env.SUPABASE_URL, + process.env.SUPABASE_SERVICE_ROLE_KEY, ); }; @@ -49,6 +50,8 @@ Given("the database is blank", async () => { const client = getServiceClient(); let r = await client.from("Content").delete().neq("id", -1); assert.equal(r.error, null); + r = await client.from("Document").delete().neq("id", -1); + assert.equal(r.error, null); r = await client.from("Concept").delete().neq("id", -1); assert.equal(r.error, null); r = await client.from("AgentIdentifier").delete().neq("account_id", -1); @@ -57,39 +60,74 @@ Given("the database is blank", async () => { 
assert.equal(r.error, null); r = await client.from("Space").delete().neq("id", -1); assert.equal(r.error, null); + world.localRefs = {}; }); const substituteLocalReferences = ( - row: Record, + row: Record, localRefs: Record, -): Record => - Object.fromEntries( +): Record => { + const substituteLocalReferencesRec = (v: any): any => { + if (typeof v === "string") { + return localRefs[v]; + } + if (Array.isArray(v)) return v.map(substituteLocalReferencesRec); + if (typeof v === "object") + return Object.fromEntries( + Object.entries(v).map(([k, v]) => [k, substituteLocalReferencesRec(v)]), + ); + console.error("could not substitute", typeof v, v); + }; + + const processKV = ([k, v]: [string, string]) => { + let v2: any = v; + const isJson = k.charAt(0) === "@"; + if (isJson) { + k = k.substring(1); + v2 = JSON.parse(v2); + } + const isJsonObject = isJson && "{[".includes(v.charAt(0)); + if (k.charAt(0) === "_") { + k = k.substring(1); + v2 = substituteLocalReferencesRec(v2); + } + if (isJsonObject) v2 = JSON.stringify(v2); + return [k, v2]; + }; + + const result = Object.fromEntries( Object.entries(row) - .filter(([k, v]) => k.charAt(0) !== "@") - .map(([k, v]) => - k.charAt(0) == "_" ? [k.substring(1), localRefs[v]] : [k, v], - ), + .filter(([k, v]: [string, string]) => k.charAt(0) !== "$") + .map(processKV), ); + return result; +}; Given( "{word} are added to the database:", async (tableName: keyof Database["public"]["Tables"], table: DataTable) => { // generic function to add a bunch of objects. - // Columns prefixed by @ are primary keys, and are not sent to the database, + // Columns prefixed by $ are primary keys, and are not sent to the database, // but the local value is associated with the database id in world.localRefs. // Columns prefixed with _ are translated back from local references to db ids. + // Columns prefixed with @ are parsed as json values. (Use @ before _) const client = getServiceClient(); - const localRefs: Record = world.localRefs || {}; + const localRefs = (world.localRefs as Record) || {}; const rows = table.hashes(); const values: any[] = rows.map((r) => substituteLocalReferences(r, localRefs), ); const defIndex = table .raw()[0]! - .map((k) => (k.charAt(0) == "@" ? k : null)) + .map((k) => (k.charAt(0) == "$" ? k : null)) .filter((k) => typeof k == "string"); + const localIndexName = defIndex[0]!; + // do not allow to redefine values + assert.strictEqual( + values.filter((v) => localRefs[v[localIndexName]] !== undefined).length, + 0, + ); if (defIndex.length) { - const localIndexName = defIndex[0]!; const dbIndexName = localIndexName.substring(1); const ids = await client .from(tableName) @@ -120,7 +158,7 @@ When( // assumption: turbo dev is running. 
TODO: Make into hooks if (PLATFORMS.indexOf(platform) < 0) throw new Error(`Platform must be one of ${PLATFORMS}`); - const localRefs: Record = world.localRefs || {}; + const localRefs = (world.localRefs as Record) || {}; const spaceResponse = await fetchOrCreateSpaceDirect({ password: SPACE_ANONYMOUS_PASSWORD, url: `https://roamresearch.com/#/app/${spaceName}`, @@ -163,16 +201,22 @@ Then( }, ); +const getLoggedinDatabase = async (spaceId: number) => { + assert.notStrictEqual(spaceId, undefined); + const client = getAnonymousClient(); + const loginResponse = await client.auth.signInWithPassword({ + email: spaceAnonUserEmail("Roam", spaceId), + password: SPACE_ANONYMOUS_PASSWORD, + }); + assert.equal(loginResponse.error, null); + return client; +}; + Then( "a user logged in space {word} should see a {word} in the database", async (spaceName, tableName) => { - const client = getAnonymousClient(); - const spaceId = world.localRefs[spaceName]; - const loginResponse = await client.auth.signInWithPassword({ - email: spaceAnonUserEmail("Roam", spaceId), - password: SPACE_ANONYMOUS_PASSWORD, - }); - assert.equal(loginResponse.error, null); + const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) .select("*", { count: "exact" }); @@ -183,13 +227,8 @@ Then( Then( "a user logged in space {word} should see {int} {word} in the database", async (spaceName, expectedCount, tableName) => { - const client = getAnonymousClient(); - const spaceId = world.localRefs[spaceName]; - const loginResponse = await client.auth.signInWithPassword({ - email: spaceAnonUserEmail("Roam", spaceId), - password: SPACE_ANONYMOUS_PASSWORD, - }); - assert.equal(loginResponse.error, null); + const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) .select("*", { count: "exact" }); @@ -197,17 +236,26 @@ Then( }, ); +Given( + "user {word} upserts these accounts to space {word}:", + async (userName: string, spaceName: string, accountsString: string) => { + const accounts = JSON.parse(accountsString); + const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); + const response = await client.rpc("upsert_accounts_in_space", { + space_id_: spaceId, + accounts, + }); + assert.equal(response.error, null); + }, +); + Given( "user {word} upserts these documents to space {word}:", async (userName: string, spaceName: string, docString: string) => { const data = JSON.parse(docString); - const client = getAnonymousClient(); - const spaceId = world.localRefs[spaceName]; - const loginResponse = await client.auth.signInWithPassword({ - email: spaceAnonUserEmail("Roam", spaceId), - password: SPACE_ANONYMOUS_PASSWORD, - }); - assert.equal(loginResponse.error, null); + const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_documents", { v_space_id: spaceId, data, @@ -220,13 +268,8 @@ Given( "user {word} upserts this content to space {word}:", async (userName: string, spaceName: string, docString: string) => { const data = JSON.parse(docString); - const client = getAnonymousClient(); - const spaceId = world.localRefs[spaceName]; - const loginResponse = await client.auth.signInWithPassword({ - email: spaceAnonUserEmail("Roam", spaceId), - password: SPACE_ANONYMOUS_PASSWORD, - }); - assert.equal(loginResponse.error, null); + 
const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_content", { v_space_id: spaceId, data, @@ -241,13 +284,8 @@ Given( "user {word} upserts these concepts to space {word}:", async (userName: string, spaceName: string, docString: string) => { const data = JSON.parse(docString); - const client = getAnonymousClient(); - const spaceId = world.localRefs[spaceName]; - const loginResponse = await client.auth.signInWithPassword({ - email: spaceAnonUserEmail("Roam", spaceId), - password: SPACE_ANONYMOUS_PASSWORD, - }); - assert.equal(loginResponse.error, null); + const spaceId: number = world.localRefs[spaceName]; + const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_concepts", { v_space_id: spaceId, data, @@ -255,3 +293,40 @@ Given( assert.equal(response.error, null); }, ); + +Given( + "a user logged in space {word} and querying nodes with these parameters: {string}", + async (spaceName: string, paramsJ: string) => { + const params = JSON.parse(paramsJ); + const spaceId: number = world.localRefs[spaceName]; + const supabase = await getLoggedinDatabase(spaceId); + const nodes = await getNodes({ ...params, supabase, spaceId }); + nodes.sort((a, b) => a.id! - b.id!); + world.queryResults = nodes; + }, +); + +Then("query results should look like this", (table: DataTable) => { + const localRefs = (world.localRefs as Record) || {}; + const rows = table.hashes(); + const values: any[] = rows.map((r) => + substituteLocalReferences(r, localRefs), + ); + // console.log(values); + // console.log(world.queryResults); + values.sort((a, b) => a.id! - b.id!); + assert.deepStrictEqual( + values.map((v) => v.id), + world.queryResults.map((v: any) => v.id), + ); + if (values.length) { + const keys = Object.keys(values[0]); + const truncatedResults = world.queryResults.map((v: any) => + Object.fromEntries( + Object.entries(v).filter(([k, _]) => keys.includes(k)), + ), + ); + // console.log(truncatedResults); + assert.deepEqual(values, truncatedResults); + } +}); diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index eeb6d1056..bc349ff74 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -58,6 +58,8 @@ const composeQuery = ({ conceptFields = ["id", "name", "space_id"], contentFields = ["source_local_id"], documentFields = [], + nodeAuthor = undefined, + fetchNodes = true, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -65,17 +67,36 @@ const composeQuery = ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; + nodeAuthor?: string | undefined; + fetchNodes?: boolean | null; }) => { let q = conceptFields.join(",\n"); + if (schemaDbIds === 0 && !contentFields.includes("source_local_id")) { + contentFields = contentFields.slice(); + contentFields.push("source_local_id"); + } if (contentFields.length > 0) { - q += ",\nContent (\n" + contentFields.join(",\n"); + const args: string[] = contentFields.slice(); if (documentFields.length > 0) { - q += ",\nDocument (\n" + documentFields.join(",\n") + ")"; + args.push("Document (\n" + documentFields.join(",\n") + ")"); } - q += ")"; + q += `,\nContent${schemaDbIds === 0 ? 
"!inner" : ""} (\n${args.join(",\n")})`; + } + if (nodeAuthor !== undefined) { + q += ", author:author_id!inner(account_local_id)"; + } + let query = supabase.from("Concept").select(q); + if (fetchNodes === true) { + query = query.eq("arity", 0); + } else if (fetchNodes === false) { + query = query.gt("arity", 0); } - let query = supabase.from("Concept").select(q).eq("arity", 0); + // else fetch both + if (spaceId !== undefined) query = query.eq("space_id", spaceId); + if (nodeAuthor !== undefined) { + query = query.eq("author.account_local_id", nodeAuthor); + } if (schemaDbIds === 0) { query = query.eq("is_schema", true); } else { @@ -207,6 +228,7 @@ export const CONCEPT_FIELDS: (keyof Concept)[] = [ "reference_content", "refs", "is_schema", + "schema_id", "represented_by_id", ]; @@ -249,6 +271,8 @@ export const getNodes = async ({ conceptFields = CONCEPT_FIELDS, contentFields = CONTENT_FIELDS, documentFields = DOCUMENT_FIELDS, + nodeAuthor = undefined, + fetchNodes = true, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -256,6 +280,8 @@ export const getNodes = async ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; + nodeAuthor?: string | undefined; + fetchNodes?: boolean | null; }): Promise => { let schemaDbIds: number | number[] = 0; const localIdsArray = @@ -283,6 +309,8 @@ export const getNodes = async ({ conceptFields, contentFields, documentFields, + nodeAuthor, + fetchNodes, }); const { error, data } = (await q) as PostgrestResponse; if (error) { From ddb37511284d257b4f2866ba2ac012017d8dd1c9 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Wed, 1 Oct 2025 15:04:22 -0400 Subject: [PATCH 02/15] wip: More query cases --- .../database/features/queryConcepts.feature | 85 ++++++++++------ .../features/step-definitions/stepdefs.ts | 20 ++-- packages/database/src/dbTypes.ts | 40 ++++++++ packages/database/src/lib/queries.ts | 98 ++++++++++++++++--- ...250929154709_relation_access_functions.sql | 20 ++++ .../database/supabase/schemas/concept.sql | 22 +++++ 6 files changed, 230 insertions(+), 55 deletions(-) create mode 100644 packages/database/supabase/migrations/20250929154709_relation_access_functions.sql diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index e81cb3a9d..e15939bbb 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -17,50 +17,75 @@ Feature: Concept upsert | $id | source_local_id | created | last_modified | _author_id | _space_id | | d1 | d1 | 2025/01/01 | 2025/01/01 | user1 | s1 | | d5 | d5 | 2025/01/01 | 2025/01/01 | user2 | s1 | + | d7 | d7 | 2025/01/01 | 2025/01/01 | user1 | s1 | And Content are added to the database: - | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | - | ct1 | ct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | - | ct5 | ct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | + | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | + | ct1 | ct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct5 | ct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | + | ct7 | ct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | And Concept are added to the database: - | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | 
_schema_id | literal_content | reference_content | - | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | - | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | + | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | + | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | + | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | + | c7 | Hypothesis | s1 | user1 | ct7 | 2025/01/01 | 2025/01/01 | true | | {} | {} | And Concept are added to the database: - | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | literal_content | reference_content | - | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | - | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | - | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | + | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | + | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | And Concept are added to the database: - | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | literal_content | @_reference_content | - | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | + | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @_reference_content | + | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | + | c9 | opposes 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c8", "source": "c2"} | Scenario Outline: Query all nodes - And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[]}' + And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"fetchNodes":null}' Then query results should look like this - | _id | name | _space_id | _author_id | @is_schema | _schema_id | @_reference_content | - | c2 | claim 1 | s1 | user1 | false | c1 | {} | - | c3 | claim 2 | s1 | user2 | false | c1 | {} | - | c4 | claim 3 | s1 | user3 | false | c1 | {} | - | c6 | opposes 1 | s1 | user2 | false | c5 | {"target": "c3", "source": "c2"} | + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | + | c4 | claim 3 | s1 | user3 | false | c1 | {} | + | c6 | opposes 1 | s1 | user2 | false | c5 | {"target": "c3", "source": "c2"} | + | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | + | c9 | opposes 2 | s1 | user2 | false | c5 | {"target": "c8", "source": "c2"} | Scenario Outline: Query node schemas - And a user logged in space s1 and querying nodes with these parameters: '{}' + And a user logged in space s1 and querying nodes with these parameters: '{"fetchNodes":null}' Then query results should look like this - | _id | name | _space_id | 
_author_id | @is_schema | _schema_id | literal_content | reference_content | _represented_by_id | - | c1 | Claim | s1 | user1 | true | | {} | {} | ct1 | - | c5 | Opposes | s1 | user1 | true | | {"roles": ["target", "source"]} | {} | ct5 | + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @reference_content | _represented_by_id | + | c1 | Claim | s1 | user1 | true | | {} | {} | ct1 | + | c5 | Opposes | s1 | user1 | true | | {"roles": ["target", "source"]} | {} | ct5 | + | c7 | Hypothesis | s1 | user1 | true | | {} | {} | ct7 | Scenario Outline: Query by node types And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":["ct1"]}' Then query results should look like this - | _id | name | _space_id | _author_id | @is_schema | _schema_id | literal_content | reference_content | - | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | - | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | - | c4 | claim 3 | s1 | user3 | false | c1 | {} | {} | + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c4 | claim 3 | s1 | user3 | false | c1 | {} | {} | Scenario Outline: Query by author - And a user logged in space s1 and querying nodes with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[]}' + And a user logged in space s1 and querying nodes with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[],"fetchNodes":null}' Then query results should look like this - | _id | name | _space_id | _author_id | @is_schema | _schema_id | literal_content | @_reference_content | - | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | - | c6 | opposes 1 | s1 | user2 | false | c5 | {} | {"target": "c3", "source": "c2"} | + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c6 | opposes 1 | s1 | user2 | false | c5 | {} | {"target": "c3", "source": "c2"} | + | c9 | opposes 2 | s1 | user2 | false | c5 | {} | {"target": "c8", "source": "c2"} | + + Scenario Outline: Query by relation type + And a user logged in space s1 and querying nodes with these parameters: '{"inRelsOfTypeLocal":["ct5"],"schemaLocalIds":[]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | + + Scenario Outline: Query by related node type + And a user logged in space s1 and querying nodes with these parameters: '{"inRelsToNodesOfTypeLocal":["ct7"],"schemaLocalIds":[]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | + # Note that the node is related to itself, unfortunate but hard to solve. 
diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 100e80a07..947793957 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -3,7 +3,7 @@ import { Given, When, Then, world, type DataTable } from "@cucumber/cucumber"; import { createClient } from "@supabase/supabase-js"; import { Constants, type Database, type Enums } from "@repo/database/dbTypes"; import { getVariant, config } from "@repo/database/dbDotEnv"; -import { getNodes } from "@repo/database/lib/queries"; +import { getNodes, initNodeSchemaCache } from "@repo/database/lib/queries"; import { spaceAnonUserEmail, @@ -61,6 +61,8 @@ Given("the database is blank", async () => { r = await client.from("Space").delete().neq("id", -1); assert.equal(r.error, null); world.localRefs = {}; + // clear the cache + initNodeSchemaCache(); }); const substituteLocalReferences = ( @@ -79,19 +81,17 @@ const substituteLocalReferences = ( console.error("could not substitute", typeof v, v); }; - const processKV = ([k, v]: [string, string]) => { + const processKV = ([k, v]: [string, any]) => { let v2: any = v; const isJson = k.charAt(0) === "@"; if (isJson) { k = k.substring(1); v2 = JSON.parse(v2); } - const isJsonObject = isJson && "{[".includes(v.charAt(0)); if (k.charAt(0) === "_") { k = k.substring(1); v2 = substituteLocalReferencesRec(v2); } - if (isJsonObject) v2 = JSON.stringify(v2); return [k, v2]; }; @@ -312,12 +312,12 @@ Then("query results should look like this", (table: DataTable) => { const values: any[] = rows.map((r) => substituteLocalReferences(r, localRefs), ); - // console.log(values); - // console.log(world.queryResults); + // console.debug(values); + // console.debug(JSON.stringify(world.queryResults, null, 2)); values.sort((a, b) => a.id! 
- b.id!); - assert.deepStrictEqual( - values.map((v) => v.id), + assert.deepEqual( world.queryResults.map((v: any) => v.id), + values.map((v) => v.id), ); if (values.length) { const keys = Object.keys(values[0]); @@ -326,7 +326,7 @@ Then("query results should look like this", (table: DataTable) => { Object.entries(v).filter(([k, _]) => keys.includes(k)), ), ); - // console.log(truncatedResults); - assert.deepEqual(values, truncatedResults); + // console.debug(truncatedResults); + assert.deepEqual(truncatedResults, values); } }); diff --git a/packages/database/src/dbTypes.ts b/packages/database/src/dbTypes.ts index 4c15c8623..dc48a6a4c 100644 --- a/packages/database/src/dbTypes.ts +++ b/packages/database/src/dbTypes.ts @@ -578,10 +578,50 @@ export type Database = { Args: { lit_content: Json; schema_id: number } Returns: number } + concept_in_relations: { + Args: { concept: Database["public"]["Tables"]["Concept"]["Row"] } + Returns: { + arity: number | null + author_id: number | null + created: string + description: string | null + epistemic_status: Database["public"]["Enums"]["EpistemicStatus"] + id: number + is_schema: boolean + last_modified: string + literal_content: Json + name: string + reference_content: Json + refs: number[] + represented_by_id: number | null + schema_id: number | null + space_id: number + }[] + } concept_in_space: { Args: { concept_id: number } Returns: boolean } + concepts_of_relation: { + Args: { relation: Database["public"]["Tables"]["Concept"]["Row"] } + Returns: { + arity: number | null + author_id: number | null + created: string + description: string | null + epistemic_status: Database["public"]["Enums"]["EpistemicStatus"] + id: number + is_schema: boolean + last_modified: string + literal_content: Json + name: string + reference_content: Json + refs: number[] + represented_by_id: number | null + schema_id: number | null + space_id: number + }[] + } content_in_space: { Args: { content_id: number } Returns: boolean diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index bc349ff74..9b5e52bab 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -30,6 +30,12 @@ let NODE_SCHEMA_CACHE: Record = { [NODE_SCHEMAS]: nodeSchemaSignature, }; +export const initNodeSchemaCache = () => { + NODE_SCHEMA_CACHE = { + [NODE_SCHEMAS]: nodeSchemaSignature, + }; +} + export type PDocument = Partial>; export type PContent = Partial> & { Document: PDocument | null; @@ -60,6 +66,11 @@ const composeQuery = ({ documentFields = [], nodeAuthor = undefined, fetchNodes = true, + inRelsOfType = undefined, + relationFields = undefined, + relationToNodeFields = undefined, + inRelsToNodesOfType = undefined, + inRelsToNodesOfAuthor = undefined, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -69,6 +80,11 @@ const composeQuery = ({ documentFields?: (keyof Document)[]; nodeAuthor?: string | undefined; fetchNodes?: boolean | null; + inRelsOfType?: number[]; + relationFields?: (keyof Concept)[]; + relationToNodeFields?: (keyof Concept)[]; + inRelsToNodesOfType?: number[]; + inRelsToNodesOfAuthor?: number; }) => { let q = conceptFields.join(",\n"); if (schemaDbIds === 0 && !contentFields.includes("source_local_id")) { @@ -85,6 +101,27 @@ const composeQuery = ({ if (nodeAuthor !== undefined) { q += ", author:author_id!inner(account_local_id)"; } + if ( + inRelsOfType !== undefined || + inRelsToNodesOfType !== undefined || + inRelsToNodesOfAuthor !== undefined + ) { + const args: string[] = (relationFields || 
[]).slice(); + if (inRelsOfType !== undefined && !args.includes("schema_id")) + args.push("schema_id"); + if ( + inRelsToNodesOfType !== undefined || + inRelsToNodesOfAuthor !== undefined + ) { + const args2: string[] = (relationToNodeFields || []).slice(); + if (inRelsToNodesOfType !== undefined && !args2.includes("schema_id")) + args2.push("schema_id"); + if (inRelsToNodesOfAuthor !== undefined && !args2.includes("author_id")) + args2.push("author_id"); + args.push(`subnodes:concepts_of_relation!inner(${args2.join(",\n")})`); + } + q += `, relations:concept_in_relations!inner(${args.join(",\n")})`; + } let query = supabase.from("Concept").select(q); if (fetchNodes === true) { query = query.eq("arity", 0); @@ -108,6 +145,13 @@ const composeQuery = ({ query = query.eq("schema_id", schemaDbIds); else throw new Error("schemaDbIds should be a number or number[]"); } + if (inRelsOfType !== undefined && inRelsOfType.length > 0) + query = query.in("relations.schema_id", inRelsOfType); + if (inRelsToNodesOfType !== undefined && inRelsToNodesOfType.length > 0) + query = query.in("relations.subnodes.schema_id", inRelsToNodesOfType); + if (inRelsToNodesOfAuthor !== undefined) + query = query.eq("relations.subnodes.author_id", inRelsToNodesOfAuthor); + // console.debug(query); return query; }; @@ -121,7 +165,7 @@ export const getNodeSchemas = async ( .filter((x) => typeof x === "object") .filter((x) => x.spaceId === spaceId || x.spaceId === 0); if (forceCacheReload || result.length === 1) { - const q = composeQuery({ supabase, spaceId }); + const q = composeQuery({ supabase, spaceId, fetchNodes: null }); const res = (await q) as PostgrestResponse; if (res.error) { console.error("getNodeSchemas failed", res.error); @@ -273,6 +317,11 @@ export const getNodes = async ({ documentFields = DOCUMENT_FIELDS, nodeAuthor = undefined, fetchNodes = true, + inRelsOfTypeLocal = undefined, + relationFields = undefined, + relationToNodeFields = undefined, + inRelsToNodesOfTypeLocal = undefined, + inRelsToNodesOfAuthor = undefined, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -282,26 +331,40 @@ export const getNodes = async ({ documentFields?: (keyof Document)[]; nodeAuthor?: string | undefined; fetchNodes?: boolean | null; + inRelsOfTypeLocal?: string[]; + relationFields?: (keyof Concept)[]; + relationToNodeFields?: (keyof Concept)[]; + inRelsToNodesOfTypeLocal?: string[]; + inRelsToNodesOfAuthor?: number; }): Promise => { - let schemaDbIds: number | number[] = 0; - const localIdsArray = + const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? 
[schemaLocalIds] : schemaLocalIds; - if (schemaLocalIds !== NODE_SCHEMAS) { - const dbIdsMapping = await getLocalToDbIdMapping( - supabase, - localIdsArray, - spaceId, - ); - schemaDbIds = Object.values(dbIdsMapping).filter((x) => x !== null); - if (schemaDbIds.length < localIdsArray.length) { + const localIds = new Set(schemaLocalIds); + if (inRelsOfTypeLocal !== undefined) + inRelsOfTypeLocal.map((k) => localIds.add(k)); + if (inRelsToNodesOfTypeLocal !== undefined) + inRelsToNodesOfTypeLocal.map((k) => localIds.add(k)); + const dbIdsMapping = await getLocalToDbIdMapping( + supabase, + new Array(...localIds.keys()), + spaceId, + ); + const localToDbArray = (a: string[] | undefined): number[] | undefined => { + if (a === undefined) return undefined; + const r = a + .map((k) => dbIdsMapping[k]) + .filter((k) => k !== null && k !== undefined); + if (r.length < a.length) { console.error( "Some localIds are not yet in database: ", - localIdsArray - .filter((localId) => dbIdsMapping[localId] === null) - .join(", "), + a.filter((k) => !dbIdsMapping[k]).join(", "), ); } - } + return r; + }; + const schemaDbIds = + schemaLocalIds === NODE_SCHEMAS ? 0 : localToDbArray(schemaLocalIdsArray); + const q = composeQuery({ supabase, spaceId, @@ -311,6 +374,11 @@ export const getNodes = async ({ documentFields, nodeAuthor, fetchNodes, + inRelsOfType: localToDbArray(inRelsOfTypeLocal), + relationFields, + relationToNodeFields, + inRelsToNodesOfType: localToDbArray(inRelsToNodesOfTypeLocal), + inRelsToNodesOfAuthor, }); const { error, data } = (await q) as PostgrestResponse; if (error) { diff --git a/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql b/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql new file mode 100644 index 000000000..f201f223b --- /dev/null +++ b/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql @@ -0,0 +1,20 @@ + +CREATE OR REPLACE FUNCTION public.concept_in_relations(concept "Concept") + RETURNS SETOF "Concept" + LANGUAGE sql + STABLE STRICT + SET search_path TO '' +AS $function$ + SELECT * from public."Concept" WHERE concept.id = any(refs); +$function$ +; + +CREATE OR REPLACE FUNCTION public.concepts_of_relation(relation "Concept") + RETURNS SETOF "Concept" + LANGUAGE sql + STABLE STRICT + SET search_path TO '' +AS $function$ + SELECT * from public."Concept" WHERE id = any(relation.refs); +$function$ +; diff --git a/packages/database/supabase/schemas/concept.sql b/packages/database/supabase/schemas/concept.sql index 6620a3d48..e22460bfe 100644 --- a/packages/database/supabase/schemas/concept.sql +++ b/packages/database/supabase/schemas/concept.sql @@ -160,6 +160,28 @@ $$; COMMENT ON FUNCTION public.instances_of_schema(public."Concept") IS 'Computed one-to-many: returns all Concept instances that are based on the given schema Concept.'; + +CREATE OR REPLACE FUNCTION public.concept_in_relations(concept public."Concept") +RETURNS SETOF public."Concept" STRICT STABLE +SET search_path = '' +LANGUAGE sql +AS $$ + SELECT * from public."Concept" WHERE concept.id = any(refs); +$$; +COMMENT ON FUNCTION public.concept_in_relations(public."Concept") +IS 'Computed one-to-many: returns all Concept instances that are relations including the current concept.'; + +CREATE OR REPLACE FUNCTION public.concepts_of_relation(relation public."Concept") +RETURNS SETOF public."Concept" STRICT STABLE +SET search_path = '' +LANGUAGE sql +AS $$ + SELECT * from public."Concept" WHERE id = any(relation.refs); +$$; 
+COMMENT ON FUNCTION public.concepts_of_relation(public."Concept") +IS 'Computed one-to-many: returns all Concept instances are referred to in the current concept.'; + + -- private function. Transform concept with local (platform) references to concept with db references CREATE OR REPLACE FUNCTION public._local_concept_to_db_concept(data public.concept_local_input) RETURNS public."Concept" STABLE From b948aef4b49eee0c5de4b9b6e59e3529c3d28f78 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Thu, 2 Oct 2025 21:04:02 -0400 Subject: [PATCH 03/15] filter on connected node author --- .../database/features/queryConcepts.feature | 7 ++++++ .../features/step-definitions/stepdefs.ts | 22 ++++++++++++++----- packages/database/src/lib/queries.ts | 20 ++++++++++------- 3 files changed, 36 insertions(+), 13 deletions(-) diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index e15939bbb..d94a082c7 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -89,3 +89,10 @@ Feature: Concept upsert | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | # Note that the node is related to itself, unfortunate but hard to solve. + + Scenario Outline: Query by author of related node + And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"inRelsToNodesOfAuthor":"user3","relationFields":["id"],"relationToNodeFields":["id"]}' + Then query results should look like this + | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 947793957..ed60f7911 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -68,10 +68,16 @@ Given("the database is blank", async () => { const substituteLocalReferences = ( row: Record, localRefs: Record, + prefixValue: boolean = false ): Record => { const substituteLocalReferencesRec = (v: any): any => { + if (v === undefined || v === null) + return v; if (typeof v === "string") { - return localRefs[v]; + if (prefixValue) + return (v.charAt(0) === '@') ? localRefs[v.substr(1)] : v; + else + return localRefs[v]; } if (Array.isArray(v)) return v.map(substituteLocalReferencesRec); if (typeof v === "object") @@ -80,7 +86,13 @@ const substituteLocalReferences = ( ); console.error("could not substitute", typeof v, v); }; + return substituteLocalReferencesRec(row); +} +const substituteLocalReferencesRow = ( + row: Record, + localRefs: Record, +): Record => { const processKV = ([k, v]: [string, any]) => { let v2: any = v; const isJson = k.charAt(0) === "@"; @@ -90,7 +102,7 @@ const substituteLocalReferences = ( } if (k.charAt(0) === "_") { k = k.substring(1); - v2 = substituteLocalReferencesRec(v2); + v2 = substituteLocalReferences(v2, localRefs); } return [k, v2]; }; @@ -115,7 +127,7 @@ Given( const localRefs = (world.localRefs as Record) || {}; const rows = table.hashes(); const values: any[] = rows.map((r) => - substituteLocalReferences(r, localRefs), + substituteLocalReferencesRow(r, localRefs), ); const defIndex = table .raw()[0]! 
@@ -297,7 +309,7 @@ Given( Given( "a user logged in space {word} and querying nodes with these parameters: {string}", async (spaceName: string, paramsJ: string) => { - const params = JSON.parse(paramsJ); + const params = substituteLocalReferences(JSON.parse(paramsJ), world.localRefs, true); const spaceId: number = world.localRefs[spaceName]; const supabase = await getLoggedinDatabase(spaceId); const nodes = await getNodes({ ...params, supabase, spaceId }); @@ -310,7 +322,7 @@ Then("query results should look like this", (table: DataTable) => { const localRefs = (world.localRefs as Record) || {}; const rows = table.hashes(); const values: any[] = rows.map((r) => - substituteLocalReferences(r, localRefs), + substituteLocalReferencesRow(r, localRefs), ); // console.debug(values); // console.debug(JSON.stringify(world.queryResults, null, 2)); diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index 9b5e52bab..7a81688a2 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -78,13 +78,13 @@ const composeQuery = ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; - nodeAuthor?: string | undefined; + nodeAuthor?: string; fetchNodes?: boolean | null; inRelsOfType?: number[]; relationFields?: (keyof Concept)[]; relationToNodeFields?: (keyof Concept)[]; inRelsToNodesOfType?: number[]; - inRelsToNodesOfAuthor?: number; + inRelsToNodesOfAuthor?: string; }) => { let q = conceptFields.join(",\n"); if (schemaDbIds === 0 && !contentFields.includes("source_local_id")) { @@ -116,8 +116,11 @@ const composeQuery = ({ const args2: string[] = (relationToNodeFields || []).slice(); if (inRelsToNodesOfType !== undefined && !args2.includes("schema_id")) args2.push("schema_id"); - if (inRelsToNodesOfAuthor !== undefined && !args2.includes("author_id")) - args2.push("author_id"); + if (inRelsToNodesOfAuthor !== undefined) { + if (!args2.includes("author_id")) + args2.push('author_id') + args2.push("author:author_id!inner(account_local_id)"); + } args.push(`subnodes:concepts_of_relation!inner(${args2.join(",\n")})`); } q += `, relations:concept_in_relations!inner(${args.join(",\n")})`; @@ -149,8 +152,9 @@ const composeQuery = ({ query = query.in("relations.schema_id", inRelsOfType); if (inRelsToNodesOfType !== undefined && inRelsToNodesOfType.length > 0) query = query.in("relations.subnodes.schema_id", inRelsToNodesOfType); - if (inRelsToNodesOfAuthor !== undefined) - query = query.eq("relations.subnodes.author_id", inRelsToNodesOfAuthor); + if (inRelsToNodesOfAuthor !== undefined) { + query = query.eq("relations.subnodes.author.account_local_id", inRelsToNodesOfAuthor); + } // console.debug(query); return query; }; @@ -329,13 +333,13 @@ export const getNodes = async ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; - nodeAuthor?: string | undefined; + nodeAuthor?: string; fetchNodes?: boolean | null; inRelsOfTypeLocal?: string[]; relationFields?: (keyof Concept)[]; relationToNodeFields?: (keyof Concept)[]; inRelsToNodesOfTypeLocal?: string[]; - inRelsToNodesOfAuthor?: number; + inRelsToNodesOfAuthor?: string; }): Promise => { const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? 
[schemaLocalIds] : schemaLocalIds; From af940b4795012a1ed9501b9936e82348dfa4da34 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Thu, 2 Oct 2025 21:39:26 -0400 Subject: [PATCH 04/15] filter on list of nodes or collected nodes --- .../database/features/queryConcepts.feature | 20 +++++++++---- packages/database/src/lib/queries.ts | 28 ++++++++++++++++--- 2 files changed, 39 insertions(+), 9 deletions(-) diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index d94a082c7..c96b2d411 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -16,11 +16,13 @@ Feature: Concept upsert And Document are added to the database: | $id | source_local_id | created | last_modified | _author_id | _space_id | | d1 | d1 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d2 | d2 | 2025/01/01 | 2025/01/01 | user1 | s1 | | d5 | d5 | 2025/01/01 | 2025/01/01 | user2 | s1 | | d7 | d7 | 2025/01/01 | 2025/01/01 | user1 | s1 | And Content are added to the database: | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | | ct1 | ct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct2 | ct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | | ct5 | ct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | | ct7 | ct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | And Concept are added to the database: @@ -29,11 +31,11 @@ Feature: Concept upsert | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | | c7 | Hypothesis | s1 | user1 | ct7 | 2025/01/01 | 2025/01/01 | true | | {} | {} | And Concept are added to the database: - | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | - | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | - | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | - | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | - | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | + | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | _represented_by_id | + | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | ct2 | + | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | + | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | + | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | | And Concept are added to the database: | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @_reference_content | | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | @@ -96,3 +98,11 @@ Feature: Concept upsert | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | + + Scenario Outline: Query by related node + And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"inRelsToNodeLocalIds":["ct2"]}' + Then query results should look like this + | _id | name | 
_space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | + | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | + | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | + | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index 7a81688a2..4655bb96d 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -64,6 +64,7 @@ const composeQuery = ({ conceptFields = ["id", "name", "space_id"], contentFields = ["source_local_id"], documentFields = [], + baseNodeLocalIds = [], nodeAuthor = undefined, fetchNodes = true, inRelsOfType = undefined, @@ -71,6 +72,7 @@ const composeQuery = ({ relationToNodeFields = undefined, inRelsToNodesOfType = undefined, inRelsToNodesOfAuthor = undefined, + inRelsToNodeLocalIds = undefined, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -78,6 +80,7 @@ const composeQuery = ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; + baseNodeLocalIds?: string[]; nodeAuthor?: string; fetchNodes?: boolean | null; inRelsOfType?: number[]; @@ -85,9 +88,11 @@ const composeQuery = ({ relationToNodeFields?: (keyof Concept)[]; inRelsToNodesOfType?: number[]; inRelsToNodesOfAuthor?: string; + inRelsToNodeLocalIds?: string[]; }) => { let q = conceptFields.join(",\n"); - if (schemaDbIds === 0 && !contentFields.includes("source_local_id")) { + const innerContent = schemaDbIds === 0 || baseNodeLocalIds.length > 0; + if (innerContent && !contentFields.includes("source_local_id")) { contentFields = contentFields.slice(); contentFields.push("source_local_id"); } @@ -96,7 +101,7 @@ const composeQuery = ({ if (documentFields.length > 0) { args.push("Document (\n" + documentFields.join(",\n") + ")"); } - q += `,\nContent${schemaDbIds === 0 ? "!inner" : ""} (\n${args.join(",\n")})`; + q += `,\nContent${innerContent ? 
"!inner" : ""} (\n${args.join(",\n")})`; } if (nodeAuthor !== undefined) { q += ", author:author_id!inner(account_local_id)"; @@ -104,18 +109,22 @@ const composeQuery = ({ if ( inRelsOfType !== undefined || inRelsToNodesOfType !== undefined || - inRelsToNodesOfAuthor !== undefined + inRelsToNodesOfAuthor !== undefined || + inRelsToNodeLocalIds !== undefined ) { const args: string[] = (relationFields || []).slice(); if (inRelsOfType !== undefined && !args.includes("schema_id")) args.push("schema_id"); if ( inRelsToNodesOfType !== undefined || - inRelsToNodesOfAuthor !== undefined + inRelsToNodesOfAuthor !== undefined || + inRelsToNodeLocalIds !== undefined ) { const args2: string[] = (relationToNodeFields || []).slice(); if (inRelsToNodesOfType !== undefined && !args2.includes("schema_id")) args2.push("schema_id"); + if (inRelsToNodeLocalIds !== undefined) + args2.push("Content!inner(source_local_id)"); if (inRelsToNodesOfAuthor !== undefined) { if (!args2.includes("author_id")) args2.push('author_id') @@ -148,6 +157,8 @@ const composeQuery = ({ query = query.eq("schema_id", schemaDbIds); else throw new Error("schemaDbIds should be a number or number[]"); } + if (baseNodeLocalIds.length > 0) + query = query.in("content.source_local_id", baseNodeLocalIds); if (inRelsOfType !== undefined && inRelsOfType.length > 0) query = query.in("relations.schema_id", inRelsOfType); if (inRelsToNodesOfType !== undefined && inRelsToNodesOfType.length > 0) @@ -155,6 +166,9 @@ const composeQuery = ({ if (inRelsToNodesOfAuthor !== undefined) { query = query.eq("relations.subnodes.author.account_local_id", inRelsToNodesOfAuthor); } + if (inRelsToNodeLocalIds !== undefined) { + query = query.in("relations.subnodes.Content.source_local_id", inRelsToNodeLocalIds); + } // console.debug(query); return query; }; @@ -319,6 +333,7 @@ export const getNodes = async ({ conceptFields = CONCEPT_FIELDS, contentFields = CONTENT_FIELDS, documentFields = DOCUMENT_FIELDS, + baseNodeLocalIds = [], nodeAuthor = undefined, fetchNodes = true, inRelsOfTypeLocal = undefined, @@ -326,6 +341,7 @@ export const getNodes = async ({ relationToNodeFields = undefined, inRelsToNodesOfTypeLocal = undefined, inRelsToNodesOfAuthor = undefined, + inRelsToNodeLocalIds = undefined, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -333,6 +349,7 @@ export const getNodes = async ({ conceptFields?: (keyof Concept)[]; contentFields?: (keyof Content)[]; documentFields?: (keyof Document)[]; + baseNodeLocalIds?: string[]; nodeAuthor?: string; fetchNodes?: boolean | null; inRelsOfTypeLocal?: string[]; @@ -340,6 +357,7 @@ export const getNodes = async ({ relationToNodeFields?: (keyof Concept)[]; inRelsToNodesOfTypeLocal?: string[]; inRelsToNodesOfAuthor?: string; + inRelsToNodeLocalIds?: string[]; }): Promise => { const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? 
[schemaLocalIds] : schemaLocalIds; @@ -372,6 +390,7 @@ export const getNodes = async ({ const q = composeQuery({ supabase, spaceId, + baseNodeLocalIds, schemaDbIds, conceptFields, contentFields, @@ -383,6 +402,7 @@ export const getNodes = async ({ relationToNodeFields, inRelsToNodesOfType: localToDbArray(inRelsToNodesOfTypeLocal), inRelsToNodesOfAuthor, + inRelsToNodeLocalIds, }); const { error, data } = (await q) as PostgrestResponse; if (error) { From 803d55389a94dfdf42065390d6b43bcd4f285716 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Fri, 3 Oct 2025 08:33:33 -0400 Subject: [PATCH 05/15] coderabbit corrections --- packages/database/features/step-definitions/stepdefs.ts | 2 ++ packages/database/src/lib/queries.ts | 2 +- .../migrations/20250929154709_relation_access_functions.sql | 2 +- packages/database/supabase/schemas/concept.sql | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index ed60f7911..13369edcf 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -79,12 +79,14 @@ const substituteLocalReferences = ( else return localRefs[v]; } + if (typeof v === "number" || typeof v === "boolean") return v; if (Array.isArray(v)) return v.map(substituteLocalReferencesRec); if (typeof v === "object") return Object.fromEntries( Object.entries(v).map(([k, v]) => [k, substituteLocalReferencesRec(v)]), ); console.error("could not substitute", typeof v, v); + return v; }; return substituteLocalReferencesRec(row); } diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index 4655bb96d..d28a8b893 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -361,7 +361,7 @@ export const getNodes = async ({ }): Promise => { const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? 
[schemaLocalIds] : schemaLocalIds; - const localIds = new Set(schemaLocalIds); + const localIds = new Set(schemaLocalIdsArray); if (inRelsOfTypeLocal !== undefined) inRelsOfTypeLocal.map((k) => localIds.add(k)); if (inRelsToNodesOfTypeLocal !== undefined) diff --git a/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql b/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql index f201f223b..a7ef6efac 100644 --- a/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql +++ b/packages/database/supabase/migrations/20250929154709_relation_access_functions.sql @@ -5,7 +5,7 @@ CREATE OR REPLACE FUNCTION public.concept_in_relations(concept "Concept") STABLE STRICT SET search_path TO '' AS $function$ - SELECT * from public."Concept" WHERE concept.id = any(refs); + SELECT * from public."Concept" WHERE refs @> ARRAY[concept.id]; $function$ ; diff --git a/packages/database/supabase/schemas/concept.sql b/packages/database/supabase/schemas/concept.sql index e22460bfe..623b76884 100644 --- a/packages/database/supabase/schemas/concept.sql +++ b/packages/database/supabase/schemas/concept.sql @@ -166,7 +166,7 @@ RETURNS SETOF public."Concept" STRICT STABLE SET search_path = '' LANGUAGE sql AS $$ - SELECT * from public."Concept" WHERE concept.id = any(refs); + SELECT * from public."Concept" WHERE refs @> ARRAY[concept.id]; $$; COMMENT ON FUNCTION public.concept_in_relations(public."Concept") IS 'Computed one-to-many: returns all Concept instances that are relations including the current concept.'; From e2a9a874e305f7620e7ef4f9a17fbf37bcd9db29 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Mon, 6 Oct 2025 09:49:23 -0400 Subject: [PATCH 06/15] make lint happier --- .../features/step-definitions/stepdefs.ts | 129 ++++++++++-------- packages/database/src/inputTypes.ts | 2 +- packages/database/src/lib/queries.ts | 41 +++--- 3 files changed, 93 insertions(+), 79 deletions(-) diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 13369edcf..7e04d78a6 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -1,7 +1,8 @@ +/* eslint @typescript-eslint/no-explicit-any : 0 */ import assert from "assert"; import { Given, When, Then, world, type DataTable } from "@cucumber/cucumber"; import { createClient } from "@supabase/supabase-js"; -import { Constants, type Database, type Enums } from "@repo/database/dbTypes"; +import { Constants, type Database, type Enums, type Json } from "@repo/database/dbTypes"; import { getVariant, config } from "@repo/database/dbDotEnv"; import { getNodes, initNodeSchemaCache } from "@repo/database/lib/queries"; @@ -12,6 +13,7 @@ import { } from "@repo/database/lib/contextFunctions"; type Platform = Enums<"Platform">; +type TableName = keyof Database["public"]["Tables"] const PLATFORMS: readonly Platform[] = Constants.public.Enums.Platform; if (getVariant() === "production") { @@ -33,6 +35,7 @@ const getAnonymousClient = () => { }; const getServiceClient = () => { + // eslint-disable-next-line turboPlugin/no-undeclared-env-vars if (!process.env.SUPABASE_URL || !process.env.SUPABASE_SERVICE_ROLE_KEY) { throw new Error( "Missing required environment variables: SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY", @@ -40,7 +43,7 @@ const getServiceClient = () => { } return createClient( process.env.SUPABASE_URL, - process.env.SUPABASE_SERVICE_ROLE_KEY, + 
process.env.SUPABASE_SERVICE_ROLE_KEY, // eslint-disable-line turboPlugin/no-undeclared-env-vars ); }; @@ -66,16 +69,16 @@ Given("the database is blank", async () => { }); const substituteLocalReferences = ( - row: Record, + obj: any, localRefs: Record, prefixValue: boolean = false -): Record => { +): any => { const substituteLocalReferencesRec = (v: any): any => { if (v === undefined || v === null) return v; if (typeof v === "string") { if (prefixValue) - return (v.charAt(0) === '@') ? localRefs[v.substr(1)] : v; + return (v.charAt(0) === '@') ? localRefs[v.substring(1)] : v; else return localRefs[v]; } @@ -83,35 +86,34 @@ const substituteLocalReferences = ( if (Array.isArray(v)) return v.map(substituteLocalReferencesRec); if (typeof v === "object") return Object.fromEntries( - Object.entries(v).map(([k, v]) => [k, substituteLocalReferencesRec(v)]), + Object.entries(v as object).map(([k, v]) => [k, substituteLocalReferencesRec(v)]), ); console.error("could not substitute", typeof v, v); return v; }; - return substituteLocalReferencesRec(row); + return substituteLocalReferencesRec(obj); } const substituteLocalReferencesRow = ( row: Record, localRefs: Record, ): Record => { - const processKV = ([k, v]: [string, any]) => { - let v2: any = v; + const processKV = ([k, v]: [string, any]): [string, any] => { const isJson = k.charAt(0) === "@"; if (isJson) { k = k.substring(1); - v2 = JSON.parse(v2); + v = JSON.parse(v as string) as Json; } if (k.charAt(0) === "_") { k = k.substring(1); - v2 = substituteLocalReferences(v2, localRefs); + v = substituteLocalReferences(v, localRefs); // eslint-disable-line @typescript-eslint/no-unsafe-assignment } - return [k, v2]; + return [k, v]; }; const result = Object.fromEntries( Object.entries(row) - .filter(([k, v]: [string, string]) => k.charAt(0) !== "$") + .filter(([k,]: [string, string]) => k.charAt(0) !== "$") .map(processKV), ); return result; @@ -119,33 +121,33 @@ const substituteLocalReferencesRow = ( Given( "{word} are added to the database:", - async (tableName: keyof Database["public"]["Tables"], table: DataTable) => { + async (tableName: TableName, table: DataTable) => { // generic function to add a bunch of objects. // Columns prefixed by $ are primary keys, and are not sent to the database, // but the local value is associated with the database id in world.localRefs. // Columns prefixed with _ are translated back from local references to db ids. // Columns prefixed with @ are parsed as json values. (Use @ before _) const client = getServiceClient(); - const localRefs = (world.localRefs as Record) || {}; + const localRefs = (world.localRefs || {}) as Record; const rows = table.hashes(); - const values: any[] = rows.map((r) => + const values: Record[] = rows.map((r) => substituteLocalReferencesRow(r, localRefs), ); - const defIndex = table + const defIndex: string[] = table .raw()[0]! .map((k) => (k.charAt(0) == "$" ? 
k : null)) .filter((k) => typeof k == "string"); - const localIndexName = defIndex[0]!; + const localIndexName: string = defIndex[0]!; // do not allow to redefine values assert.strictEqual( - values.filter((v) => localRefs[v[localIndexName]] !== undefined).length, + values.filter((v) => (typeof v[localIndexName] === "string")?(localRefs[v[localIndexName]] !== undefined):false).length, 0, ); if (defIndex.length) { const dbIndexName = localIndexName.substring(1); const ids = await client .from(tableName) - .insert(values) + .insert(values as any[]) .select(dbIndexName); assert.equal(ids.error, null); if (ids.data == null || ids.data == undefined) @@ -157,7 +159,7 @@ Given( localRefs[rows[idx]![localIndexName]!] = dbId; } } else { - const r = await client.from(tableName).insert(values); + const r = await client.from(tableName).insert(values as any[]); assert.equal(r.error, null); } world.localRefs = localRefs; @@ -168,11 +170,11 @@ const userEmail = (userAccountId: string) => `${userAccountId}@example.com`; When( "the user {word} opens the {word} plugin in space {word}", - async (userAccountId, platform, spaceName) => { + async (userAccountId: string, platform: Platform, spaceName: string) => { // assumption: turbo dev is running. TODO: Make into hooks if (PLATFORMS.indexOf(platform) < 0) - throw new Error(`Platform must be one of ${PLATFORMS}`); - const localRefs = (world.localRefs as Record) || {}; + throw new Error(`Platform must be one of ${PLATFORMS.join(', ')}`); + const localRefs = (world.localRefs || {}) as Record; const spaceResponse = await fetchOrCreateSpaceDirect({ password: SPACE_ANONYMOUS_PASSWORD, url: `https://roamresearch.com/#/app/${spaceName}`, @@ -198,7 +200,7 @@ When( }, ); -Then("the database should contain a {word}", async (tableName) => { +Then("the database should contain a {word}", async (tableName: TableName) => { const client = getServiceClient(); const response = await client.from(tableName).select("*", { count: "exact" }); assert.notEqual(response.count || 0, 0); @@ -206,7 +208,7 @@ Then("the database should contain a {word}", async (tableName) => { Then( "the database should contain {int} {word}", - async (expectedCount, tableName) => { + async (expectedCount: number, tableName: TableName) => { const client = getServiceClient(); const response = await client .from(tableName) @@ -228,8 +230,10 @@ const getLoggedinDatabase = async (spaceId: number) => { Then( "a user logged in space {word} should see a {word} in the database", - async (spaceName, tableName) => { - const spaceId: number = world.localRefs[spaceName]; + async (spaceName: string, tableName: TableName) => { + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) @@ -240,8 +244,10 @@ Then( Then( "a user logged in space {word} should see {int} {word} in the database", - async (spaceName, expectedCount, tableName) => { - const spaceId: number = world.localRefs[spaceName]; + async (spaceName: string, expectedCount: number, tableName: TableName) => { + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) @@ -253,11 +259,13 @@ Then( Given( "user {word} upserts these accounts to space {word}:", async (userName: string, spaceName: 
string, accountsString: string) => { - const accounts = JSON.parse(accountsString); - const spaceId: number = world.localRefs[spaceName]; + const accounts = JSON.parse(accountsString) as Json; + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_accounts_in_space", { - space_id_: spaceId, + space_id_: spaceId, // eslint-disable-line @typescript-eslint/naming-convention accounts, }); assert.equal(response.error, null); @@ -267,11 +275,13 @@ Given( Given( "user {word} upserts these documents to space {word}:", async (userName: string, spaceName: string, docString: string) => { - const data = JSON.parse(docString); - const spaceId: number = world.localRefs[spaceName]; + const data = JSON.parse(docString) as Json; + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_documents", { - v_space_id: spaceId, + v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention data, }); assert.equal(response.error, null); @@ -281,14 +291,18 @@ Given( Given( "user {word} upserts this content to space {word}:", async (userName: string, spaceName: string, docString: string) => { - const data = JSON.parse(docString); - const spaceId: number = world.localRefs[spaceName]; + const data = JSON.parse(docString) as Json; + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); + const userId = localRefs[userName]; + if (userId === undefined) assert.fail('userId'); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_content", { - v_space_id: spaceId, + v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention data, - v_creator_id: world.localRefs[userName], - content_as_document: false, + v_creator_id: userId, // eslint-disable-line @typescript-eslint/naming-convention + content_as_document: false, // eslint-disable-line @typescript-eslint/naming-convention }); assert.equal(response.error, null); }, @@ -297,11 +311,13 @@ Given( Given( "user {word} upserts these concepts to space {word}:", async (userName: string, spaceName: string, docString: string) => { - const data = JSON.parse(docString); - const spaceId: number = world.localRefs[spaceName]; + const data = JSON.parse(docString) as Json; + const localRefs = (world.localRefs || {}) as Record; + const spaceId = localRefs[spaceName]; + if (spaceId === undefined) assert.fail('spaceId'); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_concepts", { - v_space_id: spaceId, + v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention data, }); assert.equal(response.error, null); @@ -311,8 +327,10 @@ Given( Given( "a user logged in space {word} and querying nodes with these parameters: {string}", async (spaceName: string, paramsJ: string) => { - const params = substituteLocalReferences(JSON.parse(paramsJ), world.localRefs, true); - const spaceId: number = world.localRefs[spaceName]; + const localRefs = (world.localRefs || {}) as Record; + const params = substituteLocalReferences(JSON.parse(paramsJ), localRefs, true) as object; + const spaceId = localRefs[spaceName]; + 
if (spaceId === undefined) assert.fail('spaceId'); const supabase = await getLoggedinDatabase(spaceId); const nodes = await getNodes({ ...params, supabase, spaceId }); nodes.sort((a, b) => a.id! - b.id!); @@ -321,23 +339,24 @@ Given( ); Then("query results should look like this", (table: DataTable) => { - const localRefs = (world.localRefs as Record) || {}; + const localRefs = (world.localRefs || {}) as Record; const rows = table.hashes(); - const values: any[] = rows.map((r) => + const values: object[] = rows.map((r) => substituteLocalReferencesRow(r, localRefs), ); // console.debug(values); // console.debug(JSON.stringify(world.queryResults, null, 2)); - values.sort((a, b) => a.id! - b.id!); + const queryResults = (world.queryResults || []) as object[]; + values.sort((a, b) => (a.id as number) - (b.id as number)); assert.deepEqual( - world.queryResults.map((v: any) => v.id), - values.map((v) => v.id), + queryResults.map((v) => (v.id as number)), + values.map((v) => (v.id as number)), ); - if (values.length) { - const keys = Object.keys(values[0]); - const truncatedResults = world.queryResults.map((v: any) => + if (values.length > 0) { + const keys = Object.keys(values[0]!); + const truncatedResults = queryResults.map((v: object) => Object.fromEntries( - Object.entries(v).filter(([k, _]) => keys.includes(k)), + Object.entries(v).filter(([k,]) => keys.includes(k)), ), ); // console.debug(truncatedResults); diff --git a/packages/database/src/inputTypes.ts b/packages/database/src/inputTypes.ts index 758025ab9..b71e3bfd6 100644 --- a/packages/database/src/inputTypes.ts +++ b/packages/database/src/inputTypes.ts @@ -1,4 +1,4 @@ -import type { Database, TablesInsert } from "@repo/database/dbTypes"; +import type { Database } from "@repo/database/dbTypes"; export type LocalAccountDataInput = Partial< Database["public"]["CompositeTypes"]["account_local_input"] diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index d28a8b893..b71b680aa 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -26,30 +26,31 @@ export const nodeSchemaSignature: NodeSignature = { type CacheMissTimestamp = number; type CacheEntry = NodeSignature | CacheMissTimestamp; -let NODE_SCHEMA_CACHE: Record = { +const NODE_SCHEMA_CACHE: Record = { [NODE_SCHEMAS]: nodeSchemaSignature, }; export const initNodeSchemaCache = () => { - NODE_SCHEMA_CACHE = { - [NODE_SCHEMAS]: nodeSchemaSignature, - }; + Object.keys(NODE_SCHEMA_CACHE).map(k => { + if (k !== NODE_SCHEMAS) + delete NODE_SCHEMA_CACHE[k]; + }) } export type PDocument = Partial>; export type PContent = Partial> & { - Document: PDocument | null; + Document: PDocument | null; // eslint-disable-line @typescript-eslint/naming-convention }; export type PConcept = Partial> & { - Content: PContent | null; - schema_of_concept: { name: string } | null; + Content: PContent | null; // eslint-disable-line @typescript-eslint/naming-convention + schema_of_concept: { name: string } | null; // eslint-disable-line @typescript-eslint/naming-convention }; -type defaultQueryShape = { +type DefaultQueryShape = { id: number; - space_id: number; + space_id: number; // eslint-disable-line @typescript-eslint/naming-convention name: string; - Content: { source_local_id: string }; + Content: { source_local_id: string }; // eslint-disable-line @typescript-eslint/naming-convention }; // Utility function to compose a generic query to fetch concepts, content and document. 
@@ -184,14 +185,12 @@ export const getNodeSchemas = async ( .filter((x) => x.spaceId === spaceId || x.spaceId === 0); if (forceCacheReload || result.length === 1) { const q = composeQuery({ supabase, spaceId, fetchNodes: null }); - const res = (await q) as PostgrestResponse; + const res = (await q) as PostgrestResponse; if (res.error) { console.error("getNodeSchemas failed", res.error); return [NODE_SCHEMA_CACHE[NODE_SCHEMAS] as NodeSignature]; } - NODE_SCHEMA_CACHE = { - ...NODE_SCHEMA_CACHE, - ...Object.fromEntries( + Object.assign(NODE_SCHEMA_CACHE, Object.fromEntries( res.data.map((x) => [ x.Content.source_local_id, { @@ -201,8 +200,7 @@ export const getNodeSchemas = async ( name: x.name, }, ]), - ), - }; + )); result = Object.values(NODE_SCHEMA_CACHE) .filter((x) => typeof x === "object") .filter((x) => x.spaceId === spaceId || x.spaceId === 0); @@ -229,7 +227,7 @@ const getLocalToDbIdMapping = async ( const numMissing = Object.values(dbIds).filter((x) => x === null).length; if (numMissing === 0) return dbIds; const previousMisses = Object.fromEntries( - partialResult.filter(([k, v]) => typeof v === "number"), + partialResult.filter(([,v]) => typeof v === "number"), ) as Record; const numPreviousMisses = Object.values(previousMisses).length; const now = Date.now(); @@ -247,14 +245,12 @@ const getLocalToDbIdMapping = async ( .in("Content.source_local_id", localLocalIds) .not("Content.source_local_id", "is", null); } // otherwise populate the cache - const res = (await q) as PostgrestResponse; + const res = (await q) as PostgrestResponse; if (res.error) { console.error("could not get db Ids", res.error); return dbIds; } - NODE_SCHEMA_CACHE = { - ...NODE_SCHEMA_CACHE, - ...Object.fromEntries( + Object.assign(NODE_SCHEMA_CACHE, Object.fromEntries( res.data.map((x) => [ x.Content.source_local_id, { @@ -264,8 +260,7 @@ const getLocalToDbIdMapping = async ( name: x.name, }, ]), - ), - }; + )); for (const localId of localLocalIds) { if (typeof NODE_SCHEMA_CACHE[localId] !== "object") NODE_SCHEMA_CACHE[localId] = now; From 4fb0768eeb6f7c1088a6fb604f68216f9f0daf3d Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Mon, 6 Oct 2025 09:54:00 -0400 Subject: [PATCH 07/15] prettier --- .../database/features/addConcepts.feature | 86 ++++++++++++++++-- packages/database/features/addContent.feature | 89 +++++++++++++++++-- .../database/features/queryConcepts.feature | 2 +- .../features/step-definitions/stepdefs.ts | 68 ++++++++------ packages/database/src/lib/contextFunctions.ts | 9 +- packages/database/src/lib/queries.ts | 40 +++++---- 6 files changed, 234 insertions(+), 60 deletions(-) diff --git a/packages/database/features/addConcepts.feature b/packages/database/features/addConcepts.feature index 173f3c886..8e07c1fc3 100644 --- a/packages/database/features/addConcepts.feature +++ b/packages/database/features/addConcepts.feature @@ -14,7 +14,14 @@ Feature: Concept upsert Scenario Outline: Calling the upsert steps separately When user user1 upserts these documents to space s1: """json - [{ "source_local_id": "page1_uid", "created": "2000/01/01", "last_modified": "2001/01/02", "author_local_id":"user1"}] + [ + { + "source_local_id": "page1_uid", + "created": "2000/01/01", + "last_modified": "2001/01/02", + "author_local_id": "user1" + } + ] """ And user user1 upserts this content to space s1: """json @@ -36,11 +43,80 @@ Feature: Concept upsert "last_modified": "2001/01/02", "text": "Some text" }, - { "author_local_id": "user2", "document_local_id":"page1_uid", "source_local_id": "s2", 
"scale":"document", "created": "2000/01/02", "last_modified": "2001/01/03", "part_of_local_id":"s1", "text": "Some subtext" }, { - "author_local_id": "user2", "document_inline": { "source_local_id": "page1_uid", "created": "2000/01/01", "last_modified": "2001/01/02", "author_local_id":"user2"}, "source_local_id": "s3", "scale": "document", "created": "2000/01/02", "last_modified": "2001/01/03", "part_of_local_id": "s2", "text": "Some subsubtext", "embedding_inline": { - "model":"openai_text_embedding_3_small_1536", "vector":[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] - } } + "author_local_id": "user2", + "document_local_id": "page1_uid", + "source_local_id": "s2", + "scale": "document", + "created": "2000/01/02", + "last_modified": "2001/01/03", + "part_of_local_id": "s1", + "text": "Some subtext" + }, + { + "author_local_id": "user2", + "document_inline": { + "source_local_id": "page1_uid", + "created": "2000/01/01", + "last_modified": "2001/01/02", + "author_local_id": "user2" + }, + "source_local_id": "s3", + "scale": "document", + "created": "2000/01/02", + "last_modified": "2001/01/03", + "part_of_local_id": "s2", + "text": "Some subsubtext", + "embedding_inline": { + "model": "openai_text_embedding_3_small_1536", + "vector": [ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + } + } ] """ And user user1 upserts these concepts to space s1: diff --git a/packages/database/features/addContent.feature 
b/packages/database/features/addContent.feature index b2a647824..19c025349 100644 --- a/packages/database/features/addContent.feature +++ b/packages/database/features/addContent.feature @@ -14,7 +14,14 @@ Feature: Content access Scenario Outline: Calling the upsert steps separately When user user1 upserts these documents to space s1: """json - [{ "source_local_id": "page1_uid", "created": "2000/01/01", "last_modified": "2001/01/02", "author_local_id":"user1"}] + [ + { + "source_local_id": "page1_uid", + "created": "2000/01/01", + "last_modified": "2001/01/02", + "author_local_id": "user1" + } + ] """ And user user1 upserts this content to space s1: """json @@ -37,11 +44,83 @@ Feature: Content access "last_modified": "2001/01/02", "text": "Some text" }, - { "author_local_id": "user2", "document_local_id":"page1_uid", "space_local_id": "s1", "source_local_id": "s2", "scale":"document", "created": "2000/01/02", "last_modified": "2001/01/03", "part_of_local_id":"s1", "text": "Some subtext" }, { - "space_local_id": "s1", "author_local_id": "user2", "document_inline": { "source_local_id": "page1_uid", "space_local_id": "s1", "created": "2000/01/01", "last_modified": "2001/01/02", "author_local_id":"user2"}, "source_local_id": "s3", "scale": "document", "created": "2000/01/02", "last_modified": "2001/01/03", "part_of_local_id": "s2", "text": "Some subsubtext", "embedding_inline": { - "model":"openai_text_embedding_3_small_1536", "vector":[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] - } } + "author_local_id": "user2", + "document_local_id": "page1_uid", + "space_local_id": "s1", + "source_local_id": "s2", + "scale": "document", + "created": "2000/01/02", + "last_modified": "2001/01/03", + "part_of_local_id": "s1", + "text": "Some subtext" + }, + { + "space_local_id": "s1", + "author_local_id": "user2", + "document_inline": { + "source_local_id": "page1_uid", + "space_local_id": "s1", + "created": "2000/01/01", + "last_modified": "2001/01/02", + "author_local_id": "user2" + }, + "source_local_id": "s3", + "scale": "document", + "created": "2000/01/02", + "last_modified": "2001/01/03", + "part_of_local_id": "s2", + "text": "Some subsubtext", + "embedding_inline": { + "model": "openai_text_embedding_3_small_1536", + "vector": [ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + } + } ] """ Then a user logged in space s1 should see 3 PlatformAccount in the database diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index c96b2d411..eabf8e8c0 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -90,7 +90,7 @@ Feature: Concept upsert | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | - # Note that the node is related to itself, unfortunate but hard to solve. + # Note that the node is related to itself, unfortunate but hard to solve. Scenario Outline: Query by author of related node And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"inRelsToNodesOfAuthor":"user3","relationFields":["id"],"relationToNodeFields":["id"]}' diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 7e04d78a6..4c7532346 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -2,7 +2,12 @@ import assert from "assert"; import { Given, When, Then, world, type DataTable } from "@cucumber/cucumber"; import { createClient } from "@supabase/supabase-js"; -import { Constants, type Database, type Enums, type Json } from "@repo/database/dbTypes"; +import { + Constants, + type Database, + type Enums, + type Json, +} from "@repo/database/dbTypes"; import { getVariant, config } from "@repo/database/dbDotEnv"; import { getNodes, initNodeSchemaCache } from "@repo/database/lib/queries"; @@ -13,7 +18,7 @@ import { } from "@repo/database/lib/contextFunctions"; type Platform = Enums<"Platform">; -type TableName = keyof Database["public"]["Tables"] +type TableName = keyof Database["public"]["Tables"]; const PLATFORMS: readonly Platform[] = Constants.public.Enums.Platform; if (getVariant() === "production") { @@ -43,7 +48,7 @@ const getServiceClient = () => { } return createClient( process.env.SUPABASE_URL, - process.env.SUPABASE_SERVICE_ROLE_KEY, // eslint-disable-line turboPlugin/no-undeclared-env-vars + process.env.SUPABASE_SERVICE_ROLE_KEY, // eslint-disable-line turboPlugin/no-undeclared-env-vars ); }; @@ -71,28 +76,29 @@ Given("the database is blank", async () => { const substituteLocalReferences = ( obj: any, localRefs: Record, - prefixValue: boolean = false + prefixValue: boolean = false, ): any => { const substituteLocalReferencesRec = (v: any): any => { - if (v === undefined 
|| v === null) - return v; + if (v === undefined || v === null) return v; if (typeof v === "string") { if (prefixValue) - return (v.charAt(0) === '@') ? localRefs[v.substring(1)] : v; - else - return localRefs[v]; + return v.charAt(0) === "@" ? localRefs[v.substring(1)] : v; + else return localRefs[v]; } if (typeof v === "number" || typeof v === "boolean") return v; if (Array.isArray(v)) return v.map(substituteLocalReferencesRec); if (typeof v === "object") return Object.fromEntries( - Object.entries(v as object).map(([k, v]) => [k, substituteLocalReferencesRec(v)]), + Object.entries(v as object).map(([k, v]) => [ + k, + substituteLocalReferencesRec(v), + ]), ); console.error("could not substitute", typeof v, v); return v; }; return substituteLocalReferencesRec(obj); -} +}; const substituteLocalReferencesRow = ( row: Record, @@ -113,7 +119,7 @@ const substituteLocalReferencesRow = ( const result = Object.fromEntries( Object.entries(row) - .filter(([k,]: [string, string]) => k.charAt(0) !== "$") + .filter(([k]: [string, string]) => k.charAt(0) !== "$") .map(processKV), ); return result; @@ -140,7 +146,11 @@ Given( const localIndexName: string = defIndex[0]!; // do not allow to redefine values assert.strictEqual( - values.filter((v) => (typeof v[localIndexName] === "string")?(localRefs[v[localIndexName]] !== undefined):false).length, + values.filter((v) => + typeof v[localIndexName] === "string" + ? localRefs[v[localIndexName]] !== undefined + : false, + ).length, 0, ); if (defIndex.length) { @@ -173,7 +183,7 @@ When( async (userAccountId: string, platform: Platform, spaceName: string) => { // assumption: turbo dev is running. TODO: Make into hooks if (PLATFORMS.indexOf(platform) < 0) - throw new Error(`Platform must be one of ${PLATFORMS.join(', ')}`); + throw new Error(`Platform must be one of ${PLATFORMS.join(", ")}`); const localRefs = (world.localRefs || {}) as Record; const spaceResponse = await fetchOrCreateSpaceDirect({ password: SPACE_ANONYMOUS_PASSWORD, @@ -233,7 +243,7 @@ Then( async (spaceName: string, tableName: TableName) => { const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) @@ -247,7 +257,7 @@ Then( async (spaceName: string, expectedCount: number, tableName: TableName) => { const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const client = await getLoggedinDatabase(spaceId); const response = await client .from(tableName) @@ -262,7 +272,7 @@ Given( const accounts = JSON.parse(accountsString) as Json; const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_accounts_in_space", { space_id_: spaceId, // eslint-disable-line @typescript-eslint/naming-convention @@ -278,7 +288,7 @@ Given( const data = JSON.parse(docString) as Json; const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const client = await 
getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_documents", { v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention @@ -294,9 +304,9 @@ Given( const data = JSON.parse(docString) as Json; const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const userId = localRefs[userName]; - if (userId === undefined) assert.fail('userId'); + if (userId === undefined) assert.fail("userId"); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_content", { v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention @@ -314,7 +324,7 @@ Given( const data = JSON.parse(docString) as Json; const localRefs = (world.localRefs || {}) as Record; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const client = await getLoggedinDatabase(spaceId); const response = await client.rpc("upsert_concepts", { v_space_id: spaceId, // eslint-disable-line @typescript-eslint/naming-convention @@ -328,9 +338,13 @@ Given( "a user logged in space {word} and querying nodes with these parameters: {string}", async (spaceName: string, paramsJ: string) => { const localRefs = (world.localRefs || {}) as Record; - const params = substituteLocalReferences(JSON.parse(paramsJ), localRefs, true) as object; + const params = substituteLocalReferences( + JSON.parse(paramsJ), + localRefs, + true, + ) as object; const spaceId = localRefs[spaceName]; - if (spaceId === undefined) assert.fail('spaceId'); + if (spaceId === undefined) assert.fail("spaceId"); const supabase = await getLoggedinDatabase(spaceId); const nodes = await getNodes({ ...params, supabase, spaceId }); nodes.sort((a, b) => a.id! 
- b.id!); @@ -349,15 +363,13 @@ Then("query results should look like this", (table: DataTable) => { const queryResults = (world.queryResults || []) as object[]; values.sort((a, b) => (a.id as number) - (b.id as number)); assert.deepEqual( - queryResults.map((v) => (v.id as number)), - values.map((v) => (v.id as number)), + queryResults.map((v) => v.id as number), + values.map((v) => v.id as number), ); if (values.length > 0) { const keys = Object.keys(values[0]!); const truncatedResults = queryResults.map((v: object) => - Object.fromEntries( - Object.entries(v).filter(([k,]) => keys.includes(k)), - ), + Object.fromEntries(Object.entries(v).filter(([k]) => keys.includes(k))), ); // console.debug(truncatedResults); assert.deepEqual(truncatedResults, values); diff --git a/packages/database/src/lib/contextFunctions.ts b/packages/database/src/lib/contextFunctions.ts index 8117df59c..d82e0403e 100644 --- a/packages/database/src/lib/contextFunctions.ts +++ b/packages/database/src/lib/contextFunctions.ts @@ -2,10 +2,7 @@ import type { Enums, Tables, TablesInsert } from "@repo/database/dbTypes"; import type { PostgrestSingleResponse } from "@supabase/supabase-js"; import type { FunctionsResponse } from "@supabase/functions-js"; import { nextApiRoot } from "@repo/utils/execContext"; -import { - createClient, - type DGSupabaseClient, -} from "@repo/database/lib/client"; +import { createClient, type DGSupabaseClient } from "@repo/database/lib/client"; export const spaceAnonUserEmail = (platform: string, space_id: number) => `${platform.toLowerCase()}-${space_id}-anon@database.discoursegraphs.com`; @@ -123,8 +120,8 @@ export const createLoggedInClient = async ( platform: Platform, spaceId: number, password: string, -): Promise => { - const loggedInClient: DGSupabaseClient|null = createClient(); +): Promise => { + const loggedInClient: DGSupabaseClient | null = createClient(); if (!loggedInClient) return null; const { error } = await loggedInClient.auth.signInWithPassword({ email: spaceAnonUserEmail(platform, spaceId), diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index b71b680aa..527f952a8 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -31,15 +31,14 @@ const NODE_SCHEMA_CACHE: Record = { }; export const initNodeSchemaCache = () => { - Object.keys(NODE_SCHEMA_CACHE).map(k => { - if (k !== NODE_SCHEMAS) - delete NODE_SCHEMA_CACHE[k]; - }) -} + Object.keys(NODE_SCHEMA_CACHE).map((k) => { + if (k !== NODE_SCHEMAS) delete NODE_SCHEMA_CACHE[k]; + }); +}; export type PDocument = Partial>; export type PContent = Partial> & { - Document: PDocument | null; // eslint-disable-line @typescript-eslint/naming-convention + Document: PDocument | null; // eslint-disable-line @typescript-eslint/naming-convention }; export type PConcept = Partial> & { Content: PContent | null; // eslint-disable-line @typescript-eslint/naming-convention @@ -127,8 +126,7 @@ const composeQuery = ({ if (inRelsToNodeLocalIds !== undefined) args2.push("Content!inner(source_local_id)"); if (inRelsToNodesOfAuthor !== undefined) { - if (!args2.includes("author_id")) - args2.push('author_id') + if (!args2.includes("author_id")) args2.push("author_id"); args2.push("author:author_id!inner(account_local_id)"); } args.push(`subnodes:concepts_of_relation!inner(${args2.join(",\n")})`); @@ -165,10 +163,16 @@ const composeQuery = ({ if (inRelsToNodesOfType !== undefined && inRelsToNodesOfType.length > 0) query = query.in("relations.subnodes.schema_id", 
inRelsToNodesOfType); if (inRelsToNodesOfAuthor !== undefined) { - query = query.eq("relations.subnodes.author.account_local_id", inRelsToNodesOfAuthor); + query = query.eq( + "relations.subnodes.author.account_local_id", + inRelsToNodesOfAuthor, + ); } if (inRelsToNodeLocalIds !== undefined) { - query = query.in("relations.subnodes.Content.source_local_id", inRelsToNodeLocalIds); + query = query.in( + "relations.subnodes.Content.source_local_id", + inRelsToNodeLocalIds, + ); } // console.debug(query); return query; @@ -190,7 +194,9 @@ export const getNodeSchemas = async ( console.error("getNodeSchemas failed", res.error); return [NODE_SCHEMA_CACHE[NODE_SCHEMAS] as NodeSignature]; } - Object.assign(NODE_SCHEMA_CACHE, Object.fromEntries( + Object.assign( + NODE_SCHEMA_CACHE, + Object.fromEntries( res.data.map((x) => [ x.Content.source_local_id, { @@ -200,7 +206,8 @@ export const getNodeSchemas = async ( name: x.name, }, ]), - )); + ), + ); result = Object.values(NODE_SCHEMA_CACHE) .filter((x) => typeof x === "object") .filter((x) => x.spaceId === spaceId || x.spaceId === 0); @@ -227,7 +234,7 @@ const getLocalToDbIdMapping = async ( const numMissing = Object.values(dbIds).filter((x) => x === null).length; if (numMissing === 0) return dbIds; const previousMisses = Object.fromEntries( - partialResult.filter(([,v]) => typeof v === "number"), + partialResult.filter(([, v]) => typeof v === "number"), ) as Record; const numPreviousMisses = Object.values(previousMisses).length; const now = Date.now(); @@ -250,7 +257,9 @@ const getLocalToDbIdMapping = async ( console.error("could not get db Ids", res.error); return dbIds; } - Object.assign(NODE_SCHEMA_CACHE, Object.fromEntries( + Object.assign( + NODE_SCHEMA_CACHE, + Object.fromEntries( res.data.map((x) => [ x.Content.source_local_id, { @@ -260,7 +269,8 @@ const getLocalToDbIdMapping = async ( name: x.name, }, ]), - )); + ), + ); for (const localId of localLocalIds) { if (typeof NODE_SCHEMA_CACHE[localId] !== "object") NODE_SCHEMA_CACHE[localId] = now; From 97f5577c5482da07c0d93162061d3a15601c48a3 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Mon, 6 Oct 2025 10:16:34 -0400 Subject: [PATCH 08/15] better TS --- .../database/features/step-definitions/stepdefs.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 4c7532346..70fe145f1 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -352,19 +352,21 @@ Given( }, ); +type ObjectWithId = object & { id: number }; + Then("query results should look like this", (table: DataTable) => { const localRefs = (world.localRefs || {}) as Record; const rows = table.hashes(); - const values: object[] = rows.map((r) => + const values = rows.map((r) => substituteLocalReferencesRow(r, localRefs), - ); + ) as ObjectWithId[]; // console.debug(values); // console.debug(JSON.stringify(world.queryResults, null, 2)); - const queryResults = (world.queryResults || []) as object[]; - values.sort((a, b) => (a.id as number) - (b.id as number)); + const queryResults = (world.queryResults || []) as ObjectWithId[]; + values.sort((a, b) => a.id - b.id); assert.deepEqual( - queryResults.map((v) => v.id as number), - values.map((v) => v.id as number), + queryResults.map((v) => v.id), + values.map((v) => v.id), ); if (values.length > 0) { const keys = Object.keys(values[0]!); From 
6a24a37dc156545e7532c98f42f6a67b00b00c8c Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Tue, 7 Oct 2025 09:30:08 -0400 Subject: [PATCH 09/15] comment, rename, clarify --- .../src/components/settings/AdminPanel.tsx | 12 +- .../database/features/queryConcepts.feature | 34 ++-- .../features/step-definitions/stepdefs.ts | 7 +- packages/database/src/lib/queries.ts | 156 ++++++++++-------- 4 files changed, 118 insertions(+), 91 deletions(-) diff --git a/apps/roam/src/components/settings/AdminPanel.tsx b/apps/roam/src/components/settings/AdminPanel.tsx index b21c78bd9..8e47b674c 100644 --- a/apps/roam/src/components/settings/AdminPanel.tsx +++ b/apps/roam/src/components/settings/AdminPanel.tsx @@ -7,8 +7,8 @@ import { SupabaseContext, } from "~/utils/supabaseContext"; import { - getNodes, - getNodeSchemas, + getConcepts, + getSchemaConcepts, nodeSchemaSignature, type NodeSignature, type PConcept, @@ -130,10 +130,10 @@ const AdminPanel = () => { void (async () => { if (!ignore && supabase !== null && context !== null) { try { - setSchemas(await getNodeSchemas(supabase, context.spaceId)); + setSchemas(await getSchemaConcepts(supabase, context.spaceId)); } catch (e) { setError((e as Error).message); - console.error("getNodeSchemas failed", e); + console.error("getSchemaConcepts failed", e); } finally { setLoading(false); } @@ -157,7 +157,7 @@ const AdminPanel = () => { try { setLoadingNodes(true); setNodes( - await getNodes({ + await getConcepts({ supabase, spaceId, schemaLocalIds: showingSchema.sourceLocalId, @@ -165,7 +165,7 @@ const AdminPanel = () => { ); } catch (e) { setError((e as Error).message); - console.error("getNodes failed", e); + console.error("getConcepts failed", e); } finally { setLoadingNodes(false); } diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index eabf8e8c0..d29fc0db6 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -1,4 +1,4 @@ -Feature: Concept upsert +Feature: Concept queries User story: * As a user of the Roam plugin * Logged in through a given space's anonymous account @@ -15,16 +15,16 @@ Feature: Concept upsert And the user user3 opens the Roam plugin in space s1 And Document are added to the database: | $id | source_local_id | created | last_modified | _author_id | _space_id | - | d1 | d1 | 2025/01/01 | 2025/01/01 | user1 | s1 | - | d2 | d2 | 2025/01/01 | 2025/01/01 | user1 | s1 | - | d5 | d5 | 2025/01/01 | 2025/01/01 | user2 | s1 | - | d7 | d7 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d1 | ld1 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d2 | ld2 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d5 | ld5 | 2025/01/01 | 2025/01/01 | user2 | s1 | + | d7 | ld7 | 2025/01/01 | 2025/01/01 | user1 | s1 | And Content are added to the database: | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | - | ct1 | ct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | - | ct2 | ct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | - | ct5 | ct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | - | ct7 | ct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct1 | lct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct2 | lct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct5 | lct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | + | ct7 | lct7 | d7 | 
Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | And Concept are added to the database: | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | @@ -42,7 +42,7 @@ Feature: Concept upsert | c9 | opposes 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c8", "source": "c2"} | Scenario Outline: Query all nodes - And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"fetchNodes":null}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":[],"fetchNodes":null}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | @@ -53,7 +53,7 @@ Feature: Concept upsert | c9 | opposes 2 | s1 | user2 | false | c5 | {"target": "c8", "source": "c2"} | Scenario Outline: Query node schemas - And a user logged in space s1 and querying nodes with these parameters: '{"fetchNodes":null}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"fetchNodes":null}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @reference_content | _represented_by_id | | c1 | Claim | s1 | user1 | true | | {} | {} | ct1 | @@ -61,7 +61,7 @@ Feature: Concept upsert | c7 | Hypothesis | s1 | user1 | true | | {} | {} | ct7 | Scenario Outline: Query by node types - And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":["ct1"]}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":["lct1"]}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | @@ -69,7 +69,7 @@ Feature: Concept upsert | c4 | claim 3 | s1 | user3 | false | c1 | {} | {} | Scenario Outline: Query by author - And a user logged in space s1 and querying nodes with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[],"fetchNodes":null}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[],"fetchNodes":null}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | @@ -77,7 +77,7 @@ Feature: Concept upsert | c9 | opposes 2 | s1 | user2 | false | c5 | {} | {"target": "c8", "source": "c2"} | Scenario Outline: Query by relation type - And a user logged in space s1 and querying nodes with these parameters: '{"inRelsOfTypeLocal":["ct5"],"schemaLocalIds":[]}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"inRelsOfTypeLocal":["lct5"],"schemaLocalIds":[]}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | @@ -85,7 +85,7 @@ Feature: Concept upsert | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | Scenario Outline: Query by related node type - And a user logged in space s1 and querying nodes with these parameters: 
'{"inRelsToNodesOfTypeLocal":["ct7"],"schemaLocalIds":[]}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"inRelsToNodesOfTypeLocal":["lct7"],"schemaLocalIds":[]}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | @@ -93,14 +93,14 @@ Feature: Concept upsert # Note that the node is related to itself, unfortunate but hard to solve. Scenario Outline: Query by author of related node - And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"inRelsToNodesOfAuthor":"user3","relationFields":["id"],"relationToNodeFields":["id"]}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":[],"inRelsToNodesOfAuthor":"user3","relationFields":["id"],"relationSubNodesFields":["id"]}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | | c8 | hypothesis 1 | s1 | user3 | false | c7 | {} | {} | Scenario Outline: Query by related node - And a user logged in space s1 and querying nodes with these parameters: '{"schemaLocalIds":[],"inRelsToNodeLocalIds":["ct2"]}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":[],"inRelsToNodeLocalIds":["lct2"]}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 70fe145f1..2c682a712 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -9,7 +9,7 @@ import { type Json, } from "@repo/database/dbTypes"; import { getVariant, config } from "@repo/database/dbDotEnv"; -import { getNodes, initNodeSchemaCache } from "@repo/database/lib/queries"; +import { getConcepts, initNodeSchemaCache } from "@repo/database/lib/queries"; import { spaceAnonUserEmail, @@ -335,7 +335,7 @@ Given( ); Given( - "a user logged in space {word} and querying nodes with these parameters: {string}", + "a user logged in space {word} and calling getConcepts with these parameters: {string}", async (spaceName: string, paramsJ: string) => { const localRefs = (world.localRefs || {}) as Record; const params = substituteLocalReferences( @@ -346,7 +346,8 @@ Given( const spaceId = localRefs[spaceName]; if (spaceId === undefined) assert.fail("spaceId"); const supabase = await getLoggedinDatabase(spaceId); - const nodes = await getNodes({ ...params, supabase, spaceId }); + // note that we supply spaceId and supabase, they do not need to be part of the incoming json + const nodes = await getConcepts({ ...params, supabase, spaceId }); nodes.sort((a, b) => a.id! 
- b.id!); world.queryResults = nodes; }, diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index 527f952a8..f4891eb3c 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -2,7 +2,7 @@ import { PostgrestResponse } from "@supabase/supabase-js"; import type { Tables } from "../dbTypes"; import { DGSupabaseClient } from "./client"; -// the functions you are most likely to use are getNodeSchemas and getNodes. +// the functions you are most likely to use are getSchemaConcepts and getConcepts. type Concept = Tables<"Concept">; type Content = Tables<"Content">; @@ -36,58 +36,69 @@ export const initNodeSchemaCache = () => { }); }; +/* eslint-disable @typescript-eslint/naming-convention */ export type PDocument = Partial>; export type PContent = Partial> & { - Document: PDocument | null; // eslint-disable-line @typescript-eslint/naming-convention + Document?: PDocument | null; }; -export type PConcept = Partial> & { - Content: PContent | null; // eslint-disable-line @typescript-eslint/naming-convention - schema_of_concept: { name: string } | null; // eslint-disable-line @typescript-eslint/naming-convention +export type PAccount = Partial>; +export type PConceptBase = Partial>; +export type PConceptSubNode = PConceptBase & { + Concept?: { source_local_id: string } | null; + author?: { account_local_id: string } | null; +}; +export type PRelConcept = PConceptBase & { + subnodes?: PConceptSubNode[]; +}; + +export type PConceptFull = PConceptBase & { + Content?: PContent | null; + author?: PAccount; + relations?: PRelConcept[]; }; type DefaultQueryShape = { id: number; - space_id: number; // eslint-disable-line @typescript-eslint/naming-convention + space_id: number; name: string; - Content: { source_local_id: string }; // eslint-disable-line @typescript-eslint/naming-convention + Content: { source_local_id: string }; }; +/* eslint-enable @typescript-eslint/naming-convention */ // Utility function to compose a generic query to fetch concepts, content and document. -// - schemaDbIds = 0 → fetch schemas (is_schema = true) -// - schemaDbIds = n → fetch nodes under schema with dbId n (is_schema = false, eq schema_id) -// - schemaDbIds = [] → fetch all nodes (is_schema = false, no filter on schema_id) -// - schemaDbIds = [a,b,...] → fetch nodes under any of those schemas -const composeQuery = ({ +// Arguments are as in getConcepts, except we use numeric db ids of concepts for schemas instead +// their respective content's source_local_id. 
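Since composeConceptQuery is internal and only reachable through the helpers further down, a minimal call sketch may help reviewers; it is not part of the patch, and the numeric ids are invented for illustration.

```ts
// Sketch only: 7 stands in for a Space.id, 42 for the Concept.id of a schema.
const q = composeConceptQuery({
  supabase, // a logged-in DGSupabaseClient
  spaceId: 7,
  schemaDbIds: [42], // numeric Concept ids, unlike getConcepts' schemaLocalIds
  fetchNodes: true, // instances only, no schemas
});
// The returned PostgREST builder is thenable, which is what the callers below
// rely on when they `await q`.
const { data, error } = await q;
if (error) console.error("composeConceptQuery sketch failed", error);
```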
+const composeConceptQuery = ({ supabase, spaceId, - schemaDbIds = 0, - conceptFields = ["id", "name", "space_id"], - contentFields = ["source_local_id"], - documentFields = [], baseNodeLocalIds = [], - nodeAuthor = undefined, + schemaDbIds = 0, fetchNodes = true, + nodeAuthor = undefined, inRelsOfType = undefined, - relationFields = undefined, - relationToNodeFields = undefined, inRelsToNodesOfType = undefined, inRelsToNodesOfAuthor = undefined, inRelsToNodeLocalIds = undefined, + conceptFields = ["id", "name", "space_id"], + contentFields = ["source_local_id"], + documentFields = [], + relationFields = undefined, + relationSubNodesFields = undefined, }: { supabase: DGSupabaseClient; spaceId?: number; schemaDbIds?: number | number[]; - conceptFields?: (keyof Concept)[]; - contentFields?: (keyof Content)[]; - documentFields?: (keyof Document)[]; baseNodeLocalIds?: string[]; - nodeAuthor?: string; fetchNodes?: boolean | null; + nodeAuthor?: string; inRelsOfType?: number[]; - relationFields?: (keyof Concept)[]; - relationToNodeFields?: (keyof Concept)[]; + relationSubNodesFields?: (keyof Concept)[]; inRelsToNodesOfType?: number[]; inRelsToNodesOfAuthor?: string; + conceptFields?: (keyof Concept)[]; + contentFields?: (keyof Content)[]; + documentFields?: (keyof Document)[]; + relationFields?: (keyof Concept)[]; inRelsToNodeLocalIds?: string[]; }) => { let q = conceptFields.join(",\n"); @@ -120,7 +131,7 @@ const composeQuery = ({ inRelsToNodesOfAuthor !== undefined || inRelsToNodeLocalIds !== undefined ) { - const args2: string[] = (relationToNodeFields || []).slice(); + const args2: string[] = (relationSubNodesFields || []).slice(); if (inRelsToNodesOfType !== undefined && !args2.includes("schema_id")) args2.push("schema_id"); if (inRelsToNodeLocalIds !== undefined) @@ -179,7 +190,7 @@ const composeQuery = ({ }; // Obtain basic data for all node schemas in a space, populating the cache. -export const getNodeSchemas = async ( +export const getSchemaConcepts = async ( supabase: DGSupabaseClient, spaceId: number, forceCacheReload: boolean = false, @@ -188,10 +199,10 @@ export const getNodeSchemas = async ( .filter((x) => typeof x === "object") .filter((x) => x.spaceId === spaceId || x.spaceId === 0); if (forceCacheReload || result.length === 1) { - const q = composeQuery({ supabase, spaceId, fetchNodes: null }); + const q = composeConceptQuery({ supabase, spaceId, fetchNodes: null }); const res = (await q) as PostgrestResponse; if (res.error) { - console.error("getNodeSchemas failed", res.error); + console.error("getSchemaConcepts failed", res.error); return [NODE_SCHEMA_CACHE[NODE_SCHEMAS] as NodeSignature]; } Object.assign( @@ -245,7 +256,7 @@ const getLocalToDbIdMapping = async ( console.warn("Cannot populate cache without spaceId"); return dbIds; } - let q = composeQuery({ supabase, spaceId }); + let q = composeConceptQuery({ supabase, spaceId }); if (Object.keys(NODE_SCHEMA_CACHE).length > 1) { // Non-empty cache, query selectively q = q @@ -324,48 +335,58 @@ export const DOCUMENT_FIELDS: (keyof Document)[] = [ "author_id", ]; -// get all nodes that belong to a certain number of schemas. -// This query will return Concept objects, and associated Content and Document, -// according to which fields are requested. Defaults to maximal information. -// Main call options: -// • ALL schemas: schemaLocalIds = "__schemas" (default) -// • ALL nodes (instances): schemaLocalIds = [] -// • Nodes from X,Y schemas: schemaLocalIds = ["localIdX","localIdY",...] 
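The call modes listed in the removed comment survive in the renamed API; a short sketch of the three of them, with an invented spaceId and the `lct1` schema id used by the test fixtures, reads as follows (an illustration, not repository code).

```ts
const spaceId = 7; // illustrative space id

// 1. All node/relation schemas of the space (also warms the schema cache).
const schemas = await getSchemaConcepts(supabase, spaceId);

// 2. All instances, whatever their schema; null fetches nodes and relations alike.
const instances = await getConcepts({
  supabase,
  spaceId,
  schemaLocalIds: [],
  fetchNodes: null,
});

// 3. Instances of specific schemas, addressed by their Content.source_local_id.
const claims = await getConcepts({ supabase, spaceId, schemaLocalIds: ["lct1"] });

console.log(schemas.length, instances.length, claims.length);
```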
-export const getNodes = async ({ - supabase, - spaceId, - schemaLocalIds = NODE_SCHEMAS, - conceptFields = CONCEPT_FIELDS, - contentFields = CONTENT_FIELDS, - documentFields = DOCUMENT_FIELDS, - baseNodeLocalIds = [], - nodeAuthor = undefined, - fetchNodes = true, - inRelsOfTypeLocal = undefined, - relationFields = undefined, - relationToNodeFields = undefined, - inRelsToNodesOfTypeLocal = undefined, - inRelsToNodesOfAuthor = undefined, - inRelsToNodeLocalIds = undefined, +// instrumentation for benchmarking +export let lastQueryDuration: number = 0; + +// Main entry point to query Concepts and related data: +// related sub-objects can be provided as: +// Content, Content.Document, author (PlatformAccount), relations (Concept), +// relations.subnodes (Concept), relations.subnodes.author, relations.subnodes.Content +// Which fields of these subobjects are fetched is controlled by the respective Fields parameters +// (except the last two, which would have just enough data for query filters.) +// If the fields are empty, the sub-object will not be fetched (unless needed for matching query parameters) +// Any parameter called "local" expects platform Ids (source_local_id) of the corresponding Content. +// In the case of node/relation definitions, schema refers to the page Id of the definition. +export const getConcepts = async ({ + supabase, // An instance of a logged-in client + spaceId, // the numeric id of the space being queried + baseNodeLocalIds = [], // If we are specifying the Concepts being queried directly. + schemaLocalIds = NODE_SCHEMAS, // the type of Concepts being queried + // • ALL schemas: schemaLocalIds = NODE_SCHEMAS (default, "__schemas") + // • ALL instances (nodes and/or relations): schemaLocalIds = [] + // • Nodes from X,Y schemas: schemaLocalIds = ["localIdX","localIdY",...] + fetchNodes = true, // are we fetching nodes or relations? 
+ // true for nodes, false for relations, null for both + nodeAuthor = undefined, // filter on Content author + inRelsOfTypeLocal = undefined, // filter on Concepts that participate in a relation of a given type + inRelsToNodesOfTypeLocal = undefined, // filter on Concepts that are in a relation with another node of a given type + inRelsToNodesOfAuthor = undefined, // filter on Concepts that are in a relation with another Concept by a given author + inRelsToNodeLocalIds = undefined, // filter on Concepts that are in relation with a Concept from a given list + conceptFields = CONCEPT_FIELDS, // which fields are returned for the given Concept + contentFields = CONTENT_FIELDS, // which fields are returned for the corresponding Content + documentFields = DOCUMENT_FIELDS, // which fields are returned for the Content's corresponding Document + relationFields = undefined, // which fields are returned for the relation the node is part of + relationSubNodesFields = undefined, // which fields are returned for the other nodes in the relation the target node is part of }: { supabase: DGSupabaseClient; spaceId?: number; - schemaLocalIds?: string | string[]; - conceptFields?: (keyof Concept)[]; - contentFields?: (keyof Content)[]; - documentFields?: (keyof Document)[]; baseNodeLocalIds?: string[]; - nodeAuthor?: string; + schemaLocalIds?: string | string[]; fetchNodes?: boolean | null; + nodeAuthor?: string; inRelsOfTypeLocal?: string[]; - relationFields?: (keyof Concept)[]; - relationToNodeFields?: (keyof Concept)[]; inRelsToNodesOfTypeLocal?: string[]; inRelsToNodesOfAuthor?: string; inRelsToNodeLocalIds?: string[]; -}): Promise => { + conceptFields?: (keyof Concept)[]; + contentFields?: (keyof Content)[]; + documentFields?: (keyof Document)[]; + relationFields?: (keyof Concept)[]; + relationSubNodesFields?: (keyof Concept)[]; +}): Promise => { const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? [schemaLocalIds] : schemaLocalIds; + // translate schema local content Ids to concept database Ids. const localIds = new Set(schemaLocalIdsArray); if (inRelsOfTypeLocal !== undefined) inRelsOfTypeLocal.map((k) => localIds.add(k)); @@ -392,7 +413,7 @@ export const getNodes = async ({ const schemaDbIds = schemaLocalIds === NODE_SCHEMAS ? 
0 : localToDbArray(schemaLocalIdsArray); - const q = composeQuery({ + const q = composeConceptQuery({ supabase, spaceId, baseNodeLocalIds, @@ -404,14 +425,19 @@ export const getNodes = async ({ fetchNodes, inRelsOfType: localToDbArray(inRelsOfTypeLocal), relationFields, - relationToNodeFields, + relationSubNodesFields, inRelsToNodesOfType: localToDbArray(inRelsToNodesOfTypeLocal), inRelsToNodesOfAuthor, inRelsToNodeLocalIds, }); - const { error, data } = (await q) as PostgrestResponse; + const before = Date.now(); + const { error, data } = (await q) as PostgrestResponse; + lastQueryDuration = Date.now() - before; + // benchmarking + // console.debug(lastQueryDuration, q); + if (error) { - console.error("getNodes failed", error); + console.error("getConcepts failed", error); return []; } return data || []; From 5ad8c6efb565a9c4d1b0b2265dbf891a70c31798 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Tue, 7 Oct 2025 10:15:59 -0400 Subject: [PATCH 10/15] correction, limit offset --- .../src/components/settings/AdminPanel.tsx | 10 +++++----- packages/database/src/lib/queries.ts | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/apps/roam/src/components/settings/AdminPanel.tsx b/apps/roam/src/components/settings/AdminPanel.tsx index 8e47b674c..96ef9506a 100644 --- a/apps/roam/src/components/settings/AdminPanel.tsx +++ b/apps/roam/src/components/settings/AdminPanel.tsx @@ -11,11 +11,11 @@ import { getSchemaConcepts, nodeSchemaSignature, type NodeSignature, - type PConcept, + type PConceptFull, } from "@repo/database/lib/queries"; import { DGSupabaseClient } from "@repo/database/lib/client"; -const NodeRow = ({ node }: { node: PConcept }) => { +const NodeRow = ({ node }: { node: PConceptFull }) => { return ( {node.name} @@ -69,7 +69,7 @@ const NodeRow = ({ node }: { node: PConcept }) => { ); }; -const NodeTable = ({ nodes }: { nodes: PConcept[] }) => { +const NodeTable = ({ nodes }: { nodes: PConceptFull[] }) => { return ( @@ -83,7 +83,7 @@ const NodeTable = ({ nodes }: { nodes: PConcept[] }) => { - {nodes.map((node: PConcept) => ( + {nodes.map((node: PConceptFull) => ( ))} @@ -97,7 +97,7 @@ const AdminPanel = () => { const [schemas, setSchemas] = useState([]); const [showingSchema, setShowingSchema] = useState(nodeSchemaSignature); - const [nodes, setNodes] = useState([]); + const [nodes, setNodes] = useState([]); const [loading, setLoading] = useState(true); const [loadingNodes, setLoadingNodes] = useState(true); const [error, setError] = useState(null); diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index f4891eb3c..ce43d5beb 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -84,6 +84,8 @@ const composeConceptQuery = ({ documentFields = [], relationFields = undefined, relationSubNodesFields = undefined, + limit = 100, + offset = 0, }: { supabase: DGSupabaseClient; spaceId?: number; @@ -100,6 +102,8 @@ const composeConceptQuery = ({ documentFields?: (keyof Document)[]; relationFields?: (keyof Concept)[]; inRelsToNodeLocalIds?: string[]; + limit?: number; + offset?: number; }) => { let q = conceptFields.join(",\n"); const innerContent = schemaDbIds === 0 || baseNodeLocalIds.length > 0; @@ -185,6 +189,15 @@ const composeConceptQuery = ({ inRelsToNodeLocalIds, ); } + if (limit > 0 || offset > 0) { + query = query.order('id'); + if (offset > 0) { + limit = Math.min(limit, 1000); + query = query.range(offset, offset+limit); + } else if (limit > 0) { + query = 
query.limit(limit); + } + } // console.debug(query); return query; }; @@ -367,6 +380,8 @@ export const getConcepts = async ({ documentFields = DOCUMENT_FIELDS, // which fields are returned for the Content's corresponding Document relationFields = undefined, // which fields are returned for the relation the node is part of relationSubNodesFields = undefined, // which fields are returned for the other nodes in the relation the target node is part of + limit = 100, // query limit + offset = 0, // query offset }: { supabase: DGSupabaseClient; spaceId?: number; @@ -383,6 +398,8 @@ export const getConcepts = async ({ documentFields?: (keyof Document)[]; relationFields?: (keyof Concept)[]; relationSubNodesFields?: (keyof Concept)[]; + limit?: number; + offset?: number; }): Promise => { const schemaLocalIdsArray = typeof schemaLocalIds === "string" ? [schemaLocalIds] : schemaLocalIds; @@ -429,6 +446,8 @@ export const getConcepts = async ({ inRelsToNodesOfType: localToDbArray(inRelsToNodesOfTypeLocal), inRelsToNodesOfAuthor, inRelsToNodeLocalIds, + limit, + offset, }); const before = Date.now(); const { error, data } = (await q) as PostgrestResponse; From 333c734d5fb9186b221e1c9ed5b80a74738d913c Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Tue, 7 Oct 2025 13:57:22 -0400 Subject: [PATCH 11/15] correction: relation defs in cache --- packages/database/src/lib/queries.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/database/src/lib/queries.ts b/packages/database/src/lib/queries.ts index ce43d5beb..b9d1374ee 100644 --- a/packages/database/src/lib/queries.ts +++ b/packages/database/src/lib/queries.ts @@ -190,10 +190,10 @@ const composeConceptQuery = ({ ); } if (limit > 0 || offset > 0) { - query = query.order('id'); + query = query.order("id"); if (offset > 0) { limit = Math.min(limit, 1000); - query = query.range(offset, offset+limit); + query = query.range(offset, offset + limit); } else if (limit > 0) { query = query.limit(limit); } @@ -269,7 +269,7 @@ const getLocalToDbIdMapping = async ( console.warn("Cannot populate cache without spaceId"); return dbIds; } - let q = composeConceptQuery({ supabase, spaceId }); + let q = composeConceptQuery({ supabase, spaceId, fetchNodes: null }); if (Object.keys(NODE_SCHEMA_CACHE).length > 1) { // Non-empty cache, query selectively q = q @@ -349,7 +349,7 @@ export const DOCUMENT_FIELDS: (keyof Document)[] = [ ]; // instrumentation for benchmarking -export let lastQueryDuration: number = 0; +export const LAST_QUERY_DATA = { duration: 0 }; // Main entry point to query Concepts and related data: // related sub-objects can be provided as: @@ -451,9 +451,9 @@ export const getConcepts = async ({ }); const before = Date.now(); const { error, data } = (await q) as PostgrestResponse; - lastQueryDuration = Date.now() - before; + LAST_QUERY_DATA.duration = Date.now() - before; // benchmarking - // console.debug(lastQueryDuration, q); + // console.debug(LAST_QUERY_DATA.duration, q); if (error) { console.error("getConcepts failed", error); From 8b549c60c77df0da650b88be56c5c2e38511b25f Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Wed, 8 Oct 2025 10:03:02 -0400 Subject: [PATCH 12/15] correction --- packages/database/features/queryConcepts.feature | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index d29fc0db6..dc3a10002 100644 --- a/packages/database/features/queryConcepts.feature +++ 
b/packages/database/features/queryConcepts.feature @@ -69,7 +69,7 @@ Feature: Concept queries | c4 | claim 3 | s1 | user3 | false | c1 | {} | {} | Scenario Outline: Query by author - And a user logged in space s1 and calling getConcepts with these parameters: '{"nodeAuthor":["user2"],"schemaLocalIds":[],"fetchNodes":null}' + And a user logged in space s1 and calling getConcepts with these parameters: '{"nodeAuthor":"user2","schemaLocalIds":[],"fetchNodes":null}' Then query results should look like this | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c3 | claim 2 | s1 | user2 | false | c1 | {} | {} | From 71e4ccfadcd4c38c130ad55af7b4a37c56576fb4 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Wed, 8 Oct 2025 15:12:03 -0400 Subject: [PATCH 13/15] some more comments --- packages/database/features/queryConcepts.feature | 6 ++++++ .../database/features/step-definitions/stepdefs.ts | 11 ++++++----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index dc3a10002..6f72b0b31 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -13,29 +13,35 @@ Feature: Concept queries And the user user1 opens the Roam plugin in space s1 And the user user2 opens the Roam plugin in space s1 And the user user3 opens the Roam plugin in space s1 + # Add Documents as support for the Content objects + # Note: table syntax is explained in features/step-definitions/stepdefs.ts, look for `added to the database`. And Document are added to the database: | $id | source_local_id | created | last_modified | _author_id | _space_id | | d1 | ld1 | 2025/01/01 | 2025/01/01 | user1 | s1 | | d2 | ld2 | 2025/01/01 | 2025/01/01 | user1 | s1 | | d5 | ld5 | 2025/01/01 | 2025/01/01 | user2 | s1 | | d7 | ld7 | 2025/01/01 | 2025/01/01 | user1 | s1 | + # Add Content as support for the Concept objects, esp. 
schemas And Content are added to the database: | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | | ct1 | lct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | | ct2 | lct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | | ct5 | lct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | | ct7 | lct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + # First add schemas And Concept are added to the database: | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | | c7 | Hypothesis | s1 | user1 | ct7 | 2025/01/01 | 2025/01/01 | true | | {} | {} | + # Then nodes referring to the schemas And Concept are added to the database: | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | _represented_by_id | | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | ct2 | | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | | + # Then relations (which refer to nodes) And Concept are added to the database: | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @_reference_content | | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 2c682a712..45b6811ce 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -128,10 +128,10 @@ const substituteLocalReferencesRow = ( Given( "{word} are added to the database:", async (tableName: TableName, table: DataTable) => { - // generic function to add a bunch of objects. - // Columns prefixed by $ are primary keys, and are not sent to the database, - // but the local value is associated with the database id in world.localRefs. - // Columns prefixed with _ are translated back from local references to db ids. + // generic function to add a bunch of objects to an arbitrary table. + // Columns prefixed by $ are aliases for the primary keys, and are not sent to the database, + // but the alias name is associated with the database id in world.localRefs. + // Columns prefixed with _ are translated back from aliases to db ids. // Columns prefixed with @ are parsed as json values. (Use @ before _) const client = getServiceClient(); const localRefs = (world.localRefs || {}) as Record; @@ -337,6 +337,7 @@ Given( Given( "a user logged in space {word} and calling getConcepts with these parameters: {string}", async (spaceName: string, paramsJ: string) => { + // params are assumed to be Json. Values prefixed with '@' are interpreted as aliases. 
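The alias substitution this comment refers to is easiest to picture with a toy version; the sketch below only illustrates the idea and is not the actual substituteLocalReferences in stepdefs.ts, whose exact rules (including the '@' prefix mentioned here) may differ.

```ts
type JsonValue =
  | string
  | number
  | boolean
  | null
  | JsonValue[]
  | { [key: string]: JsonValue };

// Toy recursion: any string that is a known alias becomes its db id.
const substituteAliasesDemo = (
  value: JsonValue,
  refs: Record<string, number>,
): JsonValue => {
  if (typeof value === "string") {
    const mapped = refs[value];
    return mapped !== undefined ? mapped : value;
  }
  if (Array.isArray(value)) return value.map((v) => substituteAliasesDemo(v, refs));
  if (value !== null && typeof value === "object")
    return Object.fromEntries(
      Object.entries(value).map(
        ([k, v]) => [k, substituteAliasesDemo(v, refs)] as [string, JsonValue],
      ),
    );
  return value;
};

// With refs = { alias1: 3 }, the string "alias1" anywhere in the parsed
// parameters would be replaced by 3 before the getConcepts call.
```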
const localRefs = (world.localRefs || {}) as Record; const params = substituteLocalReferences( JSON.parse(paramsJ), @@ -346,7 +347,7 @@ Given( const spaceId = localRefs[spaceName]; if (spaceId === undefined) assert.fail("spaceId"); const supabase = await getLoggedinDatabase(spaceId); - // note that we supply spaceId and supabase, they do not need to be part of the incoming json + // note that we supply spaceId and supabase, they do not need to be part of the incoming Json const nodes = await getConcepts({ ...params, supabase, spaceId }); nodes.sort((a, b) => a.id! - b.id!); world.queryResults = nodes; From 1aca01b61d4bd44f48c419dfe17916b04d35b162 Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Sun, 12 Oct 2025 14:10:42 -0400 Subject: [PATCH 14/15] Document cucumber table syntax --- packages/database/README.md | 2 ++ packages/database/doc/tests.md | 33 +++++++++++++++++++ .../features/step-definitions/stepdefs.ts | 10 ++++++ 3 files changed, 45 insertions(+) create mode 100644 packages/database/doc/tests.md diff --git a/packages/database/README.md b/packages/database/README.md index 1fc148116..cab6d0d3f 100644 --- a/packages/database/README.md +++ b/packages/database/README.md @@ -71,10 +71,12 @@ If schema changes are deployed to `main` by another developer while you work on There are [cucumber](https://cucumber.io/) scenarios (in `packages/database/features`) to test the flow of database operations. We have not yet automated those tests, but you should run against the local environment when developing the database. You will need to: +1. set `SUPABASE_DB=local` in `packages/database/.env` 1. Run `turbo dev` in one terminal (in the root directory) 2. In another other terminal, `cd` to this directory (`packages/database`) and run the tests with `pnpm run test` Think of adding new tests if appropriate! +Some more details in `doc/tests.md` ## Using local code against your Supabase branch diff --git a/packages/database/doc/tests.md b/packages/database/doc/tests.md new file mode 100644 index 000000000..af03a4a43 --- /dev/null +++ b/packages/database/doc/tests.md @@ -0,0 +1,33 @@ +# Designing cucumber tests + +Cucumber is a harness for the gherkin language, allowing to make tests more legible. The steps are defined as regexp in `features/step-definitions/stepdefs.ts`. Currently, we assume the database is running with `turbo dev` in another terminal. + +Some of test steps were defined to clear the database (`Given the database is blank`) or to put arbitrary data in the tables (`{word} are added to the database:`) which expects the name of a table as argument, and a markdown table for the table data. + +The latter step requires some further explanations: + +A lot of database objects use foreign keys, so we need to refer to numeric database identifiers. Those are defined by the database. To allow this to work, we have a pseudo-column called `$id`, which is a string alias that corresponds to the database numeric `id`. Make sure each value in that column is unique. We keep a dictionary of those aliases to the database numeric `id` in cucumber. When interpreting the table, if any other column is prefixed by a `_`, we will recursively search for strings and from the alias set and replace them with the appropriate database ids. Note that inserts are made in bulk, so you may need to break up your inserts according to dependencies. 
For example: + + * Adding a schema first + `And Concept are added to the database:` + + | $id | name | @is_schema | + | alias1 | Claim | true | + + * Then a concept referring to the schema + `And Concept are added to the database:` + | $id | name | @is_schema | _schema_id | + | alias2 | claim 1 | false | alias1 | + +Also, cucumber treats all columns as strings; if they contain a non-string literal (essentially number, boolean or JSON) you can use the `@` prefix in the column name so the cell value will be parsed as json before sending to the database. (`@` comes before `_` if both are used.) + +Other steps that require explanation: + +* `a user logged in space {word} and calling getConcepts with these parameters: {string}` +* `Then query results should look like this` + +This comes in pairs: The results from the query (whose parameters are defined as json) are checked against a table, using the same syntax as above. Only the columns defined are checked for equivalence. + +* `the user {word} opens the {word} plugin in space {word}`. + +This both creates the space and an account tied to that space. Because tying spaces and accounts goes through an edge function, it is the only good way to do both. diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts index 45b6811ce..c7c8bb903 100644 --- a/packages/database/features/step-definitions/stepdefs.ts +++ b/packages/database/features/step-definitions/stepdefs.ts @@ -178,6 +178,8 @@ Given( const userEmail = (userAccountId: string) => `${userAccountId}@example.com`; +// Invoke the edge function to log an account into a database. +// Use this instead of trying to create spaces directly. When( "the user {word} opens the {word} plugin in space {word}", async (userAccountId: string, platform: Platform, spaceName: string) => { @@ -210,12 +212,14 @@ When( }, ); +// A test of non-empty object count for the named table Then("the database should contain a {word}", async (tableName: TableName) => { const client = getServiceClient(); const response = await client.from(tableName).select("*", { count: "exact" }); assert.notEqual(response.count || 0, 0); }); +// A test of absolute object count for the named table Then( "the database should contain {int} {word}", async (expectedCount: number, tableName: TableName) => { @@ -238,6 +242,7 @@ const getLoggedinDatabase = async (spaceId: number) => { return client; }; +// A test of non-empty object count for the named table, as seen by the user Then( "a user logged in space {word} should see a {word} in the database", async (spaceName: string, tableName: TableName) => { @@ -252,6 +257,7 @@ Then( }, ); +// A test of exact object count for the named table, as seen by the user Then( "a user logged in space {word} should see {int} {word} in the database", async (spaceName: string, expectedCount: number, tableName: TableName) => { @@ -266,6 +272,7 @@ Then( }, ); +// invoke the upsert_accounts_in_space function, expects json Given( "user {word} upserts these accounts to space {word}:", async (userName: string, spaceName: string, accountsString: string) => { @@ -282,6 +289,7 @@ Given( }, ); +// invoke the upsert_documents function, expects json Given( "user {word} upserts these documents to space {word}:", async (userName: string, spaceName: string, docString: string) => { @@ -298,6 +306,7 @@ Given( }, ); +// invoke the upsert_content function, expects json Given( "user {word} upserts this content to space {word}:", async (userName: string, spaceName: 
string, docString: string) => { @@ -318,6 +327,7 @@ Given( }, ); +// invoke the upsert_concepts function, expects json Given( "user {word} upserts these concepts to space {word}:", async (userName: string, spaceName: string, docString: string) => { From d5ee24d9da9519bc2060e6ae25eeab27284e2d1f Mon Sep 17 00:00:00 2001 From: Marc-Antoine Parent Date: Sun, 12 Oct 2025 19:20:59 -0400 Subject: [PATCH 15/15] apply prettier --- packages/database/README.md | 2 +- packages/database/doc/tests.md | 22 ++++++------ .../database/features/contentAccess.feature | 6 ++-- .../database/features/queryConcepts.feature | 36 +++++++++---------- 4 files changed, 33 insertions(+), 33 deletions(-) diff --git a/packages/database/README.md b/packages/database/README.md index cab6d0d3f..41bfc1030 100644 --- a/packages/database/README.md +++ b/packages/database/README.md @@ -73,7 +73,7 @@ There are [cucumber](https://cucumber.io/) scenarios (in `packages/database/feat 1. set `SUPABASE_DB=local` in `packages/database/.env` 1. Run `turbo dev` in one terminal (in the root directory) -2. In another other terminal, `cd` to this directory (`packages/database`) and run the tests with `pnpm run test` +1. In another other terminal, `cd` to this directory (`packages/database`) and run the tests with `pnpm run test` Think of adding new tests if appropriate! Some more details in `doc/tests.md` diff --git a/packages/database/doc/tests.md b/packages/database/doc/tests.md index af03a4a43..4553d4830 100644 --- a/packages/database/doc/tests.md +++ b/packages/database/doc/tests.md @@ -8,26 +8,26 @@ The latter step requires some further explanations: A lot of database objects use foreign keys, so we need to refer to numeric database identifiers. Those are defined by the database. To allow this to work, we have a pseudo-column called `$id`, which is a string alias that corresponds to the database numeric `id`. Make sure each value in that column is unique. We keep a dictionary of those aliases to the database numeric `id` in cucumber. When interpreting the table, if any other column is prefixed by a `_`, we will recursively search for strings and from the alias set and replace them with the appropriate database ids. Note that inserts are made in bulk, so you may need to break up your inserts according to dependencies. For example: - * Adding a schema first - `And Concept are added to the database:` +- Adding a schema first + `And Concept are added to the database:` - | $id | name | @is_schema | - | alias1 | Claim | true | + | $id | name | @is_schema | + | alias1 | Claim | true | - * Then a concept referring to the schema - `And Concept are added to the database:` - | $id | name | @is_schema | _schema_id | - | alias2 | claim 1 | false | alias1 | +- Then a concept referring to the schema + `And Concept are added to the database:` + | $id | name | @is_schema | \_schema_id | + | alias2 | claim 1 | false | alias1 | Also, cucumber treats all columns as strings; if they contain a non-string literal (essentially number, boolean or JSON) you can use the `@` prefix in the column name so the cell value will be parsed as json before sending to the database. (`@` comes before `_` if both are used.) 
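To make the prefix rules above concrete, here is a toy transformation of a single row; it assumes the prefixes are stripped before insertion and is only a sketch, the real logic being the `added to the database` step in stepdefs.ts.

```ts
// Toy sketch of the $, @ and _ column prefixes (handled in that order).
const prepareRowDemo = (
  row: Record<string, string>,
  refs: Record<string, number>, // alias name -> database id
): Record<string, unknown> => {
  const payload: Record<string, unknown> = {};
  for (const [column, cell] of Object.entries(row)) {
    if (column.startsWith("$")) continue; // alias column: recorded in refs, never inserted
    let key = column;
    let value: unknown = cell;
    if (key.startsWith("@")) {
      key = key.slice(1);
      value = JSON.parse(cell); // boolean, number or JSON cell
    }
    if (key.startsWith("_")) {
      key = key.slice(1);
      // replace alias strings by db ids (the real step also recurses into JSON values)
      if (typeof value === "string" && refs[value] !== undefined) value = refs[value];
    }
    payload[key] = value;
  }
  return payload;
};

// prepareRowDemo({ $id: "alias2", name: "claim 1", "@is_schema": "false", _schema_id: "alias1" }, { alias1: 3 })
// returns { name: "claim 1", is_schema: false, schema_id: 3 }
```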
Other steps that require explanation: -* `a user logged in space {word} and calling getConcepts with these parameters: {string}` -* `Then query results should look like this` +- `a user logged in space {word} and calling getConcepts with these parameters: {string}` +- `Then query results should look like this` This comes in pairs: The results from the query (whose parameters are defined as json) are checked against a table, using the same syntax as above. Only the columns defined are checked for equivalence. -* `the user {word} opens the {word} plugin in space {word}`. +- `the user {word} opens the {word} plugin in space {word}`. This both creates the space and an account tied to that space. Because tying spaces and accounts goes through an edge function, it is the only good way to do both. diff --git a/packages/database/features/contentAccess.feature b/packages/database/features/contentAccess.feature index b071974fe..540fb3dce 100644 --- a/packages/database/features/contentAccess.feature +++ b/packages/database/features/contentAccess.feature @@ -18,9 +18,9 @@ Feature: Content access And the user user3 opens the Roam plugin in space s2 And Document are added to the database: | $id | _space_id | source_local_id | _author_id | created | last_modified | - | d1 | s1 | abc | user1 | 2025/01/01 | 2025/01/01 | - | d2 | s1 | def | user2 | 2025/01/01 | 2025/01/01 | - | d3 | s2 | ghi | user3 | 2025/01/01 | 2025/01/01 | + | d1 | s1 | abc | user1 | 2025/01/01 | 2025/01/01 | + | d2 | s1 | def | user2 | 2025/01/01 | 2025/01/01 | + | d3 | s2 | ghi | user3 | 2025/01/01 | 2025/01/01 | Scenario Outline: Per-space document access When the user user1 opens the Roam plugin in space s1 diff --git a/packages/database/features/queryConcepts.feature b/packages/database/features/queryConcepts.feature index 6f72b0b31..15e3b5ffa 100644 --- a/packages/database/features/queryConcepts.feature +++ b/packages/database/features/queryConcepts.feature @@ -17,35 +17,35 @@ Feature: Concept queries # Note: table syntax is explained in features/step-definitions/stepdefs.ts, look for `added to the database`. And Document are added to the database: | $id | source_local_id | created | last_modified | _author_id | _space_id | - | d1 | ld1 | 2025/01/01 | 2025/01/01 | user1 | s1 | - | d2 | ld2 | 2025/01/01 | 2025/01/01 | user1 | s1 | - | d5 | ld5 | 2025/01/01 | 2025/01/01 | user2 | s1 | - | d7 | ld7 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d1 | ld1 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d2 | ld2 | 2025/01/01 | 2025/01/01 | user1 | s1 | + | d5 | ld5 | 2025/01/01 | 2025/01/01 | user2 | s1 | + | d7 | ld7 | 2025/01/01 | 2025/01/01 | user1 | s1 | # Add Content as support for the Concept objects, esp. 
schemas And Content are added to the database: | $id | source_local_id | _document_id | text | created | last_modified | scale | _author_id | _space_id | - | ct1 | lct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | - | ct2 | lct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | - | ct5 | lct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | - | ct7 | lct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct1 | lct1 | d1 | Claim | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct2 | lct2 | d2 | claim 1 | 2025/01/01 | 2025/01/01 | document | user1 | s1 | + | ct5 | lct5 | d5 | Opposes | 2025/01/01 | 2025/01/01 | document | user2 | s1 | + | ct7 | lct7 | d7 | Hypothesis | 2025/01/01 | 2025/01/01 | document | user1 | s1 | # First add schemas And Concept are added to the database: | $id | name | _space_id | _author_id | _represented_by_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | - | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | - | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | - | c7 | Hypothesis | s1 | user1 | ct7 | 2025/01/01 | 2025/01/01 | true | | {} | {} | + | c1 | Claim | s1 | user1 | ct1 | 2025/01/01 | 2025/01/01 | true | | {} | {} | + | c5 | Opposes | s1 | user1 | ct5 | 2025/01/01 | 2025/01/01 | true | | {"roles": ["target", "source"]} | {} | + | c7 | Hypothesis | s1 | user1 | ct7 | 2025/01/01 | 2025/01/01 | true | | {} | {} | # Then nodes referring to the schemas And Concept are added to the database: | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @reference_content | _represented_by_id | - | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | ct2 | - | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | - | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | - | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | | + | c2 | claim 1 | s1 | user1 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | ct2 | + | c3 | claim 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | + | c4 | claim 3 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c1 | {} | {} | | + | c8 | hypothesis 1 | s1 | user3 | 2025/01/01 | 2025/01/01 | false | c7 | {} | {} | | # Then relations (which refer to nodes) And Concept are added to the database: | $id | name | _space_id | _author_id | created | last_modified | @is_schema | _schema_id | @literal_content | @_reference_content | - | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | - | c9 | opposes 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c8", "source": "c2"} | + | c6 | opposes 1 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c3", "source": "c2"} | + | c9 | opposes 2 | s1 | user2 | 2025/01/01 | 2025/01/01 | false | c5 | {} | {"target": "c8", "source": "c2"} | Scenario Outline: Query all nodes And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":[],"fetchNodes":null}' @@ -96,8 +96,8 @@ Feature: Concept queries | _id | name | _space_id | _author_id | @is_schema | _schema_id | @literal_content | @_reference_content | | c2 | claim 1 | s1 | user1 | false | c1 | {} | {} | | c8 | hypothesis 1 | s1 | user3 | false 
| c7 | {} | {} | - # Note that the node is related to itself, unfortunate but hard to solve. + # Note that the node is related to itself, unfortunate but hard to solve. Scenario Outline: Query by author of related node And a user logged in space s1 and calling getConcepts with these parameters: '{"schemaLocalIds":[],"inRelsToNodesOfAuthor":"user3","relationFields":["id"],"relationSubNodesFields":["id"]}' Then query results should look like this