diff --git a/clients/typescript/src/_generated/protocol/satellite.ts b/clients/typescript/src/_generated/protocol/satellite.ts index 772bcdbc12..1f4611eef6 100644 --- a/clients/typescript/src/_generated/protocol/satellite.ts +++ b/clients/typescript/src/_generated/protocol/satellite.ts @@ -679,9 +679,14 @@ export interface SatShapeDataEnd { */ export interface SatPerms { $type: "Electric.Satellite.SatPerms"; + /** + * this id is the id of the user permissions, this struct is the user + * permissions fused with the global permissions at that point in time + */ id: Long; + userId: string; rules: SatPerms_Rules | undefined; - roles: SatPerms_Roles | undefined; + roles: SatPerms_Role[]; } export enum SatPerms_Privilege { @@ -718,7 +723,7 @@ export interface SatPerms_Path { export interface SatPerms_Scope { $type: "Electric.Satellite.SatPerms.Scope"; table: SatPerms_Table | undefined; - id: string; + id: string[]; } export interface SatPerms_RoleName { @@ -727,18 +732,33 @@ export interface SatPerms_RoleName { application?: string | undefined; } +export interface SatPerms_ColumnList { + $type: "Electric.Satellite.SatPerms.ColumnList"; + names: string[]; +} + export interface SatPerms_Grant { $type: "Electric.Satellite.SatPerms.Grant"; id: string; table: SatPerms_Table | undefined; role: SatPerms_RoleName | undefined; - privileges: SatPerms_Privilege[]; - columns: string[]; + privilege: SatPerms_Privilege; + columns?: SatPerms_ColumnList | undefined; scope?: SatPerms_Table | undefined; path?: SatPerms_Path | undefined; check?: string | undefined; } +export interface SatPerms_Revoke { + $type: "Electric.Satellite.SatPerms.Revoke"; + id: string; + table: SatPerms_Table | undefined; + role: SatPerms_RoleName | undefined; + privilege: SatPerms_Privilege; + scope?: SatPerms_Table | undefined; + path?: SatPerms_Path | undefined; +} + export interface SatPerms_Assign { $type: "Electric.Satellite.SatPerms.Assign"; id: string; @@ -750,27 +770,56 @@ export interface 
SatPerms_Assign { if?: string | undefined; } +export interface SatPerms_Unassign { + $type: "Electric.Satellite.SatPerms.Unassign"; + id: string; + table: SatPerms_Table | undefined; + userColumn?: string | undefined; + roleColumn?: string | undefined; + roleName?: string | undefined; + scope?: SatPerms_Table | undefined; +} + +export interface SatPerms_Sqlite { + $type: "Electric.Satellite.SatPerms.Sqlite"; + stmt: string; +} + export interface SatPerms_Role { $type: "Electric.Satellite.SatPerms.Role"; - id: string; + rowId: string[]; role: string; userId: string; assignId: string; scope?: SatPerms_Scope | undefined; } +export interface SatPerms_DDLX { + $type: "Electric.Satellite.SatPerms.DDLX"; + grants: SatPerms_Grant[]; + revokes: SatPerms_Revoke[]; + assigns: SatPerms_Assign[]; + unassigns: SatPerms_Unassign[]; + sqlite: SatPerms_Sqlite[]; +} + /** * split the rules and roles info into distinct messages so they can be * serialized separately */ export interface SatPerms_Rules { $type: "Electric.Satellite.SatPerms.Rules"; + id: Long; + parentId?: Long | undefined; grants: SatPerms_Grant[]; assigns: SatPerms_Assign[]; } export interface SatPerms_Roles { $type: "Electric.Satellite.SatPerms.Roles"; + id: Long; + parentId?: Long | undefined; + rulesId: Long; roles: SatPerms_Role[]; } @@ -4263,7 +4312,7 @@ export const SatShapeDataEnd = { messageTypeRegistry.set(SatShapeDataEnd.$type, SatShapeDataEnd); function createBaseSatPerms(): SatPerms { - return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, rules: undefined, roles: undefined }; + return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, userId: "", rules: undefined, roles: [] }; } export const SatPerms = { @@ -4273,11 +4322,14 @@ export const SatPerms = { if (!message.id.isZero()) { writer.uint32(8).int64(message.id); } + if (message.userId !== "") { + writer.uint32(18).string(message.userId); + } if (message.rules !== undefined) { SatPerms_Rules.encode(message.rules, 
writer.uint32(26).fork()).ldelim(); } - if (message.roles !== undefined) { - SatPerms_Roles.encode(message.roles, writer.uint32(34).fork()).ldelim(); + for (const v of message.roles) { + SatPerms_Role.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, @@ -4296,6 +4348,13 @@ export const SatPerms = { message.id = reader.int64() as Long; continue; + case 2: + if (tag !== 18) { + break; + } + + message.userId = reader.string(); + continue; case 3: if (tag !== 26) { break; @@ -4308,7 +4367,7 @@ export const SatPerms = { break; } - message.roles = SatPerms_Roles.decode(reader, reader.uint32()); + message.roles.push(SatPerms_Role.decode(reader, reader.uint32())); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4326,12 +4385,11 @@ export const SatPerms = { fromPartial, I>>(object: I): SatPerms { const message = createBaseSatPerms(); message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.ZERO; + message.userId = object.userId ?? ""; message.rules = (object.rules !== undefined && object.rules !== null) ? SatPerms_Rules.fromPartial(object.rules) : undefined; - message.roles = (object.roles !== undefined && object.roles !== null) - ? 
SatPerms_Roles.fromPartial(object.roles) - : undefined; + message.roles = object.roles?.map((e) => SatPerms_Role.fromPartial(e)) || []; return message; }, }; @@ -4513,7 +4571,7 @@ export const SatPerms_Path = { messageTypeRegistry.set(SatPerms_Path.$type, SatPerms_Path); function createBaseSatPerms_Scope(): SatPerms_Scope { - return { $type: "Electric.Satellite.SatPerms.Scope", table: undefined, id: "" }; + return { $type: "Electric.Satellite.SatPerms.Scope", table: undefined, id: [] }; } export const SatPerms_Scope = { @@ -4523,8 +4581,8 @@ export const SatPerms_Scope = { if (message.table !== undefined) { SatPerms_Table.encode(message.table, writer.uint32(10).fork()).ldelim(); } - if (message.id !== "") { - writer.uint32(18).string(message.id); + for (const v of message.id) { + writer.uint32(18).string(v!); } return writer; }, @@ -4548,7 +4606,7 @@ export const SatPerms_Scope = { break; } - message.id = reader.string(); + message.id.push(reader.string()); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4568,7 +4626,7 @@ export const SatPerms_Scope = { message.table = (object.table !== undefined && object.table !== null) ? SatPerms_Table.fromPartial(object.table) : undefined; - message.id = object.id ?? ""; + message.id = object.id?.map((e) => e) || []; return message; }, }; @@ -4636,14 +4694,64 @@ export const SatPerms_RoleName = { messageTypeRegistry.set(SatPerms_RoleName.$type, SatPerms_RoleName); +function createBaseSatPerms_ColumnList(): SatPerms_ColumnList { + return { $type: "Electric.Satellite.SatPerms.ColumnList", names: [] }; +} + +export const SatPerms_ColumnList = { + $type: "Electric.Satellite.SatPerms.ColumnList" as const, + + encode(message: SatPerms_ColumnList, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.names) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_ColumnList { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSatPerms_ColumnList(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 5: + if (tag !== 42) { + break; + } + + message.names.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + create, I>>(base?: I): SatPerms_ColumnList { + return SatPerms_ColumnList.fromPartial(base ?? {}); + }, + + fromPartial, I>>(object: I): SatPerms_ColumnList { + const message = createBaseSatPerms_ColumnList(); + message.names = object.names?.map((e) => e) || []; + return message; + }, +}; + +messageTypeRegistry.set(SatPerms_ColumnList.$type, SatPerms_ColumnList); + function createBaseSatPerms_Grant(): SatPerms_Grant { return { $type: "Electric.Satellite.SatPerms.Grant", id: "", table: undefined, role: undefined, - privileges: [], - columns: [], + privilege: 0, + columns: undefined, scope: undefined, path: undefined, check: undefined, @@ -4663,13 +4771,11 @@ export const SatPerms_Grant = { if (message.role !== undefined) { SatPerms_RoleName.encode(message.role, writer.uint32(26).fork()).ldelim(); } - writer.uint32(34).fork(); - for (const v of message.privileges) { - writer.int32(v); + if (message.privilege !== 0) { + writer.uint32(32).int32(message.privilege); } - writer.ldelim(); - for (const v of message.columns) { - writer.uint32(42).string(v!); + if (message.columns !== undefined) { + SatPerms_ColumnList.encode(message.columns, writer.uint32(42).fork()).ldelim(); } if (message.scope !== undefined) { SatPerms_Table.encode(message.scope, writer.uint32(50).fork()).ldelim(); @@ -4712,28 +4818,18 @@ export const SatPerms_Grant = { message.role = SatPerms_RoleName.decode(reader, reader.uint32()); continue; case 4: - if (tag === 32) { - message.privileges.push(reader.int32() as any); - - continue; - } - - if (tag === 34) { - 
const end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.privileges.push(reader.int32() as any); - } - - continue; + if (tag !== 32) { + break; } - break; + message.privilege = reader.int32() as any; + continue; case 5: if (tag !== 42) { break; } - message.columns.push(reader.string()); + message.columns = SatPerms_ColumnList.decode(reader, reader.uint32()); continue; case 6: if (tag !== 50) { @@ -4778,8 +4874,10 @@ export const SatPerms_Grant = { message.role = (object.role !== undefined && object.role !== null) ? SatPerms_RoleName.fromPartial(object.role) : undefined; - message.privileges = object.privileges?.map((e) => e) || []; - message.columns = object.columns?.map((e) => e) || []; + message.privilege = object.privilege ?? 0; + message.columns = (object.columns !== undefined && object.columns !== null) + ? SatPerms_ColumnList.fromPartial(object.columns) + : undefined; message.scope = (object.scope !== undefined && object.scope !== null) ? SatPerms_Table.fromPartial(object.scope) : undefined; @@ -4793,6 +4891,127 @@ export const SatPerms_Grant = { messageTypeRegistry.set(SatPerms_Grant.$type, SatPerms_Grant); +function createBaseSatPerms_Revoke(): SatPerms_Revoke { + return { + $type: "Electric.Satellite.SatPerms.Revoke", + id: "", + table: undefined, + role: undefined, + privilege: 0, + scope: undefined, + path: undefined, + }; +} + +export const SatPerms_Revoke = { + $type: "Electric.Satellite.SatPerms.Revoke" as const, + + encode(message: SatPerms_Revoke, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.table !== undefined) { + SatPerms_Table.encode(message.table, writer.uint32(18).fork()).ldelim(); + } + if (message.role !== undefined) { + SatPerms_RoleName.encode(message.role, writer.uint32(26).fork()).ldelim(); + } + if (message.privilege !== 0) { + writer.uint32(32).int32(message.privilege); + } + if (message.scope !== undefined) { + 
SatPerms_Table.encode(message.scope, writer.uint32(50).fork()).ldelim(); + } + if (message.path !== undefined) { + SatPerms_Path.encode(message.path, writer.uint32(58).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_Revoke { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSatPerms_Revoke(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.table = SatPerms_Table.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.role = SatPerms_RoleName.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.privilege = reader.int32() as any; + continue; + case 6: + if (tag !== 50) { + break; + } + + message.scope = SatPerms_Table.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.path = SatPerms_Path.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + create, I>>(base?: I): SatPerms_Revoke { + return SatPerms_Revoke.fromPartial(base ?? {}); + }, + + fromPartial, I>>(object: I): SatPerms_Revoke { + const message = createBaseSatPerms_Revoke(); + message.id = object.id ?? ""; + message.table = (object.table !== undefined && object.table !== null) + ? SatPerms_Table.fromPartial(object.table) + : undefined; + message.role = (object.role !== undefined && object.role !== null) + ? SatPerms_RoleName.fromPartial(object.role) + : undefined; + message.privilege = object.privilege ?? 0; + message.scope = (object.scope !== undefined && object.scope !== null) + ? 
SatPerms_Table.fromPartial(object.scope) + : undefined; + message.path = (object.path !== undefined && object.path !== null) + ? SatPerms_Path.fromPartial(object.path) + : undefined; + return message; + }, +}; + +messageTypeRegistry.set(SatPerms_Revoke.$type, SatPerms_Revoke); + function createBaseSatPerms_Assign(): SatPerms_Assign { return { $type: "Electric.Satellite.SatPerms.Assign", @@ -4922,16 +5141,183 @@ export const SatPerms_Assign = { messageTypeRegistry.set(SatPerms_Assign.$type, SatPerms_Assign); +function createBaseSatPerms_Unassign(): SatPerms_Unassign { + return { + $type: "Electric.Satellite.SatPerms.Unassign", + id: "", + table: undefined, + userColumn: undefined, + roleColumn: undefined, + roleName: undefined, + scope: undefined, + }; +} + +export const SatPerms_Unassign = { + $type: "Electric.Satellite.SatPerms.Unassign" as const, + + encode(message: SatPerms_Unassign, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.table !== undefined) { + SatPerms_Table.encode(message.table, writer.uint32(18).fork()).ldelim(); + } + if (message.userColumn !== undefined) { + writer.uint32(26).string(message.userColumn); + } + if (message.roleColumn !== undefined) { + writer.uint32(34).string(message.roleColumn); + } + if (message.roleName !== undefined) { + writer.uint32(42).string(message.roleName); + } + if (message.scope !== undefined) { + SatPerms_Table.encode(message.scope, writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_Unassign { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSatPerms_Unassign(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.table = SatPerms_Table.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.userColumn = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.roleColumn = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.roleName = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.scope = SatPerms_Table.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + create, I>>(base?: I): SatPerms_Unassign { + return SatPerms_Unassign.fromPartial(base ?? {}); + }, + + fromPartial, I>>(object: I): SatPerms_Unassign { + const message = createBaseSatPerms_Unassign(); + message.id = object.id ?? ""; + message.table = (object.table !== undefined && object.table !== null) + ? SatPerms_Table.fromPartial(object.table) + : undefined; + message.userColumn = object.userColumn ?? undefined; + message.roleColumn = object.roleColumn ?? undefined; + message.roleName = object.roleName ?? undefined; + message.scope = (object.scope !== undefined && object.scope !== null) + ? 
SatPerms_Table.fromPartial(object.scope) + : undefined; + return message; + }, +}; + +messageTypeRegistry.set(SatPerms_Unassign.$type, SatPerms_Unassign); + +function createBaseSatPerms_Sqlite(): SatPerms_Sqlite { + return { $type: "Electric.Satellite.SatPerms.Sqlite", stmt: "" }; +} + +export const SatPerms_Sqlite = { + $type: "Electric.Satellite.SatPerms.Sqlite" as const, + + encode(message: SatPerms_Sqlite, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.stmt !== "") { + writer.uint32(10).string(message.stmt); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_Sqlite { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSatPerms_Sqlite(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.stmt = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + create, I>>(base?: I): SatPerms_Sqlite { + return SatPerms_Sqlite.fromPartial(base ?? {}); + }, + + fromPartial, I>>(object: I): SatPerms_Sqlite { + const message = createBaseSatPerms_Sqlite(); + message.stmt = object.stmt ?? 
""; + return message; + }, +}; + +messageTypeRegistry.set(SatPerms_Sqlite.$type, SatPerms_Sqlite); + function createBaseSatPerms_Role(): SatPerms_Role { - return { $type: "Electric.Satellite.SatPerms.Role", id: "", role: "", userId: "", assignId: "", scope: undefined }; + return { $type: "Electric.Satellite.SatPerms.Role", rowId: [], role: "", userId: "", assignId: "", scope: undefined }; } export const SatPerms_Role = { $type: "Electric.Satellite.SatPerms.Role" as const, encode(message: SatPerms_Role, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.id !== "") { - writer.uint32(10).string(message.id); + for (const v of message.rowId) { + writer.uint32(10).string(v!); } if (message.role !== "") { writer.uint32(18).string(message.role); @@ -4960,7 +5346,7 @@ export const SatPerms_Role = { break; } - message.id = reader.string(); + message.rowId.push(reader.string()); continue; case 2: if (tag !== 18) { @@ -5005,7 +5391,7 @@ export const SatPerms_Role = { fromPartial, I>>(object: I): SatPerms_Role { const message = createBaseSatPerms_Role(); - message.id = object.id ?? ""; + message.rowId = object.rowId?.map((e) => e) || []; message.role = object.role ?? ""; message.userId = object.userId ?? ""; message.assignId = object.assignId ?? 
""; @@ -5018,19 +5404,119 @@ export const SatPerms_Role = { messageTypeRegistry.set(SatPerms_Role.$type, SatPerms_Role); +function createBaseSatPerms_DDLX(): SatPerms_DDLX { + return { $type: "Electric.Satellite.SatPerms.DDLX", grants: [], revokes: [], assigns: [], unassigns: [], sqlite: [] }; +} + +export const SatPerms_DDLX = { + $type: "Electric.Satellite.SatPerms.DDLX" as const, + + encode(message: SatPerms_DDLX, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + SatPerms_Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.revokes) { + SatPerms_Revoke.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.assigns) { + SatPerms_Assign.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.unassigns) { + SatPerms_Unassign.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.sqlite) { + SatPerms_Sqlite.encode(v!, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_DDLX { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSatPerms_DDLX(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.grants.push(SatPerms_Grant.decode(reader, reader.uint32())); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.revokes.push(SatPerms_Revoke.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.assigns.push(SatPerms_Assign.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.unassigns.push(SatPerms_Unassign.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.sqlite.push(SatPerms_Sqlite.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + create, I>>(base?: I): SatPerms_DDLX { + return SatPerms_DDLX.fromPartial(base ?? 
{}); + }, + + fromPartial, I>>(object: I): SatPerms_DDLX { + const message = createBaseSatPerms_DDLX(); + message.grants = object.grants?.map((e) => SatPerms_Grant.fromPartial(e)) || []; + message.revokes = object.revokes?.map((e) => SatPerms_Revoke.fromPartial(e)) || []; + message.assigns = object.assigns?.map((e) => SatPerms_Assign.fromPartial(e)) || []; + message.unassigns = object.unassigns?.map((e) => SatPerms_Unassign.fromPartial(e)) || []; + message.sqlite = object.sqlite?.map((e) => SatPerms_Sqlite.fromPartial(e)) || []; + return message; + }, +}; + +messageTypeRegistry.set(SatPerms_DDLX.$type, SatPerms_DDLX); + function createBaseSatPerms_Rules(): SatPerms_Rules { - return { $type: "Electric.Satellite.SatPerms.Rules", grants: [], assigns: [] }; + return { $type: "Electric.Satellite.SatPerms.Rules", id: Long.UZERO, parentId: undefined, grants: [], assigns: [] }; } export const SatPerms_Rules = { $type: "Electric.Satellite.SatPerms.Rules" as const, encode(message: SatPerms_Rules, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + if (message.parentId !== undefined) { + writer.uint32(16).uint64(message.parentId); + } for (const v of message.grants) { - SatPerms_Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + SatPerms_Grant.encode(v!, writer.uint32(26).fork()).ldelim(); } for (const v of message.assigns) { - SatPerms_Assign.encode(v!, writer.uint32(18).fork()).ldelim(); + SatPerms_Assign.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, @@ -5043,14 +5529,28 @@ export const SatPerms_Rules = { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (tag !== 10) { + if (tag !== 8) { break; } - message.grants.push(SatPerms_Grant.decode(reader, reader.uint32())); + message.id = reader.uint64() as Long; continue; case 2: - if (tag !== 18) { + if (tag !== 16) { + break; + } + + message.parentId = reader.uint64() as Long; + continue; + case 3: + if (tag !== 
26) { + break; + } + + message.grants.push(SatPerms_Grant.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 34) { break; } @@ -5071,6 +5571,10 @@ export const SatPerms_Rules = { fromPartial, I>>(object: I): SatPerms_Rules { const message = createBaseSatPerms_Rules(); + message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.UZERO; + message.parentId = (object.parentId !== undefined && object.parentId !== null) + ? Long.fromValue(object.parentId) + : undefined; message.grants = object.grants?.map((e) => SatPerms_Grant.fromPartial(e)) || []; message.assigns = object.assigns?.map((e) => SatPerms_Assign.fromPartial(e)) || []; return message; @@ -5080,15 +5584,30 @@ export const SatPerms_Rules = { messageTypeRegistry.set(SatPerms_Rules.$type, SatPerms_Rules); function createBaseSatPerms_Roles(): SatPerms_Roles { - return { $type: "Electric.Satellite.SatPerms.Roles", roles: [] }; + return { + $type: "Electric.Satellite.SatPerms.Roles", + id: Long.UZERO, + parentId: undefined, + rulesId: Long.UZERO, + roles: [], + }; } export const SatPerms_Roles = { $type: "Electric.Satellite.SatPerms.Roles" as const, encode(message: SatPerms_Roles, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + if (message.parentId !== undefined) { + writer.uint32(16).uint64(message.parentId); + } + if (!message.rulesId.isZero()) { + writer.uint32(24).uint64(message.rulesId); + } for (const v of message.roles) { - SatPerms_Role.encode(v!, writer.uint32(18).fork()).ldelim(); + SatPerms_Role.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, @@ -5100,8 +5619,29 @@ export const SatPerms_Roles = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.id = reader.uint64() as Long; + continue; case 2: - if (tag !== 18) { + if (tag !== 16) { + break; + } + + message.parentId = 
reader.uint64() as Long; + continue; + case 3: + if (tag !== 24) { + break; + } + + message.rulesId = reader.uint64() as Long; + continue; + case 4: + if (tag !== 34) { break; } @@ -5122,6 +5662,13 @@ export const SatPerms_Roles = { fromPartial, I>>(object: I): SatPerms_Roles { const message = createBaseSatPerms_Roles(); + message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.UZERO; + message.parentId = (object.parentId !== undefined && object.parentId !== null) + ? Long.fromValue(object.parentId) + : undefined; + message.rulesId = (object.rulesId !== undefined && object.rulesId !== null) + ? Long.fromValue(object.rulesId) + : Long.UZERO; message.roles = object.roles?.map((e) => SatPerms_Role.fromPartial(e)) || []; return message; }, diff --git a/components/electric/config/runtime.exs b/components/electric/config/runtime.exs index ba059876c2..01ff310c99 100644 --- a/components/electric/config/runtime.exs +++ b/components/electric/config/runtime.exs @@ -200,7 +200,8 @@ config :electric, Electric.Features, proxy_ddlx_grant: false, proxy_ddlx_revoke: false, proxy_ddlx_assign: false, - proxy_ddlx_unassign: false + proxy_ddlx_unassign: false, + proxy_ddlx_sqlite: false {:ok, conn_config} = database_url_config diff --git a/components/electric/lib/electric/ddlx/command.ex b/components/electric/lib/electric/ddlx/command.ex index 3d1b48e803..de7adf247e 100644 --- a/components/electric/lib/electric/ddlx/command.ex +++ b/components/electric/lib/electric/ddlx/command.ex @@ -1,31 +1,240 @@ -defprotocol Electric.DDLX.Command do - @spec pg_sql(t()) :: [String.t()] - def pg_sql(command) +defprotocol Electric.DDLX.Command.PgSQL do + @spec to_sql(t()) :: [String.t()] + def to_sql(cmd) +end + +alias Electric.Satellite.SatPerms + +defmodule Electric.DDLX.Command do + alias Electric.DDLX + alias Electric.DDLX.Command.PgSQL + + defstruct [:action, :stmt, :tag, tables: []] + + @type t() :: %__MODULE__{ + action: struct(), + stmt: String.t(), + 
tag: String.t(), + tables: [Electric.Postgres.relation()] + } + + def tag(%__MODULE__{tag: tag}) do + tag + end + + @perms_with_ids [:assigns, :unassigns, :grants, :revokes] + @perms_without_ids [:sqlite] + + def ddlx(cmds) do + ddlx = + Enum.reduce(@perms_with_ids, %SatPerms.DDLX{}, fn type, ddlx -> + Map.update!(ddlx, type, fn [] -> + cmds + |> Keyword.get(type, []) + |> Enum.map(&put_id/1) + end) + end) - @spec table_name(t()) :: String.t() | {String.t(), String.t()} - def table_name(command) + Enum.reduce(@perms_without_ids, ddlx, &Map.put(&2, &1, Keyword.get(cmds, &1, []))) + end + + def put_id(%{id: id} = cmd) when is_struct(cmd) and id in ["", nil] do + Map.put(cmd, :id, command_id(cmd)) + end + + def put_id(cmd) when is_struct(cmd) do + cmd + end - @spec tag(t()) :: String.t() - def tag(command) + def pg_sql(cmd) do + PgSQL.to_sql(cmd) + end + + def table_names(%__MODULE__{tables: tables}), do: tables + + def enabled?(%__MODULE__{action: cmd}) do + command_enabled?(cmd) + end + + def electric_enable({_, _} = table) do + table_name = Electric.Utils.inspect_relation(table) + + %__MODULE__{ + action: %DDLX.Command.Enable{table_name: table_name}, + stmt: "CALL electric.electrify('#{table_name}');", + tag: "ELECTRIC ENABLE", + tables: [table] + } + end + + # shortcut the enable command, which has to be enabled + defp command_enabled?(%DDLX.Command.Enable{}), do: true + defp command_enabled?(%DDLX.Command.Disable{}), do: false + + defp command_enabled?(%SatPerms.DDLX{} = ddlx) do + ddlx + |> command_list() + |> Enum.map(&feature_flag/1) + |> Enum.all?(&Electric.Features.enabled?/1) + end + + def command_list(%SatPerms.DDLX{} = ddlx) do + Stream.concat([ddlx.grants, ddlx.revokes, ddlx.assigns, ddlx.unassigns]) + end + + @feature_flags %{ + SatPerms.Grant => :proxy_ddlx_grant, + SatPerms.Revoke => :proxy_ddlx_revoke, + SatPerms.Assign => :proxy_ddlx_assign, + SatPerms.Unassign => :proxy_ddlx_unassign, + SatPerms.Sqlite => :proxy_ddlx_sqlite + } + + # either we have 
a specific flag for the command or we fallback to the + # default setting for the features module, which is `false` + defp feature_flag(%cmd{}) do + @feature_flags[cmd] || Electric.Features.default_key() + end + + def command_id(%SatPerms.Grant{} = grant) do + hash([ + grant.table, + grant.role, + grant.scope, + grant.privilege + ]) + end + + def command_id(%SatPerms.Revoke{} = revoke) do + hash([ + revoke.table, + revoke.role, + revoke.scope, + revoke.privilege + ]) + end + + def command_id(%SatPerms.Assign{} = assign) do + hash([ + assign.table, + assign.user_column, + assign.role_column, + assign.role_name, + assign.scope + ]) + end - @spec to_protobuf(t()) :: [Electric.Satellite.Protobuf.perms_msg()] | [] - def to_protobuf(command) + def command_id(%SatPerms.Unassign{} = unassign) do + hash([ + unassign.table, + unassign.user_column, + unassign.role_column, + unassign.role_name, + unassign.scope + ]) + end + + def command_id(%SatPerms.Sqlite{} = sqlite) do + hash([ + sqlite.stmt + ]) + end + + # hash the given terms in the struct together. `SHA1` is chosen because it is smaller in terms + # of bytes, rather than for any cryptographic reason. Since the hash/id is used in the naming of + # triggers and tables within pg, a bigger hash, such as `SHA256`, would use too many of the 64 + # available bytes for these pg objects. This is the same reason to use encode32 rather than + # encode16 -- it just eats fewer of the available characters. 
+ defp hash(terms) do + terms + |> Enum.map(&fingerprint/1) + |> Enum.intersperse("\n") + |> then(&:crypto.hash(:sha, &1)) + |> Base.encode32(case: :lower, padding: false) + end + + defp fingerprint(nil) do + <<0>> + end + + defp fingerprint(string) when is_binary(string) do + string + end + + defp fingerprint(%SatPerms.Table{} = table) do + [table.schema, ".", table.name] + end + + defp fingerprint(%SatPerms.RoleName{role: {:predefined, :AUTHENTICATED}}) do + "__electric__.__authenticated__" + end + + defp fingerprint(%SatPerms.RoleName{role: {:predefined, :ANYONE}}) do + "__electric__.__anyone__" + end + + defp fingerprint(%SatPerms.RoleName{role: {:application, role}}) do + role + end + + defp fingerprint(priv) when priv in [:SELECT, :INSERT, :UPDATE, :DELETE] do + to_string(priv) + end + + defimpl Electric.DDLX.Command.PgSQL do + def to_sql(%Electric.DDLX.Command{action: action}) do + PgSQL.to_sql(action) + end + end end -defimpl Electric.DDLX.Command, for: List do - def pg_sql(commands) do - Enum.flat_map(commands, &Electric.DDLX.Command.pg_sql/1) +defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.DDLX do + alias Electric.Postgres.Extension + + def to_sql(%SatPerms.DDLX{} = ddlx) do + Enum.concat([ + serialise_ddlx(ddlx), + ddlx + |> Electric.DDLX.Command.command_list() + |> Enum.flat_map(&Electric.DDLX.Command.PgSQL.to_sql/1) + ]) end - def table_name([cmd]) do - Electric.DDLX.Command.table_name(cmd) + defp serialise_ddlx(ddlx) do + encoded = Protox.encode!(ddlx) |> IO.iodata_to_binary() |> Base.encode16() + + [ + "INSERT INTO #{Extension.ddlx_table()} (ddlx) VALUES ('\\x#{encoded}'::bytea);" + ] end +end - def tag([cmd | _commands]) do - Electric.DDLX.Command.tag(cmd) +defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Grant do + def to_sql(%SatPerms.Grant{} = _grant) do + [] end +end + +defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Revoke do + def to_sql(%SatPerms.Revoke{} = _revoke) do + [] + end +end + +defimpl Electric.DDLX.Command.PgSQL, for: 
SatPerms.Assign do + def to_sql(%SatPerms.Assign{} = _assign) do + [] + end +end + +defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Unassign do + def to_sql(%SatPerms.Unassign{} = _unassign) do + [] + end +end - def to_protobuf(cmds) do - Enum.flat_map(cmds, &Electric.DDLX.Command.to_protobuf/1) +defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Sqlite do + def to_sql(%SatPerms.Sqlite{} = _sqlite) do + [] end end diff --git a/components/electric/lib/electric/ddlx/command/assign.ex b/components/electric/lib/electric/ddlx/command/assign.ex index eb28c11bb1..ad2fa6102e 100644 --- a/components/electric/lib/electric/ddlx/command/assign.ex +++ b/components/electric/lib/electric/ddlx/command/assign.ex @@ -1,27 +1,10 @@ defmodule Electric.DDLX.Command.Assign do alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms import Electric.DDLX.Parser.Build - @type t() :: %__MODULE__{ - table_name: String.t(), - user_column: String.t(), - scope: String.t(), - role_name: String.t(), - role_column: String.t(), - if_statement: String.t() - } - - defstruct [ - :table_name, - :user_column, - :scope, - :role_name, - :role_column, - :if_statement - ] - - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, user_table_schema} <- fetch_attr(params, :user_table_schema, default_schema(opts)), {:ok, user_table_name} <- fetch_attr(params, :user_table_name), {:ok, user_column} <- fetch_attr(params, :user_table_column), @@ -37,59 +20,25 @@ defmodule Electric.DDLX.Command.Assign do attrs = Enum.reduce([scope_attrs, user_attrs, role_attrs], [], &Keyword.merge/2) - {:ok, struct(__MODULE__, attrs)} - end - end - - defimpl Command do - alias Electric.Satellite.SatPerms, as: P - - import Electric.DDLX.Command.Common - - def pg_sql(assign) do - [ - """ - CALL electric.assign( - assign_table_full_name => #{sql_repr(assign.table_name)}, - scope => #{sql_repr(assign.scope)}, - user_column_name => #{sql_repr(assign.user_column)}, - role_name_string => 
#{sql_repr(assign.role_name)}, - role_column_name => #{sql_repr(assign.role_column)}, - if_fn => #{sql_repr(assign.if_statement)} - ); - """ - ] - end - - def table_name(%{table_name: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC ASSIGN" - - def to_protobuf(assign) do - %{table_name: {table_schema, table_name}} = assign - - scope = - case assign do - %{scope: {scope_schema, scope_name}} -> - %P.Table{schema: scope_schema, name: scope_name} - - %{scope: nil} -> - nil - end - - [ - %P.Assign{ - # id: assign.id, - table: %P.Table{schema: table_schema, name: table_name}, - user_column: assign.user_column, - role_column: assign.role_column, - role_name: assign.role_name, - scope: scope, - if: assign.if_statement - } - ] + {:ok, + %Command{ + action: + Command.ddlx( + assigns: [ + %SatPerms.Assign{ + table: protobuf_table(attrs[:table_name]), + user_column: attrs[:user_column], + role_column: attrs[:role_column], + role_name: attrs[:role_name], + scope: protobuf_scope(attrs[:scope]), + if: attrs[:if_statement] + } + ] + ), + stmt: ddlx, + tables: [attrs[:table_name]], + tag: "ELECTRIC ASSIGN" + }} end end end diff --git a/components/electric/lib/electric/ddlx/command/common.ex b/components/electric/lib/electric/ddlx/command/common.ex index e7cc84d27e..0371e9b319 100644 --- a/components/electric/lib/electric/ddlx/command/common.ex +++ b/components/electric/lib/electric/ddlx/command/common.ex @@ -1,6 +1,8 @@ defmodule Electric.DDLX.Command.Common do + alias Electric.Satellite.SatPerms + def sql_repr(nil) do - "null" + "NULL" end def sql_repr(value) when is_binary(value) do @@ -24,6 +26,10 @@ defmodule Electric.DDLX.Command.Common do ~s['"#{schema}"."#{table}"'] end + def sql_repr(%SatPerms.Table{schema: schema, name: name}) do + sql_repr({schema, name}) + end + defp escape_quotes(value) do :binary.replace(value, "'", "''", [:global]) end diff --git a/components/electric/lib/electric/ddlx/command/disable.ex 
b/components/electric/lib/electric/ddlx/command/disable.ex index 5a692007c6..0b9e816dab 100644 --- a/components/electric/lib/electric/ddlx/command/disable.ex +++ b/components/electric/lib/electric/ddlx/command/disable.ex @@ -15,30 +15,30 @@ defmodule Electric.DDLX.Command.Disable do defstruct @keys - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, table_schema} <- fetch_attr(params, :table_schema, default_schema(opts)), {:ok, table_name} <- fetch_attr(params, :table_name) do {:ok, struct(__MODULE__, table_name: {table_schema, table_name})} + + {:ok, + %Command{ + action: struct(__MODULE__, table_name: {table_schema, table_name}), + stmt: ddlx, + tables: [{table_schema, table_name}], + tag: "ELECTRIC DISABLE" + }} end end - defimpl Command do + defimpl Command.PgSQL do import Electric.DDLX.Command.Common - def pg_sql(disable) do + def to_sql(disable) do [ """ CALL electric.disable(#{sql_repr(disable.table_name)}); """ ] end - - def table_name(%{table_name: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC DISABLE" - - def to_protobuf(_), do: [] end end diff --git a/components/electric/lib/electric/ddlx/command/enable.ex b/components/electric/lib/electric/ddlx/command/enable.ex index d5cb86dea0..40d458ff27 100644 --- a/components/electric/lib/electric/ddlx/command/enable.ex +++ b/components/electric/lib/electric/ddlx/command/enable.ex @@ -15,30 +15,28 @@ defmodule Electric.DDLX.Command.Enable do defstruct @keys - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, table_schema} <- fetch_attr(params, :table_schema, default_schema(opts)), {:ok, table_name} <- fetch_attr(params, :table_name) do - {:ok, struct(__MODULE__, table_name: {table_schema, table_name})} + {:ok, + %Command{ + action: struct(__MODULE__, table_name: {table_schema, table_name}), + stmt: ddlx, + tables: [{table_schema, table_name}], + tag: "ELECTRIC ENABLE" + }} end end - defimpl Command do + defimpl Command.PgSQL do import 
Electric.DDLX.Command.Common - def pg_sql(enable) do + def to_sql(enable) do [ """ CALL electric.enable(#{sql_repr(enable.table_name)}); """ ] end - - def table_name(%{table_name: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC ENABLE" - - def to_protobuf(_), do: [] end end diff --git a/components/electric/lib/electric/ddlx/command/error.ex b/components/electric/lib/electric/ddlx/command/error.ex index bc74feb2a0..e74a2ee20f 100644 --- a/components/electric/lib/electric/ddlx/command/error.ex +++ b/components/electric/lib/electric/ddlx/command/error.ex @@ -8,17 +8,15 @@ defmodule Electric.DDLX.Command.Error do message: String.t() } - @keys [ + @enforce_keys [:sql, :message] + + defstruct [ :sql, - :line, - :position, - :message + :message, + line: 0, + position: 0 ] - @enforce_keys @keys - - defstruct @keys - @behaviour Exception @impl Exception @@ -36,17 +34,9 @@ defmodule Electric.DDLX.Command.Error do message end - defimpl Command do - def pg_sql(_) do + defimpl Command.PgSQL do + def to_sql(_) do [] end - - def table_name(_) do - "" - end - - def tag(_), do: "ELECTRIC ERROR" - - def to_protobuf(_), do: [] end end diff --git a/components/electric/lib/electric/ddlx/command/grant.ex b/components/electric/lib/electric/ddlx/command/grant.ex index c9273a1df4..aadb82f9b7 100644 --- a/components/electric/lib/electric/ddlx/command/grant.ex +++ b/components/electric/lib/electric/ddlx/command/grant.ex @@ -1,150 +1,38 @@ defmodule Electric.DDLX.Command.Grant do alias Electric.DDLX.Command - alias Electric.Satellite.SatPerms, as: P + alias Electric.Satellite.SatPerms - import Electric.DDLX.Parser.Build, except: [validate_scope_information: 2] + import Electric.DDLX.Parser.Build - @type t() :: %__MODULE__{ - privileges: [String.t()], - on_table: String.t(), - role: String.t(), - column_names: [String.t()], - scope: String.t(), - using_path: String.t(), - check_fn: String.t() - } - - @keys [ - :privileges, - :on_table, - :role, - :column_names, - :scope, - 
:using_path, - :check_fn - ] - - @enforce_keys @keys - - defstruct @keys - - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, table_schema} <- fetch_attr(params, :table_schema, default_schema(opts)), {:ok, table_name} <- fetch_attr(params, :table_name), - {:ok, column_names} <- fetch_attr(params, :column_names, ["*"]), - {:ok, role_attrs} <- validate_scope_information(params, opts), + {:ok, columns} <- protobuf_columns(Keyword.get(params, :column_names, nil), ddlx), + {:ok, scope} <- validate_scope_information(params, opts), + {:ok, role_name} <- fetch_attr(params, :role_name), {:ok, privileges} <- fetch_attr(params, :privilege), {:ok, using_path} <- fetch_attr(params, :using, nil), {:ok, check_fn} <- fetch_attr(params, :check, nil) do - {role, role_attrs} = Keyword.pop!(role_attrs, :role_name) - scope = Keyword.get(role_attrs, :scope, nil) || "__global__" + grants = + for privilege <- protobuf_privs(privileges) do + %SatPerms.Grant{ + table: protobuf_table(table_schema, table_name), + columns: columns, + role: protobuf_role(role_name), + scope: protobuf_scope(scope[:scope]), + privilege: privilege, + path: using_path, + check: check_fn + } + end {:ok, - struct( - __MODULE__, - on_table: {table_schema, table_name}, - column_names: column_names, - role: role, - scope: scope, - privileges: privileges, - using_path: using_path, - check_fn: check_fn - )} - end - end - - defp validate_scope_information(params, opts) do - with {:ok, role_name} <- fetch_attr(params, :role_name), - {:ok, attrs} <- split_role_def(role_name, opts), - attrs = maybe_add_scope(attrs, params, opts) do - {:ok, attrs} - end - end - - defp maybe_add_scope(attrs, params, opts) do - if !attrs[:scope] do - with {:ok, scope_table_name} <- fetch_attr(params, :scope_table_name) do - {:ok, scope_schema_name} = - fetch_attr(params, :scope_schema_name, default_schema(opts)) - - Keyword.put(attrs, :scope, {scope_schema_name, scope_table_name}) - else - _ -> attrs - end - else - attrs 
- end - end - - defimpl Command do - import Electric.DDLX.Command.Common - - def pg_sql(grant) do - for privilege <- grant.privileges do - """ - CALL electric.grant( - privilege_name => #{sql_repr(privilege)}, - on_table_name => #{sql_repr(grant.on_table)}, - role_name => #{sql_repr(grant.role)}, - columns => #{sql_repr(grant.column_names)}, - scope_name => #{sql_repr(grant.scope)}, - using_path => #{sql_repr(grant.using_path)}, - check_fn => #{sql_repr(grant.check_fn)}); - """ - end - end - - def table_name(%{on_table: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC GRANT" - - def to_protobuf(grant) do - %{on_table: {schema, name}} = grant - - [ - %P.Grant{ - table: %P.Table{schema: schema, name: name}, - role: pb_role(grant.role), - privileges: pb_privs(grant.privileges), - columns: grant.column_names, - scope: scope(grant), - path: grant.using_path, - check: grant.check_fn - } - ] - end - - defp pb_role("__electric__.__authenticated__") do - %P.RoleName{role: {:predefined, :AUTHENTICATED}} - end - - defp pb_role("__electric__.__anyone__") do - %P.RoleName{role: {:predefined, :ANYONE}} - end - - defp pb_role(role) when is_binary(role) do - %P.RoleName{role: {:application, role}} - end - - defp scope(%{scope: {ss, sn}}) do - %P.Table{schema: ss, name: sn} - end - - defp scope(_), do: nil - - defp pb_privs(privs) do - Enum.map(privs, &priv_to_pb/1) - end - - defp priv_to_pb(p) do - case p do - "update" -> :UPDATE - "select" -> :SELECT - "delete" -> :DELETE - "insert" -> :INSERT - end + %Command{ + action: Command.ddlx(grants: grants), + stmt: ddlx, + tables: [{table_schema, table_name}], + tag: "ELECTRIC GRANT" + }} end end end diff --git a/components/electric/lib/electric/ddlx/command/revoke.ex b/components/electric/lib/electric/ddlx/command/revoke.ex index 801fb0e0d0..90a7b6135e 100644 --- a/components/electric/lib/electric/ddlx/command/revoke.ex +++ b/components/electric/lib/electric/ddlx/command/revoke.ex @@ -1,79 +1,33 @@ defmodule 
Electric.DDLX.Command.Revoke do alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms - import Electric.DDLX.Parser.Build, except: [validate_scope_information: 2] + import Electric.DDLX.Parser.Build - @type t() :: %__MODULE__{ - privileges: [String.t()], - on_table: String.t(), - role: String.t(), - column_names: [String.t()], - scope: String.t() - } - - @keys [ - :privileges, - :on_table, - :role, - :column_names, - :scope - ] - - @enforce_keys @keys - - defstruct @keys - - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, table_schema} <- fetch_attr(params, :table_schema, default_schema(opts)), {:ok, table_name} <- fetch_attr(params, :table_name), - {:ok, column_names} <- fetch_attr(params, :column_names, ["*"]), - {:ok, role_attrs} <- validate_scope_information(params, opts), + {:ok, _columns} <- protobuf_columns(Keyword.get(params, :column_names, nil), ddlx), + {:ok, scope} <- validate_scope_information(params, opts), + {:ok, role_name} <- fetch_attr(params, :role_name), {:ok, privileges} <- fetch_attr(params, :privilege) do - {role, role_attrs} = Keyword.pop!(role_attrs, :role_name) - scope = Keyword.get(role_attrs, :scope, nil) || "__global__" + revokes = + for privilege <- protobuf_privs(privileges) do + %SatPerms.Revoke{ + table: protobuf_table(table_schema, table_name), + role: protobuf_role(role_name), + scope: protobuf_scope(scope[:scope]), + privilege: privilege + } + end {:ok, - struct( - __MODULE__, - on_table: {table_schema, table_name}, - column_names: column_names, - role: role, - scope: scope, - privileges: privileges - )} + %Command{ + action: Command.ddlx(revokes: revokes), + stmt: ddlx, + tables: [{table_schema, table_name}], + tag: "ELECTRIC REVOKE" + }} end end - - defp validate_scope_information(params, opts) do - with {:ok, role_name} <- fetch_attr(params, :role_name), - {:ok, attrs} <- split_role_def(role_name, opts) do - {:ok, attrs} - end - end - - defimpl Command do - import Electric.DDLX.Command.Common 
- - def pg_sql(revoke) do - for privilege <- revoke.privileges do - """ - CALL electric.revoke( - #{sql_repr(privilege)}, - #{sql_repr(revoke.on_table)}, - #{sql_repr(revoke.role)}, - #{sql_repr(revoke.column_names)}, - #{sql_repr(revoke.scope)} - ); - """ - end - end - - def table_name(%{on_table: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC REVOKE" - - def to_protobuf(_), do: [] - end end diff --git a/components/electric/lib/electric/ddlx/command/sqlite.ex b/components/electric/lib/electric/ddlx/command/sqlite.ex index 1d6d7f23f5..b46ad3175c 100644 --- a/components/electric/lib/electric/ddlx/command/sqlite.ex +++ b/components/electric/lib/electric/ddlx/command/sqlite.ex @@ -1,45 +1,18 @@ defmodule Electric.DDLX.Command.SQLite do alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms import Electric.DDLX.Parser.Build - @type t() :: %__MODULE__{ - sqlite_statement: String.t() - } - - @keys [ - :sqlite_statement - ] - - @enforce_keys @keys - - defstruct @keys - - def build(params, _opts) do + def build(params, _opts, ddlx) do with {:ok, stmt} <- fetch_attr(params, :statement) do - {:ok, %__MODULE__{sqlite_statement: stmt}} + {:ok, + %Command{ + action: Command.ddlx(sqlite: [%SatPerms.Sqlite{stmt: stmt}]), + stmt: ddlx, + tables: [], + tag: "ELECTRIC SQLITE" + }} end end - - defimpl Command do - import Electric.DDLX.Command.Common - - def pg_sql(sqlite) do - [ - """ - CALL electric.sqlite(sql => #{sql_repr(sqlite.sqlite_statement)}); - """ - ] - end - - def table_name(_) do - "" - end - - def tag(_) do - "" - end - - def to_protobuf(_), do: [] - end end diff --git a/components/electric/lib/electric/ddlx/command/unassign.ex b/components/electric/lib/electric/ddlx/command/unassign.ex index a31c1513b4..97e5c7531d 100644 --- a/components/electric/lib/electric/ddlx/command/unassign.ex +++ b/components/electric/lib/electric/ddlx/command/unassign.ex @@ -1,29 +1,10 @@ defmodule Electric.DDLX.Command.Unassign do alias Electric.DDLX.Command + 
alias Electric.Satellite.SatPerms import Electric.DDLX.Parser.Build - @type t() :: %__MODULE__{ - table_name: String.t(), - user_column: String.t(), - scope: String.t(), - role_name: String.t(), - role_column: String.t() - } - - @keys [ - :table_name, - :user_column, - :scope, - :role_name, - :role_column - ] - - @enforce_keys @keys - - defstruct @keys - - def build(params, opts) do + def build(params, opts, ddlx) do with {:ok, user_table_schema} <- fetch_attr(params, :user_table_schema, default_schema(opts)), {:ok, user_table_name} <- fetch_attr(params, :user_table_name), {:ok, user_column} <- fetch_attr(params, :user_table_column), @@ -37,32 +18,24 @@ defmodule Electric.DDLX.Command.Unassign do attrs = Enum.reduce([scope_attrs, user_attrs, role_attrs], [], &Keyword.merge/2) - {:ok, struct(__MODULE__, attrs)} + {:ok, + %Command{ + action: + Command.ddlx( + unassigns: [ + %SatPerms.Unassign{ + table: protobuf_table(attrs[:table_name]), + user_column: attrs[:user_column], + role_column: attrs[:role_column], + role_name: attrs[:role_name], + scope: protobuf_scope(attrs[:scope]) + } + ] + ), + stmt: ddlx, + tables: [attrs[:table_name]], + tag: "ELECTRIC UNASSIGN" + }} end end - - defimpl Command do - import Electric.DDLX.Command.Common - - def pg_sql(unassign) do - [ - """ - CALL electric.unassign( - assign_table_full_name => #{sql_repr(unassign.table_name)}, - scope => #{sql_repr(unassign.scope)}, - user_column_name => #{sql_repr(unassign.user_column)}, - role_name_string => #{sql_repr(unassign.role_name)}, - role_column_name => #{sql_repr(unassign.role_column)}); - """ - ] - end - - def table_name(%{table_name: table_name}) do - table_name - end - - def tag(_a), do: "ELECTRIC UNASSIGN" - - def to_protobuf(_), do: [] - end end diff --git a/components/electric/lib/electric/ddlx/parser.ex b/components/electric/lib/electric/ddlx/parser.ex index 449024c7b7..5a8edec3c2 100644 --- a/components/electric/lib/electric/ddlx/parser.ex +++ 
b/components/electric/lib/electric/ddlx/parser.ex @@ -17,8 +17,17 @@ defmodule Electric.DDLX.Parser do :electric_ddlx_parser.parse(tokens) end - defp build_cmd({:ok, {module, attrs}}, _ddlx, opts) do - module.build(attrs, opts) + defp build_cmd({:ok, {module, attrs}}, ddlx, opts) do + case module.build(attrs, opts, ddlx) do + {:ok, command} -> + {:ok, command} + + {:error, message} when is_binary(message) -> + {:error, %Command.Error{message: message, sql: ddlx}} + + {:error, %Command.Error{}} = error -> + error + end end defp build_cmd({:error, {{line, position, _}, :electric_ddlx_parser, messages}}, ddlx, _opts) do diff --git a/components/electric/lib/electric/ddlx/parser/build.ex b/components/electric/lib/electric/ddlx/parser/build.ex index bc1e7141ed..4a08484386 100644 --- a/components/electric/lib/electric/ddlx/parser/build.ex +++ b/components/electric/lib/electric/ddlx/parser/build.ex @@ -1,4 +1,7 @@ defmodule Electric.DDLX.Parser.Build do + alias Electric.Satellite.SatPerms + alias Electric.DDLX.Command + def default_schema(opts) do Keyword.get(opts, :default_schema, "public") end @@ -90,4 +93,48 @@ defmodule Electric.DDLX.Parser.Build do _ -> {:ok, []} end end + + def protobuf_table({table_schema, table_name}) do + protobuf_table(table_schema, table_name) + end + + def protobuf_table(table_schema, table_name) do + %SatPerms.Table{schema: table_schema, name: table_name} + end + + def protobuf_scope({ss, sn}) do + %SatPerms.Table{schema: ss, name: sn} + end + + def protobuf_scope(_), do: nil + + def protobuf_columns(nil, _ddlx) do + {:ok, nil} + end + + def protobuf_columns([], ddlx) do + {:error, + %Command.Error{sql: ddlx, line: 1, position: 1, message: "Invalid empty column list"}} + end + + def protobuf_columns(names, _ddlx) do + {:ok, %SatPerms.ColumnList{names: names}} + end + + # the parser already returns the correct :UPDATE, :INSERT, etc atoms + def protobuf_privs(privs) do + privs + end + + def protobuf_role(:AUTHENTICATED) do + 
%SatPerms.RoleName{role: {:predefined, :AUTHENTICATED}} + end + + def protobuf_role(:ANYONE) do + %SatPerms.RoleName{role: {:predefined, :ANYONE}} + end + + def protobuf_role(role) when is_binary(role) do + %SatPerms.RoleName{role: {:application, role}} + end end diff --git a/components/electric/lib/electric/ddlx/parser/tokenizer.ex b/components/electric/lib/electric/ddlx/parser/tokenizer.ex index 7e045a851e..1e2f51c7d7 100644 --- a/components/electric/lib/electric/ddlx/parser/tokenizer.ex +++ b/components/electric/lib/electric/ddlx/parser/tokenizer.ex @@ -31,6 +31,7 @@ defmodule Electric.DDLX.Parser.Tokenizer.Tokens do deftoken(:token, "UNASSIGN", do: :UNASSIGN) deftoken(:token, "UPDATE", do: :UPDATE) deftoken(:token, "USING", do: :USING) + deftoken(:token, "WHERE", do: :WHERE) deftoken(:token, "WRITE", do: :WRITE) def token(s), do: s end diff --git a/components/electric/lib/electric/postgres/extension.ex b/components/electric/lib/electric/postgres/extension.ex index 5c6774bd2d..c1c58ca9cc 100644 --- a/components/electric/lib/electric/postgres/extension.ex +++ b/components/electric/lib/electric/postgres/extension.ex @@ -23,7 +23,6 @@ defmodule Electric.Postgres.Extension do @version_relation "migration_versions" @ddl_relation "ddl_commands" - @schema_relation "schema" @electrified_tracking_relation "electrified" @transaction_marker_relation "transaction_marker" @acked_client_lsn_relation "acknowledged_client_lsns" @@ -32,9 +31,10 @@ defmodule Electric.Postgres.Extension do @client_checkpoints_relation "client_checkpoints" @client_additional_data_relation "client_additional_data" - @grants_relation "grants" - @roles_relation "roles" - @assignments_relation "assignments" + # permissions storage and management + @ddlx_commands_relation "ddlx_commands" + @global_perms_relation "global_perms_state" + @user_perms_relation "user_perms_state" electric = "e_ident(@schema, &1) @@ -50,9 +50,9 @@ defmodule Electric.Postgres.Extension do @client_checkpoints_table 
electric.(@client_checkpoints_relation) @client_additional_data_table electric.(@client_additional_data_relation) - @grants_table electric.(@grants_relation) - @roles_table electric.(@roles_relation) - @assignments_table electric.(@assignments_relation) + @ddlx_table electric.(@ddlx_commands_relation) + @global_perms_table electric.(@global_perms_relation) + @user_perms_table electric.(@user_perms_relation) @client_additional_data_subject_type electric.("client_additional_data_subject") @@ -124,14 +124,14 @@ defmodule Electric.Postgres.Extension do def client_checkpoints_table, do: @client_checkpoints_table def client_additional_data_table, do: @client_additional_data_table - def grants_table, do: @grants_table - def roles_table, do: @roles_table - def assignments_table, do: @assignments_table + def ddlx_table, do: @ddlx_table + def global_perms_table, do: @global_perms_table + def user_perms_table, do: @user_perms_table def ddl_relation, do: {@schema, @ddl_relation} def version_relation, do: {@schema, @version_relation} - def schema_relation, do: {@schema, @schema_relation} def electrified_tracking_relation, do: {@schema, @electrified_tracking_relation} + def ddlx_relation, do: {@schema, @ddlx_commands_relation} def acked_client_lsn_relation, do: {@schema, @acked_client_lsn_relation} def publication_name, do: @publication_name @@ -150,6 +150,8 @@ defmodule Electric.Postgres.Extension do defguard is_acked_client_lsn_relation(relation) when relation == {@schema, @acked_client_lsn_relation} + defguard is_perms_relation(relation) when relation == {@schema, @ddlx_commands_relation} + def extract_ddl_sql(%{"txid" => _, "txts" => _, "query" => query}) do {:ok, query} end @@ -298,10 +300,8 @@ defmodule Electric.Postgres.Extension do {@schema, @ddl_relation}, {@schema, @electrified_tracking_relation}, {@schema, @transaction_marker_relation}, - {@schema, @grants_relation}, - {@schema, @roles_relation}, - {@schema, @assignments_relation}, - {@schema, 
@acked_client_lsn_relation} + {@schema, @acked_client_lsn_relation}, + {@schema, @ddlx_commands_relation} ] @doc """ @@ -367,7 +367,6 @@ defmodule Electric.Postgres.Extension do Migrations.Migration_20230605141256_ElectrifyFunction, Migrations.Migration_20230715000000_UtilitiesTable, Migrations.Migration_20230814170123_RenameDDLX, - Migrations.Migration_20230814170745_ElectricDDL, Migrations.Migration_20230829000000_AcknowledgedClientLsnsTable, Migrations.Migration_20230918115714_DDLCommandUniqueConstraint, Migrations.Migration_20230921161045_DropEventTriggers, @@ -378,7 +377,9 @@ defmodule Electric.Postgres.Extension do Migrations.Migration_20231206130400_ConvertReplicaTriggersToAlways, Migrations.Migration_20240110110200_DropUnusedFunctions, Migrations.Migration_20240205141200_ReinstallTriggerFunctionWriteCorrectMaxTag, + Migrations.Migration_20240212161153_DDLXCommands, Migrations.Migration_20240213160300_DropGenerateElectrifiedSqlFunction, + Migrations.Migration_20240214131615_PermissionsState, Migrations.Migration_20240417131000_ClientReconnectionInfoTables ] end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex b/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex deleted file mode 100644 index a635aad1e5..0000000000 --- a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex +++ /dev/null @@ -1,77 +0,0 @@ -defmodule Electric.Postgres.Extension.Migrations.Migration_20230814170745_ElectricDDL do - alias Electric.Postgres.Extension - - require EEx - - @behaviour Extension.Migration - - sql_file = Path.expand("20230814170745_electric_ddl/ddlx_init.sql.eex", __DIR__) - - @external_resource sql_file - - @impl true - def version, do: 2023_08_14_17_07_45 - - @impl true - def up(schema) do - grants_table = Extension.grants_table() - roles_table = Extension.roles_table() - assignments_table = Extension.assignments_table() - - 
ddlx_sql = ddlx_init_sql(schema, grants_table, roles_table, assignments_table) - - tables = [ - """ - CREATE TABLE IF NOT EXISTS #{grants_table} ( - privilege VARCHAR(20) NOT NULL, - on_table VARCHAR(64) NOT NULL, - role VARCHAR(64) NOT NULL, - column_name VARCHAR(64) NOT NULL, - scope VARCHAR(64) NOT NULL, - using_path TEXT, - check_fn TEXT, - CONSTRAINT grants_pkey PRIMARY KEY (privilege, on_table, role, scope, column_name) - ); - """, - """ - CREATE TABLE IF NOT EXISTS #{roles_table} ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id VARCHAR(256) NOT NULL, - scope_table VARCHAR(64), - scope_id VARCHAR(256) - ); - """, - """ - CREATE TABLE IF NOT EXISTS #{assignments_table} ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - table_name VARCHAR(64) NOT NULL, - scope_table VARCHAR(64) NOT NULL, - user_column VARCHAR(64) NOT NULL, - role_name VARCHAR(64) NOT NULL, - role_column VARCHAR(64) NOT NULL, - if_fn TEXT, - CONSTRAINT unique_assign UNIQUE (table_name, scope_table, user_column, role_name, role_column) - ); - """ - ] - - publish_tables = - Enum.map( - [grants_table, roles_table, assignments_table], - &Extension.add_table_to_publication_sql/1 - ) - - tables ++ [ddlx_sql] ++ publish_tables - end - - @impl true - def down(_), do: [] - - EEx.function_from_file(:defp, :ddlx_init_sql, sql_file, [ - :schema, - :grants_table, - :roles_table, - :assignments_table - ]) -end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex b/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex deleted file mode 100644 index d3ae868134..0000000000 --- a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex +++ /dev/null @@ -1,663 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - --- CUT HERE enable function -CREATE OR REPLACE PROCEDURE <%= schema 
%>.enable(table_name text) SECURITY DEFINER AS -$$ - BEGIN - CALL <%= schema %>.electrify(table_name); - END; -$$ LANGUAGE plpgsql; - --- CUT HERE disable function -CREATE OR REPLACE PROCEDURE <%= schema %>.disable(table_name text) SECURITY DEFINER AS -$$ - BEGIN - END; -$$ LANGUAGE plpgsql; - --- CUT HERE grant function -CREATE OR REPLACE PROCEDURE <%= schema %>.grant(privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text, - using_path text, - check_fn text) - SECURITY DEFINER AS $$ - - DECLARE - col TEXT; - - BEGIN - FOREACH col IN ARRAY columns - LOOP - INSERT INTO <%= grants_table %> ( privilege, on_table, role , column_name, scope, using_path, check_fn) - VALUES (privilege_name, on_table_name, role_name, col, scope_name, using_path, check_fn) - ON CONFLICT ON CONSTRAINT grants_pkey DO UPDATE SET - (using_path, check_fn) = (EXCLUDED.using_path, EXCLUDED.check_fn); - END LOOP; - END; -$$ LANGUAGE plpgsql; - --- CUT HERE revoke function -CREATE OR REPLACE PROCEDURE <%= schema %>.revoke( - privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text -) SECURITY DEFINER AS $$ - - DECLARE - all_columns BOOLEAN; - - BEGIN - PERFORM '*' = ANY(columns) As all_columns; - - IF all_columns THEN - DELETE FROM <%= grants_table %> WHERE - privilege = privilege_name AND - on_table = on_table_name AND - role = role_name AND - scope = scope_name; - ELSE - DELETE FROM <%= grants_table %> WHERE - privilege = privilege_name AND - on_table = on_table_name AND - role = role_name AND - scope = scope_name AND - column_name = any(columns); - END IF; - END; -$$ LANGUAGE plpgsql; - - --- CUT HERE assign function -CREATE OR REPLACE PROCEDURE <%= schema %>.assign( - assign_schema text, - assign_table text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text, - if_fn text -) SECURITY DEFINER AS $$ - -DECLARE - assignment_id uuid; - assign_table_full_name TEXT; - 
scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - if_fn_not_null TEXT; - role_def TEXT; - assignment_name TEXT; - user_column_type TEXT; - scope_key_count int; - user_key_count int; - scope_key RECORD; - user_key RECORD; - primary_key RECORD; - -BEGIN - - -- return types for the introspection of foreign keys - CREATE TEMP TABLE scope_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE user_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE pkeys - ( - columns name[10], - types information_schema.character_data[10] - ); - - -- gets the columns and types for the assign_table's primary key - INSERT INTO pkeys SELECT * from <%= schema %>.find_pk(assign_schema, assign_table); - SELECT * FROM pkeys LIMIT 1 INTO primary_key; - - - -- gets the foreign key pointing to the user - INSERT INTO user_fkeys SELECT * from <%= schema %>.find_fk_for_column(assign_schema,assign_table, user_column_name); - SELECT COUNT(*) FROM user_fkeys INTO user_key_count; - - IF user_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not find a foreign key pointing to the user table'; - END IF; - - SELECT * FROM user_fkeys LIMIT 1 INTO user_key; - - SELECT data_type FROM information_schema.columns - WHERE table_name = user_key.to_table and column_name = user_key.to_columns[1] - INTO user_column_type; - - -- sets some things to default strings if the function args are null - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - IF if_fn IS NULL THEN if_fn_not_null = 'true'; ELSE if_fn_not_null = if_fn; END IF; - - IF role_name_string IS NULL AND 
role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name'; - END IF; - - IF NOT role_name_string IS NULL AND NOT role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name but not both'; - END IF; - - assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - IF role_name_string IS NULL THEN - role_name_not_null = '__none__'; - role_column_not_null = role_column_name; - role_def = format('NEW.%s', role_column_name); - ELSE - role_name_not_null = role_name_string; - role_column_not_null = '__none__'; - role_def = format(E'\'%s\'', role_name_string); - END IF; - - -- reads the foreign key for the scope if it exists - IF NOT scope IS NULL THEN - INSERT INTO scope_fkeys SELECT * from <%= schema %>.find_fk_to_table(assign_schema,assign_table, scope); - SELECT COUNT(*) FROM scope_fkeys INTO scope_key_count; - - IF scope_key_count > 1 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Too many foreign keys for the scope table'; - END IF; - - IF scope_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Could not find a foreign key pointing to the scope table'; - END IF; - - SELECT * FROM scope_fkeys LIMIT 1 INTO scope_key; - - END IF; - - -- Creates the assignment itself. 
- INSERT INTO <%= assignments_table %> (table_name, scope_table, user_column, role_name, role_column, if_fn) - VALUES (assign_table_full_name, scope_table_not_null, user_column_name, role_name_not_null, role_column_not_null, if_fn) - RETURNING id INTO assignment_id; - - if assignment_id IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not create assignment'; - END IF; - - - -- this is a canonical name used by components owned by this assignment - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - /* - Creates big fat join table. Every time the assignment rule is used and a user is given a role a row will be created - in both this join table and in the table electric.roles. This table serves as a polymorphic join between the roles - table and the different types of both scope table and assignment table, and handles clean up correctly via fk cascade on delete. - - This table have 4 or 5 foreign keys - - It has foreign keys with ON DELETE CASCADE pointing to: - - The assignment created above. This assignment is the rule that causes all the entries in this join to be created in owns them. - - The user that the role has been given too. - - The assignment table item that assigned the role. - - The row in the scope table if one is specified. - - So that any of these being deleted will remove the join. - - And it has a foreign key pointing to the role in electric.roles which it will delete with a trigger. 
- */ - - EXECUTE format('CREATE TABLE IF NOT EXISTS <%= schema %>.assignment_%s_join ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - user_id %s, - assignment_id uuid, - role_id uuid, - FOREIGN KEY(role_id) - REFERENCES <%= roles_table %> (id), - FOREIGN KEY(user_id) - REFERENCES %s.%s(%s) - ON DELETE CASCADE, - FOREIGN KEY(assignment_id) - REFERENCES <%= assignments_table %> (id) - ON DELETE CASCADE - );', - assignment_name, - user_key.to_types[1], - user_key.to_schema, - user_key.to_table, - user_key.to_columns[1] - ); - - -- Adds a foreign key to the join table pointing to the assign_table - for counter in 1..ARRAY_LENGTH(primary_key.columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s_%s %s;', - assignment_name, - assign_table, - primary_key.columns[counter], - primary_key.types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_%s_fk - FOREIGN KEY (%s_%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - assign_table, - assign_table, - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - assign_schema, - assign_table, - ARRAY_TO_STRING(primary_key.columns, ', ') - ); - - -- defines insert and update trigger functions for the assign_table - -- when there is no scope - IF scope IS NULL THEN - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - role_key uuid; - join_key uuid; - BEGIN - - SELECT id, role_id FROM <%= schema %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table %> (user_id, role) - VALUES (NEW.%9$s, %10$s) returning id INTO role_key; - INSERT INTO <%= schema %>.assignment_%1$s_join (user_id, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, 
NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema %>.assignment_%1$s_join SET user_id = NEW.%9$s - WHERE id = join_key; - UPDATE <%= roles_table %> SET (user_id, role) = (NEW.%9$s, %10s) - WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - '', - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope - ); - - -- and when there is a scope - ELSE - for counter in 1..ARRAY_LENGTH(scope_key.from_columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s %s;', - assignment_name, - scope_key.from_columns[counter], - scope_key.to_types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_scope_fk - FOREIGN KEY (%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - ARRAY_TO_STRING(scope_key.from_columns, ', '), - scope_key.to_schema, - scope_key.to_table, - ARRAY_TO_STRING(scope_key.to_columns, ', ') - ); - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - scope_key TEXT; - scope_list TEXT[]; - role_key uuid; - join_key uuid; - BEGIN - - scope_list := ARRAY[NEW.%3$s::text]; - scope_key := ARRAY_TO_STRING(scope_list, \', \' ); - - SELECT id, role_id FROM <%= schema %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table %> (user_id, role, scope_table, scope_id) - VALUES (NEW.%9$s, %10$s, 
\'%11$s\', scope_key) returning id INTO role_key; - INSERT INTO <%= schema %>.assignment_%1$s_join (user_id, %12$s, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%13$s, NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema %>.assignment_%1$s_join SET (user_id, %12$s) - = (NEW.%9$s, NEW.%13$s) WHERE id = join_key; - UPDATE <%= roles_table %> SET (user_id, role, scope_table, scope_id) - = (NEW.%9$s, %10$s, \'%11$s\', scope_key) WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - ARRAY_TO_STRING(scope_key.from_columns, '::text, NEW.'), - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope, - --12 - ARRAY_TO_STRING(scope_key.from_columns, ', '), - --13 - ARRAY_TO_STRING(scope_key.from_columns, ', NEW.') - ); - END IF; - - -- adds a trigger to the join table that deletes the role itself - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.cleanup_role_%s() RETURNS TRIGGER SECURITY DEFINER - AS $%s$ - BEGIN - DELETE FROM <%= roles_table %> WHERE id = OLD.role_id; - RETURN OLD; - END; - $%s$ LANGUAGE plpgsql;', - assignment_name, - '', - '' - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_cleanup_role_%s - AFTER DELETE ON <%= schema %>.assignment_%s_join - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.cleanup_role_%s();', - assignment_name, - assignment_name, - assignment_name - ); - - -- adds the insert and update triggers functions to the assign_table - EXECUTE format('CREATE OR REPLACE TRIGGER electric_insert_role_%s - AFTER INSERT ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.upsert_role_%s();', - assignment_name, - 
assign_table, - assignment_name - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_update_role_%s - AFTER UPDATE ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.upsert_role_%s();', - assignment_name, - assign_table, - assignment_name - ); - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; -END; -$$ LANGUAGE plpgsql; - - --- CUT HERE unassign function -CREATE OR REPLACE PROCEDURE <%= schema %>.unassign( - assign_schema text, - assign_table text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text -) SECURITY DEFINER AS -$$ - -DECLARE - assignment_id uuid; - assignment_name TEXT; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - assign_table_full_name TEXT; - -BEGIN - - IF role_name_string IS NULL THEN role_name_not_null = '__none__'; ELSE role_name_not_null = role_name_string; END IF; - IF role_column_name IS NULL THEN role_column_not_null = '__none__'; ELSE role_column_not_null = role_column_name; END IF; - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - - assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - SELECT id FROM <%= assignments_table %> - WHERE table_name = assign_table_full_name - AND scope_table = scope_table_not_null - AND user_column = user_column_name - AND role_name = role_name_not_null - AND role_column = role_column_not_null - INTO assignment_id; - - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - -- remove triggers - EXECUTE format('DROP TRIGGER IF EXISTS electric_cleanup_role_%s ON <%= schema %>.assignment_%s_join;', - assignment_name, - assignment_name - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_insert_role_%s ON %s;', - assignment_name, - assign_table - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_update_role_%s ON %s;', - assignment_name, - assign_table - ); - - -- remove functions - EXECUTE format('DROP FUNCTION IF 
EXISTS <%= schema %>.cleanup_role_%s;', - assignment_name - ); - - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema %>.upsert_role_%s;', - assignment_name - ); - - -- remove join table - EXECUTE format('DROP TABLE IF EXISTS <%= schema %>.assignment_%s_join;', - assignment_name - ); - - -- remove assignment - DELETE FROM <%= assignments_table %> WHERE id = assignment_id; -END; -$$ LANGUAGE plpgsql; - --- CUT HERE sqlite function -CREATE OR REPLACE PROCEDURE <%= schema %>.sqlite(sql text) - SECURITY DEFINER AS $$ - BEGIN - NULL; - END; -$$ LANGUAGE plpgsql; - - --- CUT HERE find foreign keys -CREATE OR REPLACE FUNCTION <%= schema %>.find_fk_to_table( - src_schema text, - src_table text, - dst_table text) RETURNS TABLE( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - - BEGIN - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = 
f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = src_table and f_tbl.relname = dst_table and sch.nspname = src_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; - -$$ LANGUAGE plpgsql; - --- CUT HERE find foreign keys for column -CREATE OR REPLACE FUNCTION <%= schema %>.find_fk_for_column( - src_schema text, - src_table text, - src_column text) RETURNS TABLE( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - - BEGIN - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = src_table and col.attname = src_column and sch.nspname = src_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; - -$$ LANGUAGE plpgsql; - --- CUT HERE find primary key -CREATE OR 
REPLACE FUNCTION <%= schema %>.find_pk( - src_schema text, - src_table text) RETURNS TABLE( - columns name[10], - types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - BEGIN - RETURN QUERY - SELECT ARRAY_AGG(col.attname ORDER BY u.attposition) AS "columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'p' and tbl.relname = src_table and sch.nspname = src_schema; - END; - -$$ LANGUAGE plpgsql; - diff --git a/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex b/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex new file mode 100644 index 0000000000..8c8a7f0f4f --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex @@ -0,0 +1,32 @@ +defmodule Electric.Postgres.Extension.Migrations.Migration_20240212161153_DDLXCommands do + alias Electric.Postgres.Extension + + @behaviour Extension.Migration + + @impl true + def version, do: 2024_02_12_16_11_53 + + @impl true + def up(schema) do + ddlx_table = Extension.ddlx_table() + txid_type = Extension.txid_type() + txts_type = Extension.txts_type() + + [ + """ + CREATE TABLE #{ddlx_table} ( + id serial8 NOT NULL PRIMARY KEY, + 
txid #{txid_type} NOT NULL DEFAULT #{schema}.current_xact_id(), + txts #{txts_type} NOT NULL DEFAULT #{schema}.current_xact_ts(), + ddlx bytea NOT NULL + ); + """, + Extension.add_table_to_publication_sql(ddlx_table) + ] + end + + @impl true + def down(_schema) do + [] + end +end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex b/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex new file mode 100644 index 0000000000..b244354ad2 --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex @@ -0,0 +1,89 @@ +defmodule Electric.Postgres.Extension.Migrations.Migration_20240214131615_PermissionsState do + alias Electric.Postgres.Extension + alias Electric.Satellite.SatPerms + + @behaviour Extension.Migration + + @impl true + def version, do: 2024_02_14_13_16_15 + + @impl true + def up(schema) do + global_perms_table = Extension.global_perms_table() + user_perms_table = Extension.user_perms_table() + + empty_rules = + %SatPerms.Rules{id: 1} |> Protox.encode!() |> IO.iodata_to_binary() |> Base.encode16() + + [ + """ + CREATE TABLE #{global_perms_table} ( + id int8 NOT NULL PRIMARY KEY, + parent_id int8 UNIQUE REFERENCES #{global_perms_table} (id) ON DELETE SET NULL, + rules bytea NOT NULL, + inserted_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP + ); + """, + """ + CREATE UNIQUE INDEX ON #{global_perms_table} ((1)) WHERE parent_id IS NULL; + """, + """ + CREATE TABLE #{user_perms_table} ( + id serial8 NOT NULL PRIMARY KEY, + parent_id int8 REFERENCES #{user_perms_table} (id) ON DELETE SET NULL, + global_perms_id int8 NOT NULL REFERENCES #{global_perms_table} (id) ON DELETE CASCADE, + user_id text NOT NULL, + roles bytea NOT NULL, + inserted_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP + ); + """, + """ + CREATE UNIQUE INDEX ON #{user_perms_table} (user_id) WHERE parent_id 
IS NULL; + """, + """ + CREATE INDEX user_perms_user_id_idx ON #{user_perms_table} (user_id, id); + """, + """ + INSERT INTO #{global_perms_table} (id, rules) VALUES (1, '\\x#{empty_rules}'::bytea) + """, + """ + DROP TABLE IF EXISTS #{schema}.roles CASCADE + """, + """ + DROP TABLE IF EXISTS #{schema}.grants CASCADE + """, + """ + DROP TABLE IF EXISTS #{schema}.assignments CASCADE + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.assign; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.unassign; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.grant; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.revoke; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.sqlite; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_fk_to_table; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_fk_for_column; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_pk; + """ + ] + end + + @impl true + def down(_schema) do + [] + end +end diff --git a/components/electric/lib/electric/postgres/extension/permissions.ex b/components/electric/lib/electric/postgres/extension/permissions.ex new file mode 100644 index 0000000000..4e865983a6 --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/permissions.ex @@ -0,0 +1,149 @@ +defmodule Electric.Postgres.Extension.Permissions do + alias Electric.Postgres.Extension + alias Electric.Satellite.SatPerms + + @global_perms_table Extension.global_perms_table() + @user_perms_table Extension.user_perms_table() + + @shared_global_query """ + SELECT "id", "parent_id", "rules" FROM #{@global_perms_table} + """ + + @current_global_query """ + #{@shared_global_query} + ORDER BY "id" DESC + LIMIT 1 + """ + + @specific_global_query """ + #{@shared_global_query} + WHERE id = $1 + LIMIT 1 + """ + + @shared_user_query """ + SELECT u.id, + u.parent_id, + u.roles, + g.rules + FROM #{@user_perms_table} u + INNER JOIN #{@global_perms_table} g ON g.id = u.global_perms_id + """ + + @current_user_query """ + #{@shared_user_query} + WHERE 
(u.user_id = $1) + ORDER BY u.id DESC + LIMIT 1 + """ + + @specific_user_query """ + #{@shared_user_query} + WHERE (u.user_id = $1) AND (u.id = $2) + LIMIT 1 + """ + + # We need to duplicate all the current user perms that, which all depend on the previous version + # of the global rules. This query is complicated by the need to only select the most current + # version of each user's permissions (because for a given rules id, a user may have multiple + # versions of their roles). + @save_global_query """ + WITH global_perms AS ( + INSERT INTO #{@global_perms_table} (id, parent_id, rules) + VALUES ($1, $2, $3) RETURNING id, parent_id + ) + INSERT INTO #{@user_perms_table} (user_id, parent_id, roles, global_perms_id) + SELECT u.*, global_perms.id FROM + (SELECT DISTINCT user_id FROM #{@user_perms_table} ORDER BY user_id) uid + JOIN LATERAL ( + SELECT ui.user_id, ui.id, ui.roles FROM #{@user_perms_table} ui + WHERE ui.user_id = uid.user_id + ORDER BY ui.id DESC + LIMIT 1 + ) u ON TRUE, global_perms + """ + + @create_user_query """ + WITH global_perms AS ( + SELECT id, rules + FROM #{@global_perms_table} + ORDER BY id DESC + LIMIT 1 + ), user_perms AS ( + INSERT INTO #{@user_perms_table} (user_id, parent_id, roles, global_perms_id) + SELECT $1, $2, $3, g.id + FROM global_perms g + RETURNING id + ) + SELECT user_perms.id AS user_id, + global_perms.id AS global_id, + global_perms.rules + FROM user_perms, global_perms + """ + + def global(conn) do + with {:ok, _cols, [row]} <- :epgsql.equery(conn, @current_global_query, []), + {_id, _parent_id, bytes} = row do + Protox.decode(bytes, SatPerms.Rules) + end + end + + def global(conn, id) do + with {:ok, _cols, [row]} <- :epgsql.equery(conn, @specific_global_query, [id]), + {_id, _parent_id, bytes} = row do + Protox.decode(bytes, SatPerms.Rules) + end + end + + def save_global(conn, %SatPerms.Rules{id: id, parent_id: parent_id} = rules) do + with {:ok, iodata} <- Protox.encode(rules), + bytes = IO.iodata_to_binary(iodata), + 
{:ok, _users} <- :epgsql.equery(conn, @save_global_query, [id, parent_id, bytes]) do + :ok + end + end + + def user(conn, user_id) do + load_user_perms(conn, user_id, @current_user_query, [user_id], fn conn -> + insert_user(conn, user_id) + end) + end + + def user(conn, user_id, perms_id) do + load_user_perms(conn, user_id, @specific_user_query, [user_id, perms_id], fn _conn -> + {:error, "no user permissions found for user=#{user_id} id=#{perms_id}"} + end) + end + + def save_user(conn, user_id, %SatPerms.Roles{} = roles) do + insert_user(conn, user_id, roles) + end + + defp load_user_perms(conn, user_id, query, binds, not_found_fun) do + case :epgsql.equery(conn, query, binds) do + {:ok, _, [{id, _parent_id, roles_bytes, rules_bytes}]} -> + with {:ok, roles} <- Protox.decode(roles_bytes, SatPerms.Roles), + {:ok, rules} <- Protox.decode(rules_bytes, SatPerms.Rules) do + {:ok, %SatPerms{id: id, user_id: user_id, rules: rules, roles: roles.roles}} + end + + {:ok, _, []} -> + not_found_fun.(conn) + + error -> + error + end + end + + defp insert_user(conn, user_id, roles \\ %SatPerms.Roles{}) do + encoded_roles = + roles |> Protox.encode!() |> IO.iodata_to_binary() + + with {:ok, _, [row]} <- + :epgsql.equery(conn, @create_user_query, [user_id, roles.parent_id, encoded_roles]), + {id, _global_perms_id, rules} = row, + {:ok, rules} = Protox.decode(rules, SatPerms.Rules) do + {:ok, %SatPerms{id: id, user_id: user_id, rules: rules, roles: roles.roles}} + end + end +end diff --git a/components/electric/lib/electric/postgres/extension/schema_cache.ex b/components/electric/lib/electric/postgres/extension/schema_cache.ex index a438b54218..b1bd2bedc2 100644 --- a/components/electric/lib/electric/postgres/extension/schema_cache.ex +++ b/components/electric/lib/electric/postgres/extension/schema_cache.ex @@ -71,7 +71,7 @@ defmodule Electric.Postgres.Extension.SchemaCache do end @impl SchemaLoader - def connect(conn_config, _opts) do + def connect(_opts, conn_config) do {:ok, 
Connectors.origin(conn_config)} end @@ -150,6 +150,36 @@ defmodule Electric.Postgres.Extension.SchemaCache do call(origin, {:tx_version, row}) end + @impl SchemaLoader + def global_permissions(origin) do + call(origin, :global_permissions) + end + + @impl SchemaLoader + def global_permissions(origin, id) do + call(origin, {:global_permissions, id}) + end + + @impl SchemaLoader + def save_global_permissions(origin, rules) do + call(origin, {:save_global_permissions, rules}) + end + + @impl SchemaLoader + def user_permissions(origin, user_id) do + call(origin, {:user_permissions, user_id}) + end + + @impl SchemaLoader + def user_permissions(origin, user_id, permissions_id) do + call(origin, {:user_permissions, user_id, permissions_id}) + end + + @impl SchemaLoader + def save_user_permissions(origin, user_id, roles) do + call(origin, {:save_user_permissions, user_id, roles}) + end + def relation(origin, oid) when is_integer(oid) do call(origin, {:relation, oid}) end @@ -372,6 +402,54 @@ defmodule Electric.Postgres.Extension.SchemaCache do {:reply, result, state} end + def handle_call(:global_permissions, _from, state) do + {:reply, SchemaLoader.global_permissions(state.backend), state} + end + + def handle_call({:global_permissions, id}, _from, state) do + {:reply, SchemaLoader.global_permissions(state.backend, id), state} + end + + def handle_call({:save_global_permissions, rules}, _from, state) do + case SchemaLoader.save_global_permissions(state.backend, rules) do + {:ok, backend} -> + {:reply, {:ok, state.origin}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + + def handle_call({:user_permissions, user_id}, _from, state) do + case SchemaLoader.user_permissions(state.backend, user_id) do + {:ok, backend, roles} -> + {:reply, {:ok, state.origin, roles}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + + def handle_call({:user_permissions, user_id, permissions_id}, _from, state) do + case 
SchemaLoader.user_permissions(state.backend, user_id, permissions_id) do + {:ok, roles} -> + {:reply, {:ok, roles}, state} + + error -> + {:reply, error, state} + end + end + + def handle_call({:save_user_permissions, user_id, roles}, _from, state) do + case SchemaLoader.save_user_permissions(state.backend, user_id, roles) do + {:ok, backend} -> + {:reply, {:ok, state.origin}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + # Prevent deadlocks: # the list of electrified tables is cached and this refresh_subscription call # is done via an async Task because otherwise we get into a deadlock in the diff --git a/components/electric/lib/electric/postgres/extension/schema_loader.ex b/components/electric/lib/electric/postgres/extension/schema_loader.ex index d07da1cea6..49ec687ac2 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader.ex @@ -1,6 +1,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader do alias Electric.Postgres.{Schema, Extension.Migration} alias Electric.Replication.Connectors + alias Electric.Satellite.SatPerms alias __MODULE__.Version @type state() :: term() @@ -18,8 +19,8 @@ defmodule Electric.Postgres.Extension.SchemaLoader do @type t() :: {module(), state()} @type tx_fk_row() :: %{binary() => integer() | binary()} - @callback connect(Connectors.config(), Keyword.t()) :: {:ok, state()} - @callback load(state()) :: {:ok, Version.t()} + @callback connect(term(), Connectors.config()) :: {:ok, state()} + @callback load(state()) :: {:ok, Version.t()} | {:error, binary()} @callback load(state(), version()) :: {:ok, Version.t()} | {:error, binary()} @callback save(state(), version(), Schema.t(), [String.t()]) :: {:ok, state(), Version.t()} | {:error, term()} @@ -33,8 +34,29 @@ defmodule Electric.Postgres.Extension.SchemaLoader do @callback index_electrified?(state(), relation()) :: {:ok, boolean()} | {:error, term()} 
@callback tx_version(state(), tx_fk_row()) :: {:ok, version()} | {:error, term()} + # ok, so these permissions related callbacks are definitely the last nail in the coffin of the + # `SchemaLoader` idea. basically we need the same kind of access to some usually pg-backed + # permissions state data as we do to the schema state. seems pointless to duplicate the pg + # connection stuff, plus why have two connection pools when we already have one. + @callback global_permissions(state()) :: {:ok, %SatPerms.Rules{}} | {:error, term()} + @callback global_permissions(state(), id :: integer()) :: + {:ok, %SatPerms.Rules{}} | {:error, term()} + # loading user permissions for a new user requires inserting an empty state + @callback user_permissions(state(), user_id :: binary()) :: + {:ok, state(), %SatPerms{}} | {:error, term()} + + @callback user_permissions(state(), user_id :: binary(), id :: integer()) :: + {:ok, %SatPerms{}} | {:error, term()} + + @callback save_global_permissions(state(), %SatPerms.Rules{}) :: + {:ok, state()} | {:error, term()} + @callback save_user_permissions(state(), user_id :: binary(), %SatPerms.Roles{}) :: + {:ok, state(), %SatPerms{}} | {:error, term()} + @default_backend {__MODULE__.Epgsql, []} + @behaviour __MODULE__ + def get(opts, key, default \\ @default_backend) do case Keyword.get(opts, key, default) do module when is_atom(module) -> @@ -45,42 +67,51 @@ defmodule Electric.Postgres.Extension.SchemaLoader do end end + @impl true def connect({module, opts}, conn_config) do - with {:ok, state} <- module.connect(conn_config, opts) do + with {:ok, state} <- module.connect(opts, conn_config) do {:ok, {module, state}} end end + @impl true def load({module, state}) do module.load(state) end + @impl true def load({module, state}, version) do module.load(state, version) end + @impl true def save({module, state}, version, schema, stmts) do with {:ok, state, schema_version} <- module.save(state, version, schema, stmts) do {:ok, {module, state}, 
schema_version} end end + @impl true def relation_oid({module, state}, rel_type, schema, table) do module.relation_oid(state, rel_type, schema, table) end + @impl true def refresh_subscription({module, state}, name) do module.refresh_subscription(state, name) end + @impl true def migration_history({module, state}, version) do module.migration_history(state, version) end + @impl true def known_migration_version?({module, state}, version) do module.known_migration_version?(state, version) end + @impl true def internal_schema({module, state}) do module.internal_schema(state) end @@ -91,15 +122,56 @@ defmodule Electric.Postgres.Extension.SchemaLoader do end end + @impl true def table_electrified?({module, state}, relation) do module.table_electrified?(state, relation) end + @impl true def index_electrified?({module, state}, relation) do module.index_electrified?(state, relation) end + @impl true def tx_version({module, state}, row) do module.tx_version(state, row) end + + @impl true + def global_permissions({module, state}) do + module.global_permissions(state) + end + + @impl true + def global_permissions({module, state}, id) do + module.global_permissions(state, id) + end + + @impl true + def save_global_permissions({module, state}, rules) do + with {:ok, state} <- module.save_global_permissions(state, rules) do + {:ok, {module, state}} + end + end + + @impl true + def user_permissions({module, state}, user_id) do + with {:ok, state, perms} <- module.user_permissions(state, user_id) do + {:ok, {module, state}, perms} + end + end + + @impl true + def user_permissions({module, state}, user_id, perms_id) do + with {:ok, perms} <- module.user_permissions(state, user_id, perms_id) do + {:ok, perms} + end + end + + @impl true + def save_user_permissions({module, state}, user_id, roles) do + with {:ok, state, perms} <- module.save_user_permissions(state, user_id, roles) do + {:ok, {module, state}, perms} + end + end end diff --git 
a/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex b/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex index 14bf123052..8bfab458d9 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex @@ -65,8 +65,8 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do @pool_timeout 5_000 - @impl true - def connect(conn_config, _opts) do + @impl SchemaLoader + def connect(_opts, conn_config) do {:ok, _pool} = NimblePool.start_link( worker: {ConnectionPool, conn_config}, @@ -88,7 +88,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do ) end - @impl true + @impl SchemaLoader def load(pool) do checkout!(pool, fn conn -> with {:ok, version, schema} <- Extension.current_schema(conn) do @@ -97,7 +97,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def load(pool, version) do checkout!(pool, fn conn -> with {:ok, version, schema} <- Extension.schema_version(conn, version) do @@ -106,7 +106,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def save(pool, version, schema, stmts) do checkout!(pool, fn conn -> with :ok <- Extension.save_schema(conn, version, schema, stmts) do @@ -115,7 +115,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def relation_oid(_conn, :trigger, _schema, _table) do raise RuntimeError, message: "oid lookup for triggers no implemented" end @@ -126,7 +126,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def refresh_subscription(pool, name) do checkout!(pool, fn conn -> query = ~s|ALTER SUBSCRIPTION "#{name}" REFRESH PUBLICATION WITH (copy_data = false)| @@ -147,21 +147,21 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end 
- @impl true + @impl SchemaLoader def migration_history(pool, version) do checkout!(pool, fn conn -> Extension.migration_history(conn, version) end) end - @impl true + @impl SchemaLoader def known_migration_version?(pool, version) do checkout!(pool, fn conn -> Extension.known_migration_version?(conn, version) end) end - @impl true + @impl SchemaLoader def internal_schema(pool) do checkout!(pool, fn conn -> oid_loader = &Client.relation_oid(conn, &1, &2, &3) @@ -172,24 +172,74 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def table_electrified?(pool, {schema, name}) do checkout!(pool, fn conn -> Extension.electrified?(conn, schema, name) end) end - @impl true + @impl SchemaLoader def index_electrified?(pool, {schema, name}) do checkout!(pool, fn conn -> Extension.index_electrified?(conn, schema, name) end) end - @impl true + @impl SchemaLoader def tx_version(pool, row) do checkout!(pool, fn conn -> Extension.tx_version(conn, row) end) end + + @impl SchemaLoader + def global_permissions(pool) do + checkout!(pool, fn conn -> + Extension.Permissions.global(conn) + end) + end + + @impl SchemaLoader + def global_permissions(pool, id) do + checkout!(pool, fn conn -> + Extension.Permissions.global(conn, id) + end) + end + + @impl SchemaLoader + def save_global_permissions(pool, permissions) do + checkout!(pool, fn conn -> + with :ok <- Extension.Permissions.save_global(conn, permissions) do + {:ok, pool} + end + end) + end + + @impl SchemaLoader + def user_permissions(pool, user_id) do + checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.user(conn, user_id) do + {:ok, pool, perms} + end + end) + end + + @impl SchemaLoader + def user_permissions(pool, user_id, perms_id) do + checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.user(conn, user_id, perms_id) do + {:ok, perms} + end + end) + end + + @impl SchemaLoader + def save_user_permissions(pool, user_id, roles) do + 
checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.save_user(conn, user_id, roles) do + {:ok, pool, perms} + end + end) + end end diff --git a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex index 00c1a9596f..914f7a208b 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex @@ -19,7 +19,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Version do primary_keys: %{relation() => [String.t()]} } - @spec new(nil | version(), Schema.t()) :: t() + @spec new(version() | nil, Schema.t()) :: t() def new(version, %Schema.Proto.Schema{} = schema) do %__MODULE__{version: version, schema: schema} |> Map.update!(:tables, &cache_tables_by_name(&1, schema)) @@ -108,6 +108,19 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Version do end end + def foreign_keys(%__MODULE__{} = version, {_, _} = relation, {_, _} = target) do + graph = fk_graph(version) + + case Graph.edges(graph, relation, target) do + [] -> + {:error, + "no foreign key found from #{Electric.Utils.inspect_relation(relation)} to #{Electric.Utils.inspect_relation(target)}"} + + [fk] -> + {:ok, fk.label} + end + end + @spec fk_graph(t()) :: Graph.t() def fk_graph(%__MODULE__{fk_graph: fk_graph}) do fk_graph diff --git a/components/electric/lib/electric/postgres/proxy/handler.ex b/components/electric/lib/electric/postgres/proxy/handler.ex index 835a42d309..41260b12d1 100644 --- a/components/electric/lib/electric/postgres/proxy/handler.ex +++ b/components/electric/lib/electric/postgres/proxy/handler.ex @@ -230,7 +230,7 @@ defmodule Electric.Postgres.Proxy.Handler do %{loader: {loader_module, loader_opts}, connector_config: connector_config} = state - {:ok, loader_conn} = loader_module.connect(connector_config, loader_opts) + {:ok, loader_conn} = 
loader_module.connect(loader_opts, connector_config) {:ok, {stack, _state} = injector} = state.injector_opts diff --git a/components/electric/lib/electric/postgres/proxy/query_analyser.ex b/components/electric/lib/electric/postgres/proxy/query_analyser.ex index 01b43008b9..4820238e88 100644 --- a/components/electric/lib/electric/postgres/proxy/query_analyser.ex +++ b/components/electric/lib/electric/postgres/proxy/query_analyser.ex @@ -17,7 +17,11 @@ defmodule Electric.Postgres.Proxy.QueryAnalysis do ] @type t() :: %__MODULE__{ - action: atom() | {atom(), binary()} | {:electric, Electric.DDLX.Command.t()}, + action: + atom() + | {atom(), binary()} + | {atom(), atom()} + | {:electric, Electric.DDLX.Command.t()}, table: nil | {String.t(), String.t()}, type: :table | :index, ast: Electric.DDLX.Command.t() | struct(), @@ -559,9 +563,21 @@ defimpl QueryAnalyser, for: PgQuery.CallStmt do def analyse(stmt, analysis, state) do case extract_electric(stmt, analysis) do + {:electric, %Electric.DDLX.Command.Error{} = error, _analysis} -> + %{ + analysis + | allowed?: false, + electrified?: true, + error: error + } + {:electric, command, analysis} -> + # TODO: the new grant syntax will result in multiple tables being involved. + # is this analysis.table field being used for anything after this point? 
{:ok, table} = - parse_table_name(DDLX.Command.table_name(command), default_schema: state.default_schema) + command + |> DDLX.Command.table_names() + |> parse_table_name(default_schema: state.default_schema) analysis = %{ analysis @@ -573,7 +589,7 @@ defimpl QueryAnalyser, for: PgQuery.CallStmt do capture?: true } - if command_enabled?(command) do + if DDLX.Command.enabled?(command) do analysis else %{ @@ -601,8 +617,9 @@ defimpl QueryAnalyser, for: PgQuery.CallStmt do end end - defp parse_table_name({_schema, _name} = table_name, _opts), do: {:ok, table_name} - defp parse_table_name(name, opts) when is_binary(name), do: NameParser.parse(name, opts) + defp parse_table_name([], _opts), do: {:ok, nil} + defp parse_table_name([{_schema, _name} = table_name | _], _opts), do: {:ok, table_name} + defp parse_table_name([name | _], opts) when is_binary(name), do: NameParser.parse(name, opts) defp extract_electric(stmt, analysis) do case function_name(stmt) do @@ -626,7 +643,9 @@ defimpl QueryAnalyser, for: PgQuery.CallStmt do ["electric", "electrify"] -> {:table, table} = Parser.table_name(stmt) - command = %DDLX.Command.Enable{table_name: table} + + command = DDLX.Command.electric_enable(table) + {:electric, command, analysis} _ -> @@ -641,26 +660,4 @@ defimpl QueryAnalyser, for: PgQuery.CallStmt do defp function_name(%PgQuery.CallStmt{funccall: %{funcname: funcname}}) do Enum.map(funcname, &Parser.string_node_val/1) end - - # shortcut the enable command, which has to be enabled - defp command_enabled?(%DDLX.Command.Enable{}), do: true - - defp command_enabled?(cmd) do - cmd - |> feature_flag() - |> Electric.Features.enabled?() - end - - @feature_flags %{ - DDLX.Command.Grant => :proxy_ddlx_grant, - DDLX.Command.Revoke => :proxy_ddlx_revoke, - DDLX.Command.Assign => :proxy_ddlx_assign, - DDLX.Command.Unassign => :proxy_ddlx_unassign - } - - # either we have a specific flag for the command or we fallback to the - # default setting for the features module, which is `false` 
- defp feature_flag(%cmd{}) do - @feature_flags[cmd] || Electric.Features.default_key() - end end diff --git a/components/electric/lib/electric/replication/changes.ex b/components/electric/lib/electric/replication/changes.ex index 8bf91bca75..4c48134d6b 100644 --- a/components/electric/lib/electric/replication/changes.ex +++ b/components/electric/lib/electric/replication/changes.ex @@ -30,6 +30,7 @@ defmodule Electric.Replication.Changes do Changes.NewRecord.t() | Changes.UpdatedRecord.t() | Changes.DeletedRecord.t() + | Changes.UpdatedPermissions.t() defmodule Transaction do alias Electric.Replication.Changes @@ -210,6 +211,36 @@ defmodule Electric.Replication.Changes do defstruct [:relation] end + defmodule UpdatedPermissions do + defmodule UserPermissions do + # When a user's permissions are changed, through some role change, only connections for that + # user need to do anything and since we know the entire permissions state for the user, + # including the important id, at this point just send them along + defstruct [:user_id, :permissions] + + @type t() :: %__MODULE__{user_id: binary(), permissions: %Electric.Satellite.SatPerms{}} + end + + defmodule GlobalPermissions do + # When the global permissions change, i.e. some ddlx command is received via the proxy, then + # every connected user will have to update their permissions. The actual permission id for a + # given user is not knowable without asking pg, so it has to mean every active connection + # bashing the db to load the new permissions for the user. So it's pointless including the + # actual global permissions state. 
+ defstruct [:permissions_id] + + @type t() :: %__MODULE__{ + permissions_id: integer() + } + end + + defstruct [:type, :permissions] + + @type t() :: + %__MODULE__{type: :user, permissions: UserPermissions.t()} + | %__MODULE__{type: :global, permissions: GlobalPermissions.t()} + end + @spec filter_changes_belonging_to_user(Transaction.t(), binary()) :: Transaction.t() def filter_changes_belonging_to_user(%Transaction{changes: changes} = tx, user_id) do %{tx | changes: Enum.filter(changes, &Changes.Ownership.change_belongs_to_user?(&1, user_id))} diff --git a/components/electric/lib/electric/replication/eval/env/known_functions.ex b/components/electric/lib/electric/replication/eval/env/known_functions.ex index 953408bf82..dac02c9910 100644 --- a/components/electric/lib/electric/replication/eval/env/known_functions.ex +++ b/components/electric/lib/electric/replication/eval/env/known_functions.ex @@ -63,7 +63,7 @@ defmodule Electric.Replication.Eval.Env.KnownFunctions do defpostgres "*numeric_type* + *numeric_type* -> *numeric_type*", delegate: &Kernel.+/2 defpostgres "*numeric_type* - *numeric_type* -> *numeric_type*", delegate: &Kernel.-/2 defpostgres "*integral_type* / *integral_type* -> bool", delegate: &Kernel.div/2 - defpostgres "float8 / float8 -> bool", delegate: &Kernel.//2 + defpostgres "float8 / float8 -> float8", delegate: &Kernel.//2 defpostgres "numeric ^ numeric -> numeric", delegate: &Float.pow/2 defpostgres "float8 ^ float8 -> float8", delegate: &Float.pow/2 defpostgres "|/ float8 -> float8", delegate: &:math.sqrt/1 diff --git a/components/electric/lib/electric/replication/eval/parser.ex b/components/electric/lib/electric/replication/eval/parser.ex index 7e20af38c5..93071bae9c 100644 --- a/components/electric/lib/electric/replication/eval/parser.ex +++ b/components/electric/lib/electric/replication/eval/parser.ex @@ -19,7 +19,16 @@ defmodule Electric.Replication.Eval.Parser do end defmodule Func do - defstruct [:args, :type, :implementation, :name, 
strict?: true, immutable?: true, location: 0] + defstruct [ + :args, + :type, + :implementation, + :name, + strict?: true, + immutable?: true, + location: 0, + cast: nil + ] end @valid_types (Electric.Postgres.supported_types() ++ @@ -439,7 +448,8 @@ defmodule Electric.Replication.Eval.Parser do type: target_type, args: [arg], implementation: impl, - name: "#{type}_to_#{target_type}" + name: "#{type}_to_#{target_type}", + cast: {type, target_type} }} :error -> diff --git a/components/electric/lib/electric/replication/eval/runner.ex b/components/electric/lib/electric/replication/eval/runner.ex index 789a5aa237..77dd25e60f 100644 --- a/components/electric/lib/electric/replication/eval/runner.ex +++ b/components/electric/lib/electric/replication/eval/runner.ex @@ -4,6 +4,9 @@ defmodule Electric.Replication.Eval.Runner do alias Electric.Replication.Eval.Env alias Electric.Replication.Eval.Parser.{Const, Func, Ref} + @type value() :: binary() | integer() | float() | boolean() + @type val_map() :: %{optional([String.t(), ...]) => value()} + @doc """ Generate a ref values object based on the record and a given table name """ @@ -62,7 +65,7 @@ defmodule Electric.Replication.Eval.Runner do defp try_apply(%Func{implementation: impl} = func, args) do case impl do {module, fun} -> apply(module, fun, args) - fun -> apply(fun, args) + fun when is_function(fun) -> apply(fun, args) end rescue _ -> diff --git a/components/electric/lib/electric/replication/postgres/migration_consumer.ex b/components/electric/lib/electric/replication/postgres/migration_consumer.ex index 33b3742253..02a4d2999b 100644 --- a/components/electric/lib/electric/replication/postgres/migration_consumer.ex +++ b/components/electric/lib/electric/replication/postgres/migration_consumer.ex @@ -4,7 +4,8 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do """ use GenStage - import Electric.Postgres.Extension, only: [is_ddl_relation: 1, is_extension_relation: 1] + import Electric.Postgres.Extension, + 
only: [is_ddl_relation: 1, is_extension_relation: 1, is_perms_relation: 1] alias Electric.Postgres.{ Extension, @@ -14,9 +15,10 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do Schema } - alias Electric.Replication.Changes.{NewRecord, Transaction} + alias Electric.Replication.Changes.NewRecord alias Electric.Replication.Connectors alias Electric.Replication.Postgres.Client + alias Electric.Satellite.Permissions alias Electric.Telemetry.Metrics @@ -58,6 +60,8 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do |> SchemaLoader.get(:backend, SchemaCache) |> SchemaLoader.connect(conn_config) + {:ok, permissions_consumer} = Permissions.State.new(loader) + refresh_sub? = Keyword.get(opts, :refresh_subscription, true) Logger.info("Starting #{__MODULE__} using #{elem(loader, 0)} backend") @@ -68,6 +72,7 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do subscription: subscription, producer: producer, loader: loader, + permissions: permissions_consumer, opts: opts, refresh_subscription: refresh_sub?, refresh_enum_types: Keyword.get(opts, :refresh_enum_types, true), @@ -92,22 +97,34 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do @impl GenStage def handle_events(transactions, _from, state) do - {:noreply, filter_transactions(transactions), process_migrations(transactions, state)} + {txns, state} = process_transactions(transactions, state) + {:noreply, txns, state} + end + + defp process_transactions(transactions, state) do + {_transactions, _state} = + Enum.map_reduce(transactions, state, &process_transaction/2) end - defp filter_transactions(transactions) do - Enum.map(transactions, &filter_transaction/1) + defp process_transaction(tx, state) do + {changes, state} = + {tx.changes, state} + |> process_migrations() + |> process_permissions() + |> filter_changes() + + {%{tx | changes: changes}, state} end - # FIXME: we need this to prevent extension metadata tables from being - # replicated between pg instances. 
Should be removed once we're only - # replicating a subset of tables, rather than all - defp filter_transaction(%Transaction{changes: changes} = tx) do + defp filter_changes({changes, state}) do filtered = Enum.filter(changes, fn %{relation: relation} when is_ddl_relation(relation) -> true + %{relation: relation} when is_perms_relation(relation) -> + false + %{relation: relation} = change when is_extension_relation(relation) -> Logger.debug("---- Filtering #{inspect(change)}") false @@ -116,38 +133,46 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do true end) - %{tx | changes: filtered} + {filtered, state} end - defp filter_transaction(change) do - change - end + defp process_permissions({changes, state}) do + %{permissions: consumer_state, loader: loader} = state - defp process_migrations(transactions, %{loader: loader} = state) do - {:ok, %{version: schema_version}} = SchemaLoader.load(loader) + {:ok, changes, consumer_state, loader} = + Permissions.State.update(changes, consumer_state, loader) - {state, num_applied_migrations} = - transactions - |> transactions_to_migrations(state) - |> skip_applied_migrations(schema_version) - |> Enum.reduce({state, 0}, fn migration, {state, num_applied} -> - {perform_migration(migration, state), num_applied + 1} + {changes, %{state | permissions: consumer_state, loader: loader}} + end + + defp process_migrations({changes, %{loader: loader} = state}) do + {:ok, %{version: current_schema_version}} = SchemaLoader.load(loader) + + {state, migration_versions} = + changes + |> transaction_changes_to_migrations(state) + |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + |> skip_applied_migrations(current_schema_version) + |> Enum.reduce({state, []}, fn migration, {state, versions} -> + {state, schema_version} = perform_migration(migration, state) + {state, [schema_version | versions]} end) - if num_applied_migrations > 0 do - refresh_subscription(state) - else - state - end - end + case migration_versions do + [] -> + 
{changes, state} + + [schema_version | _] -> + state = + state + |> refresh_permissions_consumer(schema_version) + |> refresh_subscription() - defp transactions_to_migrations(transactions, state) do - transactions - |> Enum.flat_map(&transaction_changes_to_migrations(&1, state)) - |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + {changes, state} + end end - defp transaction_changes_to_migrations(%Transaction{changes: changes}, state) do + defp transaction_changes_to_migrations(changes, state) do for %NewRecord{record: record, relation: relation} <- changes, is_ddl_relation(relation) do {:ok, version} = SchemaLoader.tx_version(state.loader, record) {:ok, sql} = Extension.extract_ddl_sql(record) @@ -168,7 +193,7 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do %{migration_version: version} ) - %{state | loader: loader} + {%{state | loader: loader}, schema_version} end # update the subscription to add any new @@ -183,6 +208,11 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do state end + defp refresh_permissions_consumer(state, schema_version) do + consumer_state = Permissions.State.update_schema(state.permissions, schema_version) + %{state | permissions: consumer_state} + end + @impl GenStage def handle_cancel({:down, _}, _from, %{producer: producer} = state) do Logger.warning("producer is down: #{inspect(producer)}") diff --git a/components/electric/lib/electric/satellite/permissions.ex b/components/electric/lib/electric/satellite/permissions.ex index 2acdab3cfa..6d8071ba15 100644 --- a/components/electric/lib/electric/satellite/permissions.ex +++ b/components/electric/lib/electric/satellite/permissions.ex @@ -154,8 +154,20 @@ defmodule Electric.Satellite.Permissions do """ use Electric.Satellite.Protobuf + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes - alias Electric.Satellite.Permissions.{Grant, Read, Role, Graph, Transient, WriteBuffer, Trigger} + + alias Electric.Satellite.Permissions.{ + Eval, + 
Grant, + Graph, + Read, + Role, + Transient, + Trigger, + WriteBuffer + } + alias Electric.Satellite.{Auth, SatPerms} require Logger @@ -196,7 +208,6 @@ defmodule Electric.Satellite.Permissions do end defstruct [ - :source, :roles, :scoped_roles, :auth, @@ -204,6 +215,8 @@ defmodule Electric.Satellite.Permissions do :write_buffer, :triggers, :intermediate_roles, + :grants, + source: %{rules: %{grants: [], assigns: []}, roles: [], schema: nil}, transient_lut: Transient ] @@ -235,17 +248,21 @@ defmodule Electric.Satellite.Permissions do @type t() :: %__MODULE__{ roles: role_lookup(), + grants: [Grant.t()], source: %{ - grants: [%SatPerms.Grant{}], + rules: %{ + grants: [%SatPerms.Grant{}], + assigns: [%SatPerms.Assign{}] + }, roles: [%SatPerms.Role{}], - assigns: [%SatPerms.Assign{}] + schema: SchemaLoader.Version.t() }, auth: Auth.t(), transient_lut: Transient.lut(), write_buffer: WriteBuffer.t(), scopes: [relation()], scoped_roles: %{relation => [Role.t()]}, - triggers: %{relation() => [Trigger.assign_trigger_fun()]} + triggers: Trigger.triggers() } @doc """ @@ -278,19 +295,59 @@ defmodule Electric.Satellite.Permissions do - `roles` should be a list of `%SatPerms.Role{}` protobuf structs """ - @spec update(empty() | t(), %SatPerms.Rules{}, [%SatPerms.Role{}]) :: t() - def update(%__MODULE__{} = perms, rules, roles) do - %{grants: grants, assigns: assigns} = rules + @spec update(empty() | t(), SchemaLoader.Version.t(), %SatPerms.Rules{}, [%SatPerms.Role{}]) :: + t() + def update(%__MODULE__{} = perms, schema_version, rules, roles) do + update(perms, schema: schema_version, rules: rules, roles: roles) + end + + def update(%__MODULE__{} = perms, attrs) when is_list(attrs) do + perms + |> update_schema(Keyword.get(attrs, :schema)) + |> update_rules(Keyword.get(attrs, :rules)) + |> update_roles(Keyword.get(attrs, :roles)) + |> rebuild() + end + + defp update_schema(perms, nil) do + perms + end + + defp update_schema(perms, %SchemaLoader.Version{} = schema_version) do + 
%{perms | source: %{perms.source | schema: schema_version}} + end + + defp update_roles(perms, nil) do + perms + end - assigned_roles = build_roles(roles, perms.auth) + defp update_roles(perms, roles) when is_list(roles) do + %{perms | source: %{perms.source | roles: roles}} + end + + defp update_rules(perms, nil) do + perms + end + + defp update_rules(perms, %{grants: _, assigns: _} = rules) do + %{perms | source: %{perms.source | rules: Map.take(rules, [:grants, :assigns])}} + end + + defp rebuild(perms) do + %{roles: roles, rules: rules, schema: schema_version} = perms.source + + assigned_roles = build_roles(roles, perms.auth, rules.assigns) scoped_roles = compile_scopes(assigned_roles) - triggers = build_triggers(assigns) + evaluator = Eval.new(schema_version, perms.auth) + grants = Enum.map(rules.grants, &Grant.new(&1, evaluator)) + + triggers = Trigger.assign_triggers(rules.assigns, schema_version, &trigger_callback/3) %{ perms - | source: %{grants: grants, assigns: assigns, roles: roles}, - roles: build_role_grants(assigned_roles, grants), + | roles: build_role_grants(assigned_roles, grants), scoped_roles: scoped_roles, + grants: grants, scopes: Map.keys(scoped_roles), triggers: triggers } @@ -309,18 +366,11 @@ defmodule Electric.Satellite.Permissions do roles |> Stream.map(&{&1, Role.matching_grants(&1, grants)}) |> Stream.reject(fn {_role, grants} -> Enum.empty?(grants) end) - |> Stream.map(&build_grants/1) |> Stream.flat_map(&invert_role_lookup/1) |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) |> Map.new(&classify_roles/1) end - defp build_triggers(assigns) do - assigns - |> Stream.flat_map(&Trigger.for_assign/1) - |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) - end - # For every `{table, privilege}` tuple we have a set of roles that the current user has. # If any of those roles are global, then it's equvilent to saying that the user can perform # `privilege` on `table` no matter what the scope. 
This function analyses the roles for a @@ -336,18 +386,11 @@ defmodule Electric.Satellite.Permissions do # expand the grants into a list of `{{relation, privilege}, %RoleGrant{}}` # so that we can create a LUT of table and required privilege to role defp invert_role_lookup({role, grants}) do - Stream.flat_map(grants, fn grant -> - Enum.map( - grant.privileges, - &{{grant.table, &1}, %RoleGrant{grant: grant, role: role}} - ) + Stream.map(grants, fn grant -> + {{grant.table, grant.privilege}, %RoleGrant{grant: grant, role: role}} end) end - defp build_grants({role, grants}) do - {role, Enum.map(grants, &Grant.new/1)} - end - defp compile_scopes(roles) do roles |> Stream.filter(&Role.has_scope?/1) @@ -356,8 +399,15 @@ defmodule Electric.Satellite.Permissions do |> Map.new() end - defp build_roles(roles, auth) do + defp build_roles(roles, auth, assigns) do + # after a global (rules) permission change, we copy across all users' permissions without + # modification. if an assign is removed this may leave users with serialised roles with no + # corresponding assign. 
so we should filter a user's roles based on the set of existing + # assigns + assign_ids = MapSet.new(assigns, & &1.id) + roles + |> Stream.filter(&MapSet.member?(assign_ids, &1.assign_id)) |> Enum.map(&Role.new/1) |> add_authenticated(auth) |> add_anyone() @@ -456,15 +506,10 @@ defmodule Electric.Satellite.Permissions do "role #{inspect(role)} grant #{inspect(grant)} gives permission for #{inspect(change)}" ) - write_buffer = Graph.apply_change(write_buffer, perms.scopes, change) - write_buffer = - perms.triggers - |> Map.get(change.relation, []) - |> Enum.flat_map(fn trigger_fun -> - trigger_fun.(change, write_buffer, perms.auth) - end) - |> update_transient_roles(perms, write_buffer) + write_buffer + |> Graph.apply_change(perms.scopes, change) + |> apply_triggers(change, perms) {:cont, {:ok, write_buffer}} end @@ -472,6 +517,45 @@ defmodule Electric.Satellite.Permissions do ) end + defp apply_triggers(write_buffer, change, perms) do + %{auth: %{user_id: user_id}} = perms + + {[^change | effects], _user_id} = + Trigger.apply(change, perms.triggers, user_id) + + update_transient_roles(effects, perms, write_buffer) + end + + defp trigger_callback(event, _change, user_id) do + case event do + {e, %{user_id: ^user_id} = role} when e in [:insert, :delete] -> + {[{e, Role.new(role)}], user_id} + + # update nothing to do with us + {e, _role} when e in [:insert, :delete] -> + {[], user_id} + + # update keeps role belonging to our user + {:update, %{user_id: ^user_id}, %{user_id: ^user_id} = new} -> + {[{:update, Role.new(new)}], user_id} + + # update has moved role to new user + {:update, %{user_id: ^user_id} = old, _new} -> + {[{:delete, Role.new(old)}], user_id} + + # update has moved role us + {:update, _old, %{user_id: ^user_id} = new} -> + {[{:insert, Role.new(new)}], user_id} + + # update nothing to do with us + {:update, _old, _new} -> + {[], user_id} + + :passthrough -> + {[], user_id} + end + end + @spec verify_write(change(), t(), Graph.impl(), lsn()) :: 
RoleGrant.t() | {:error, String.t()} defp verify_write(change, perms, graph, lsn) do action = required_permission(change) @@ -569,9 +653,8 @@ defmodule Electric.Satellite.Permissions do true end - defp change_passes_check?(_grant, _change) do - # TODO: test change against check function - true + defp change_passes_check?(grant, change) do + Eval.execute!(grant.check, change) end defp change_in_scope?(graph, scope_relation, scope_id, change) do @@ -600,13 +683,11 @@ defmodule Electric.Satellite.Permissions do end {:error, - "user does not have permission to " <> + "permissions: user does not have permission to " <> action <> Electric.Utils.inspect_relation(relation)} end def update_transient_roles(role_changes, %__MODULE__{} = perms, write_buffer) do - %{source: %{grants: grants}} = perms - - WriteBuffer.update_transient_roles(write_buffer, role_changes, grants) + WriteBuffer.update_transient_roles(write_buffer, role_changes, perms.grants) end end diff --git a/components/electric/lib/electric/satellite/permissions/eval.ex b/components/electric/lib/electric/satellite/permissions/eval.ex new file mode 100644 index 0000000000..aac3222d0d --- /dev/null +++ b/components/electric/lib/electric/satellite/permissions/eval.ex @@ -0,0 +1,281 @@ +defmodule Electric.Satellite.Permissions.Eval do + @moduledoc """ + A wrapper around the functions in `Electric.Replication.Eval` to give DDLX `GRANT .. WHERE` and + `ASSIGN ... IF` clauses more flexibility and to precompile expressions for evaluation against a + given table. + + Specifically allows for generic tests that will work for inserts, updates and deletes by + intelligently re-writing references to `THIS.column`, `ROW.column` and `column` to `NEW.column`, + `OLD.column` or, for updates, `(NEW.column) AND (OLD.column)`. 
+ """ + alias Electric.Satellite.Auth + + alias Electric.Replication.Changes + alias Electric.Replication.Eval + alias Electric.Replication.Eval.Env + alias Electric.Replication.Eval.Parser + alias Electric.Replication.Eval.Parser.{Func, Ref} + alias Electric.Replication.Eval.Runner + + alias Electric.Postgres.Extension.SchemaLoader + require Record + + # allow for where clauses to refer to the current row as `ROW` or `THIS` + @this ["this", "row"] + @valid_ops [:insert, :delete, :update] + @prefixes ~w(this row new old) + @base_auth_refs %{ + ["auth", "user_id"] => :text + } + + defstruct tables: %{}, context: %{types: %{}, values: %{}} + + @type context() :: %{ + types: %{[String.t(), ...] => Env.pg_type()}, + values: %{[String.t(), ...] => term()} + } + @type t() :: %__MODULE__{ + context: context(), + tables: %{ + Electric.Postgres.relation() => %{[Electric.Postgres.name()] => Env.pg_type()} + } + } + + defmodule ExpressionContext do + defstruct [:query, :context, :relation, :columns, :expr] + + @type t() :: %__MODULE__{ + context: Runner.val_map(), + relation: Electric.Postgres.relation(), + columns: %{Electric.Postgres.name() => Env.pg_type()}, + expr: %{ + insert: Eval.Expr.t(), + delete: Eval.Expr.t(), + update: Eval.Expr.t() + } + } + end + + def new(%SchemaLoader.Version{} = schema_version) do + new_evaluator(%__MODULE__{}, schema_version) + end + + def new(%SchemaLoader.Version{} = schema_version, %Auth{} = auth) do + evaluator = struct(__MODULE__, context: auth_context(auth)) + new_evaluator(evaluator, schema_version) + end + + defp new_evaluator(%__MODULE__{} = evaluator, %SchemaLoader.Version{tables: tables}) do + Enum.reduce(tables, evaluator, fn {relation, table_schema}, eval -> + columns = + Map.new(table_schema.columns, fn column -> + {[column.name], String.to_atom(column.type.name)} + end) + + Map.update!(eval, :tables, &Map.put(&1, relation, columns)) + end) + end + + @doc """ + Permissions where clauses are always defined for a specific table. 
+ + This pre-compiles the given expression for the given table using the table column type + information. + + The `query` must return a `:bool` value. + + Because of the expansion of `ROW` (and `THIS`) expressions depending on the operation (`UPDATE`, + `DELETE` etc) this compilation is done per operation and the resulting expressions stored in a + lookup table. + """ + def expression_context(_evaluator, nil, _table) do + {:ok, nil} + end + + def expression_context(evaluator, query, {_, _} = table) do + with {:ok, refs} <- refs(evaluator, table), + {:ok, expr} <- parse_and_validate_expression(query, refs), + expr_cxt = new_expression_context(evaluator, query, table) do + {:ok, struct(expr_cxt, expr: expand_row_aliases(expr))} + end + end + + defp parse_and_validate_expression(query, refs) do + case Parser.parse_and_validate_expression(query, refs, env()) do + {:ok, %{returns: :bool} = expr} -> + {:ok, expr} + + {:ok, %{returns: returns} = _expr} -> + {:error, + "where clause must return a boolean value: got #{inspect(query)} -> #{to_string(returns)}"} + + error -> + error + end + end + + def env do + # allow for implicitly casting from a uuid to a text. + # this is useful for permissions tests as our e.g. auth.user_id is a generic text value + # (because we don't know what the developer will use as their ids) + # and if the db user id fields are uuids, we end up comparing uuid fields with text values. + # adding this cast removes the need to add an explicit cast of the uuid column to text everywhere + # so we can do `users.id = auth.user_id` rather than `users.id::text = auth.user_id` + # Since the cast of a uuid to a text is a "noop", this feels pretty safe. 
+ Env.new(implicit_casts: %{{:uuid, :text} => :as_is}) + end + + def execute!(%ExpressionContext{} = expr_cxt, change) do + {:ok, result} = execute(expr_cxt, change) + result + end + + def execute( + %ExpressionContext{relation: rel} = expr_cxt, + %Changes.UpdatedRecord{relation: rel} = change + ) do + values = + Map.new( + Enum.concat( + Enum.map(change.record, fn {k, v} -> {["new", k], v} end), + Enum.map(change.old_record, fn {k, v} -> {["old", k], v} end) + ) + ) + + execute_expr(expr_cxt, :update, values) + end + + def execute( + %ExpressionContext{relation: rel} = expr_cxt, + %Changes.NewRecord{relation: rel} = change + ) do + values = + Map.new(change.record, fn {k, v} -> {["new", k], v} end) + + execute_expr(expr_cxt, :insert, values) + end + + def execute( + %ExpressionContext{relation: rel} = expr_cxt, + %Changes.DeletedRecord{relation: rel} = change + ) do + values = + Map.new(change.old_record, fn {k, v} -> {["old", k], v} end) + + execute_expr(expr_cxt, :delete, values) + end + + # allows for testing a record (either old or new) against an expression + def evaluate!(%ExpressionContext{} = expr_cxt, record) when is_map(record) do + values = Map.new(record, fn {k, v} -> {["new", k], v} end) + + {:ok, result} = execute_expr(expr_cxt, :insert, values) + result + end + + defp execute_expr(expr_cxt, op, values) do + expr = Map.fetch!(expr_cxt.expr, op) + values = Map.merge(values, expr_cxt.context) + Runner.execute(expr, values) + end + + defp new_expression_context(%__MODULE__{context: context, tables: tables}, query, table) do + struct(ExpressionContext, + query: query, + context: context.values, + relation: table, + columns: Map.fetch!(tables, table) + ) + end + + defp refs(%__MODULE__{} = evaluator, table) do + with {:ok, table_refs} <- table_refs(evaluator, table) do + {:ok, Map.merge(table_refs, evaluator.context.types)} + end + end + + defp auth_context(%Auth{} = auth) do + # TODO: add types of any claims in the auth struct + %{types: @base_auth_refs, 
values: %{["auth", "user_id"] => auth.user_id}} + end + + defp table_refs(%__MODULE__{tables: tables}, table) do + with {:ok, table_columns} <- Map.fetch(tables, table) do + refs = + Enum.reduce(@prefixes, table_columns, fn prefix, env -> + Enum.reduce(table_columns, env, fn {column, type}, env -> + Map.put(env, [prefix | column], type) + end) + end) + + {:ok, refs} + end + end + + def expand_row_aliases(expr) do + Map.new(@valid_ops, fn op -> + {op, expand_row_aliases(expr, op)} + end) + end + + def expand_row_aliases(%Eval.Expr{eval: ast, returns: :bool} = expr, action) do + %{expr | eval: expand_expr(ast, alias_expansion(action))} + end + + defp expand_expr(expr, mapping) do + if uses_alias?(expr) do + expand_references(expr, mapping) + else + expr + end + end + + defp expand_references(expr, [n]) do + replace_alias(expr, n) + end + + defp expand_references(expr, [n1, n2]) do + %Func{ + args: [replace_alias(expr, n1), replace_alias(expr, n2)], + type: :bool, + name: "and", + location: expr.location, + implementation: &Kernel.and/2 + } + end + + defp replace_alias(args, pre) when is_list(args) do + Enum.map(args, &replace_alias(&1, pre)) + end + + defp replace_alias(%Ref{path: [column_name]} = ref, base) do + %{ref | path: [base, column_name]} + end + + defp replace_alias(%Ref{path: [this | rest]} = ref, base) when this in @this do + %{ref | path: [base | rest]} + end + + defp replace_alias(%Ref{} = ref, _pre) do + ref + end + + defp replace_alias(%Func{} = func, pre) do + %{func | args: replace_alias(func.args, pre)} + end + + defp replace_alias(expr, _pre) do + expr + end + + defp uses_alias?(%Func{args: args}), do: Enum.any?(args, &uses_alias?/1) + + # a single element ref means is referring to a column in the current row, eg. 
`username = 'something'` + defp uses_alias?(%Ref{path: [_column_name]}), do: true + defp uses_alias?(%Ref{path: [this | _rest]}), do: this in @this + defp uses_alias?(_), do: false + + defp alias_expansion(:update), do: ["new", "old"] + defp alias_expansion(:delete), do: ["old"] + defp alias_expansion(:insert), do: ["new"] +end diff --git a/components/electric/lib/electric/satellite/permissions/grant.ex b/components/electric/lib/electric/satellite/permissions/grant.ex index 139203aadd..46844fc2a4 100644 --- a/components/electric/lib/electric/satellite/permissions/grant.ex +++ b/components/electric/lib/electric/satellite/permissions/grant.ex @@ -3,32 +3,35 @@ defmodule Electric.Satellite.Permissions.Grant do A "compiled" version of a grant statement """ + alias Electric.Satellite.Permissions.Eval alias Electric.Satellite.SatPerms - defstruct [:table, :role, :privileges, :columns, :scope, :check, :source, path: []] + defstruct [:table, :role, :privilege, :columns, :scope, :check, :source, path: []] @type relation() :: Electric.Postgres.relation() @type t() :: %__MODULE__{ table: relation(), role: String.t() | :AUTHENTICATED | :ANYONE, - privileges: [Electric.Satellite.Permissions.privilege()], + privilege: Electric.Satellite.Permissions.privilege(), columns: :all | MapSet.t(), scope: relation(), - check: String.t(), + check: nil | Eval.ExpressionContext.t(), path: [String.t()], source: %SatPerms.Grant{} } - @spec new(%SatPerms.Grant{}) :: t() - def new(%SatPerms.Grant{} = grant) do + @spec new(%SatPerms.Grant{}, Eval.t()) :: t() + def new(%SatPerms.Grant{} = grant, evaluator) do + table = make_relation(grant.table) + %__MODULE__{ - table: make_relation(grant.table), + table: table, role: make_role(grant.role), - privileges: grant.privileges, + privilege: grant.privilege, columns: make_columns(grant.columns), scope: make_relation(grant.scope), - check: make_check(grant.check), + check: make_check(grant, table, evaluator), path: make_path(grant.path), source: grant } 
@@ -41,15 +44,18 @@ defmodule Electric.Satellite.Permissions.Grant do # no columns specified so defaults to all defp make_columns(nil), do: :all - defp make_columns(["*"]), do: :all - defp make_columns(columns), do: MapSet.new(columns) + defp make_columns(%SatPerms.ColumnList{names: columns}), do: MapSet.new(columns) defp make_path(empty) when empty in [nil, []], do: nil defp make_path(path), do: path - defp make_check(check) do - # TODO: compile to an actual function - check + defp make_check(%{check: nil}, _table, _evaluator) do + nil + end + + defp make_check(%{check: query}, table, evaluator) when is_binary(query) do + {:ok, expr} = Eval.expression_context(evaluator, query, table) + expr end def columns_valid?(%__MODULE__{columns: :all}, _columns), do: true diff --git a/components/electric/lib/electric/satellite/permissions/graph.ex b/components/electric/lib/electric/satellite/permissions/graph.ex index da56ec5d80..305f55ce4e 100644 --- a/components/electric/lib/electric/satellite/permissions/graph.ex +++ b/components/electric/lib/electric/satellite/permissions/graph.ex @@ -78,15 +78,6 @@ defmodule Electric.Satellite.Permissions.Graph do """ @callback parent(impl(), scope_root(), relation(), record()) :: {relation(), id()} | nil - @doc """ - Return the path through the tables' foreign keys that gets from the given relation to the root. - - If `relation` is the same as `root` then should return `[root]`. - - If there is no path from `relation` to `root`, returns `nil`. - """ - @callback relation_path(impl(), scope_root(), relation()) :: [relation(), ...] 
| nil - @behaviour __MODULE__ defguardp is_relation(r) when is_tuple(r) and tuple_size(r) == 2 @@ -250,13 +241,4 @@ defmodule Electric.Satellite.Permissions.Graph do def primary_key({module, state}, relation, record) do module.primary_key(state, relation, record) end - - @impl __MODULE__ - def relation_path(_impl, root, root) do - [root] - end - - def relation_path({module, state}, root, relation) do - module.relation_path(state, root, relation) - end end diff --git a/components/electric/lib/electric/satellite/permissions/role.ex b/components/electric/lib/electric/satellite/permissions/role.ex index bbba854d90..b7a7bc40c7 100644 --- a/components/electric/lib/electric/satellite/permissions/role.ex +++ b/components/electric/lib/electric/satellite/permissions/role.ex @@ -1,6 +1,7 @@ defmodule Electric.Satellite.Permissions.Role do alias Electric.Satellite.SatPerms alias Electric.Satellite.Permissions + alias Electric.Satellite.Permissions.Grant defmodule Anyone do defstruct [] @@ -33,7 +34,7 @@ defmodule Electric.Satellite.Permissions.Role do @spec new(%SatPerms.Role{} | predefined()) :: t() def new(%SatPerms.Role{} = role) do %__MODULE__{ - id: role.id, + id: role.row_id, role: role.role, user_id: role.user_id, assign_id: role.assign_id, @@ -52,15 +53,15 @@ defmodule Electric.Satellite.Permissions.Role do def matching_grants(%Anyone{}, grants) do Enum.filter(grants, fn - %{role: %{role: {:predefined, :ANYONE}}} -> true - _ -> false + %Grant{role: :ANYONE} -> true + %Grant{role: _} -> false end) end def matching_grants(%Authenticated{}, grants) do Enum.filter(grants, fn - %{role: %{role: {:predefined, :AUTHENTICATED}}} -> true - _ -> false + %Grant{role: :AUTHENTICATED} -> true + %Grant{role: _} -> false end) end @@ -69,7 +70,7 @@ defmodule Electric.Satellite.Permissions.Role do %{role: role_name} = role grants - |> Stream.filter(&reject_predefined/1) + |> Stream.reject(&predefined/1) |> Stream.filter(&is_nil(&1.scope)) |> Enum.filter(&matching_role(&1, role_name)) 
end @@ -79,21 +80,18 @@ defmodule Electric.Satellite.Permissions.Role do %{role: role_name, scope: {role_scope, _id}} = role grants - |> Stream.filter(&reject_predefined/1) + |> Stream.reject(&predefined/1) |> Stream.filter(&matching_scope(&1, role_scope)) |> Enum.filter(&matching_role(&1, role_name)) end - defp reject_predefined(%{role: %{role: {:predefined, _}}}), do: false - defp reject_predefined(_grant), do: true + defp predefined(%Grant{role: role}), do: role in [:ANYONE, :AUTHENTICATED] - defp matching_role(%{role: %{role: {:application, role}}}, role), do: true - defp matching_role(_grant, _role), do: false + defp matching_role(%Grant{role: role}, role), do: true + defp matching_role(%Grant{}, _role), do: false - defp matching_scope(%{scope: %SatPerms.Table{schema: schema, name: name}}, {schema, name}), - do: true - - defp matching_scope(_, _), do: false + defp matching_scope(%Grant{scope: {schema, name}}, {schema, name}), do: true + defp matching_scope(%Grant{}, _), do: false defp make_scope(nil), do: nil defp make_scope(%SatPerms.Scope{table: %{schema: s, name: n}, id: id}), do: {{s, n}, id} diff --git a/components/electric/lib/electric/satellite/permissions/state.ex b/components/electric/lib/electric/satellite/permissions/state.ex new file mode 100644 index 0000000000..5913af0693 --- /dev/null +++ b/components/electric/lib/electric/satellite/permissions/state.ex @@ -0,0 +1,365 @@ +defmodule Electric.Satellite.Permissions.State do + @moduledoc """ + Accepts changes from the replication stream and transforms them into permissions state changes, + both global and per-user. 
+ """ + + alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Replication.Changes + alias Electric.Postgres.Extension + alias Electric.Satellite.Permissions.Trigger + + @electric_ddlx Extension.ddlx_relation() + + @enforce_keys [:rules, :schema] + + defstruct [:rules, :schema, triggers: %{}] + + @type name() :: Electric.Postgres.name() + @type trigger_fun() :: + (Changes.change(), SchemaLoader.t() -> {[Changes.change()], SchemaLoader.t()}) + @typep update_fun() :: (%SatPerms{}, %SatPerms.Role{} -> {:ok, %SatPerms.Roles{}, boolean()}) + + @type t() :: %__MODULE__{ + rules: %SatPerms.Rules{}, + schema: SchemaLoader.Version.t(), + triggers: %{Electric.Postgres.relation() => trigger_fun()} + } + + @doc """ + Creates a new permissions consumer state, based on the current global rules and the current + schema version. + """ + @spec new(SchemaLoader.t()) :: {:ok, t()} | {:error, binary()} + def new(loader) do + with {:ok, schema_version} <- SchemaLoader.load(loader), + {:ok, rules} <- SchemaLoader.global_permissions(loader) do + {:ok, create_triggers(%__MODULE__{rules: rules, schema: schema_version})} + end + end + + @doc """ + Refreshes the current state after a migration updates the schema. + """ + @spec update_schema(t(), SchemaLoader.Version.t()) :: t() + def update_schema(state, %SchemaLoader.Version{} = schema_version) do + create_triggers(%{state | schema: schema_version}) + end + + @doc """ + Accept a transaction, or set of changes from a transaction and transform them into global- or + user-permissions changes. 
+ """ + @spec update(Changes.Transaction.t(), t(), SchemaLoader.t()) :: + {:ok, Changes.Transaction.t(), t(), SchemaLoader.t()} + def update(%Changes.Transaction{changes: changes} = tx, state, loader) do + {:ok, changes, state, loader} = update(changes, state, loader) + + {:ok, %{tx | changes: changes}, state, loader} + end + + @spec update([Changes.change()], t(), SchemaLoader.t()) :: + {:ok, [Changes.change()], t(), SchemaLoader.t()} + def update(changes, state, loader) when is_list(changes) do + # group changes by relation -- this is really only to avoid churn on the global permissions + # rules which is an expensive operation. by grouping on the relation we can transform a series + # of ddlx permission commands into a single update to the global permissions struct + {changes, {state, loader}} = + changes + |> Enum.chunk_by(& &1.relation) + |> Enum.flat_map_reduce({state, loader}, &apply_changes/2) + + {:ok, changes, state, loader} + end + + # useful function for testing creation of global state + @doc false + def update_global(%SatPerms.DDLX{} = ddlx, loader) do + with {:ok, rules} <- SchemaLoader.global_permissions(loader) do + case mutate_global(ddlx, rules) do + {rules, 0} -> + {:ok, 0, loader, rules} + + {rules, n} -> + with {:ok, loader} <- SchemaLoader.save_global_permissions(loader, rules) do + {:ok, n, loader, rules} + end + end + end + end + + defp apply_changes([%{relation: @electric_ddlx} | _] = changes, {state, loader}) do + {:ok, rules} = SchemaLoader.global_permissions(loader) + + case Enum.reduce(changes, {rules, 0}, &apply_global_change/2) do + {_rules, 0} -> + {[], {state, loader}} + + {rules, _count} -> + {:ok, loader} = SchemaLoader.save_global_permissions(loader, rules) + + { + [updated_global_permissions(rules)], + {create_triggers(%{state | rules: rules}), loader} + } + end + end + + defp apply_changes(changes, {state, loader}) do + {changes, {_triggers, loader}} = + Enum.flat_map_reduce(changes, {state.triggers, loader}, 
&apply_triggers/2) + + {changes, {state, loader}} + end + + # the ddlx table is insert-only + defp apply_global_change(%Changes.NewRecord{} = change, {rules, count}) do + %{record: %{"ddlx" => ddlx_bytes}} = change + + pb_bytes = + case ddlx_bytes do + "\\x" <> rest -> Base.decode16!(rest, case: :lower) + bytes -> bytes + end + + {:ok, ddlx} = + Protox.decode(pb_bytes, SatPerms.DDLX) + + mutate_global(ddlx, rules, count) + end + + defp apply_triggers(change, {triggers, loader}) do + {changes, loader} = + Trigger.apply(change, triggers, loader) + + {changes, {triggers, loader}} + end + + defp update_roles_callback(:passthrough, _change, loader) do + {[], loader} + end + + defp update_roles_callback({:insert, role}, _change, loader) do + {:ok, loader, update_message} = mutate_user_perms(role, loader, &insert_role/2) + + {update_message, loader} + end + + defp update_roles_callback({:update, old_role, new_role}, _change, loader) do + if old_role.user_id == new_role.user_id do + {:ok, loader, update_message} = mutate_user_perms(new_role, loader, &update_role/2) + + {update_message, loader} + else + {:ok, loader, old_update_message} = mutate_user_perms(old_role, loader, &delete_role/2) + {:ok, loader, new_update_message} = mutate_user_perms(new_role, loader, &insert_role/2) + + { + Enum.concat( + old_update_message, + new_update_message + ), + loader + } + end + end + + defp update_roles_callback({:delete, role}, _change, loader) do + {:ok, loader, update_message} = mutate_user_perms(role, loader, &delete_role/2) + + {update_message, loader} + end + + @spec mutate_user_perms(%SatPerms.Role{}, SchemaLoader.t(), update_fun()) :: + {:ok, SchemaLoader.t(), [Changes.UpdatedPermissions.t()]} + defp mutate_user_perms(role, loader, update_fun) do + with {:ok, loader, perms} <- SchemaLoader.user_permissions(loader, role.user_id), + {:ok, roles, modified?} <- update_fun.(perms, role), + {roles, modified?} = gc_roles(perms, roles, modified?) do + if modified? 
do + with {:ok, loader, perms} <- + SchemaLoader.save_user_permissions(loader, role.user_id, roles) do + {:ok, loader, [updated_user_permissions(role.user_id, perms)]} + end + else + {:ok, loader, []} + end + end + end + + defp insert_role(perms, new_role) do + with roles <- load_roles(perms) do + {:ok, Map.update!(roles, :roles, &[new_role | &1]), true} + end + end + + defp update_role(perms, new_role) do + with user_roles <- load_roles(perms) do + {updated_roles, modified?} = + Enum.map_reduce(user_roles.roles, false, fn role, modified? -> + if role_match?(role, new_role), do: {new_role, true}, else: {role, modified?} + end) + + {:ok, %{user_roles | roles: updated_roles}, modified?} + end + end + + defp delete_role(perms, new_role) do + with user_roles <- load_roles(perms) do + {updated_roles, modified?} = + Enum.flat_map_reduce(user_roles.roles, false, fn role, modified? -> + if role_match?(role, new_role), do: {[], true}, else: {[role], modified?} + end) + + {:ok, %{user_roles | roles: updated_roles}, modified?} + end + end + + defp mutate_global(ddlx, rules, count \\ 0) + + defp mutate_global( + %SatPerms.DDLX{grants: [], revokes: [], assigns: [], unassigns: []}, + rules, + count + ) do + {rules, count} + end + + defp mutate_global(%SatPerms.DDLX{} = ddlx, rules, count) do + {apply_ddlx(rules, ddlx, count == 0), count + count_changes(ddlx)} + end + + defp role_match?(role1, role2) do + role1.assign_id == role2.assign_id && role1.row_id == role2.row_id + end + + defp load_roles(perms) do + %{id: id, roles: role_list, rules: %{id: rules_id}} = perms + + %SatPerms.Roles{ + parent_id: id, + rules_id: rules_id, + roles: role_list + } + end + + defp gc_roles(perms, roles, modified?) do + valid_assigns = MapSet.new(perms.rules.assigns, & &1.id) + + {updated_roles, modified?} = + Enum.flat_map_reduce(roles.roles, modified?, fn role, modified? 
-> + if MapSet.member?(valid_assigns, role.assign_id), + do: {[role], modified?}, + else: {[], true} + end) + + {%{roles | roles: updated_roles}, modified?} + end + + # the `%SatPerms.DDLX{}` struct contains multiple instances of say a `%SatPerms.Grant{}` but these + # multiple instances are the result of a single command (e.g. a `GRANT ALL...` will result in 4 + # separate entries in the `grants` list but represent a single statement). + # + # Thus the order they are applied in a migration is preserved by the ordering of the arrival of + # the DDLX structs through the replication stream. + # + # Since each struct's id is a fingerprint that acts as a primary key, we just need to operate on + # the existing rules keyed by this id. + # + # Public only for its usefulness in tests. + @doc false + @spec apply_ddlx(%SatPerms.Rules{}, %SatPerms.DDLX{}) :: %SatPerms.Rules{} + def apply_ddlx(rules, ddlx, is_first? \\ true) + + def apply_ddlx(%SatPerms.Rules{} = rules, %SatPerms.DDLX{} = ddlx, is_first?) do + rules + |> update_grants(ddlx.grants) + |> update_revokes(ddlx.revokes) + |> update_assigns(ddlx.assigns) + |> update_unassigns(ddlx.unassigns) + |> increment_id(is_first?) 
+ end + + defp update_grants(rules, grants) do + add_rules(rules, :grants, grants) + end + + defp update_revokes(rules, revokes) do + remove_rules(rules, :grants, revokes) + end + + defp update_assigns(rules, assigns) do + add_rules(rules, :assigns, assigns) + end + + defp update_unassigns(rules, unassigns) do + remove_rules(rules, :assigns, unassigns) + end + + defp add_rules(rules, key, updates) do + update_rules(rules, key, updates, fn update, existing -> + Map.put(existing, update.id, update) + end) + end + + defp remove_rules(rules, key, updates) do + update_rules(rules, key, updates, fn update, existing -> + Map.delete(existing, update.id) + end) + end + + defp update_rules(rules, key, updates, update_fun) do + Map.update!(rules, key, fn existing -> + existing = Map.new(existing, &{&1.id, &1}) + + # be absolutely sure that every permission struct has an id set + updates + |> Stream.map(&Command.put_id/1) + |> Enum.reduce(existing, update_fun) + |> Map.values() + end) + end + + defp increment_id(%{id: id} = rules, true) do + %{rules | id: id + 1, parent_id: id} + end + + defp increment_id(rules, false) do + rules + end + + defp count_changes(ddlx) do + [:grants, :revokes, :assigns, :unassigns] + |> Enum.reduce(0, fn key, count -> + count + length(Map.fetch!(ddlx, key)) + end) + end + + defp updated_user_permissions(user_id, permissions) do + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: user_id, + permissions: permissions + } + } + end + + defp updated_global_permissions(permissions) do + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: permissions.id + } + } + end + + defp create_triggers(state) do + triggers = + Trigger.assign_triggers(state.rules.assigns, state.schema, &update_roles_callback/3) + + %{state | triggers: triggers} + end +end diff --git 
a/components/electric/lib/electric/satellite/permissions/trigger.ex b/components/electric/lib/electric/satellite/permissions/trigger.ex index e4c928d89c..f4d387ca7a 100644 --- a/components/electric/lib/electric/satellite/permissions/trigger.ex +++ b/components/electric/lib/electric/satellite/permissions/trigger.ex @@ -1,107 +1,237 @@ defmodule Electric.Satellite.Permissions.Trigger do + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes - alias Electric.Satellite.{Auth, SatPerms} - alias Electric.Satellite.Permissions - alias Electric.Satellite.Permissions.Graph + alias Electric.Satellite.Permissions.Eval + alias Electric.Satellite.SatPerms + + @type role() :: %SatPerms.Role{} + @type role_event() :: + {:insert, new :: role()} + | {:update, old :: role(), new :: role()} + | {:delete, old :: role()} + @type callback_arg() :: term() + @type callback_result() :: {[Changes.change() | [term()]], callback_arg()} + @type callback_fun() :: (role_event(), Changes.change(), callback_arg() -> callback_result()) + @type trigger_fun() :: (Changes.change(), callback_arg() -> callback_result()) + @type triggers() :: %{Electric.Postgres.relation() => trigger_fun()} + + @doc """ + Create a callback map for the given list of assignments. + + The callback map is a map of relation => function. + + The function expects to be called with two arguments: + + 1. The change struct from the logical replication stream + 2. Some user defined argument that will be passed to the final callback function + (`trigger_callback_function()`) + + The `trigger_callback_function()` is called with 3 arguments: + + 1. The role change event which is a map of the original change in the data to the resulting + change in role + 2. The original pg change event + 3. 
The second argument to the original callback + + It should return a tuple `{effects :: [term()], callback_arg()}` which is list of effects, which + will be appended to the original change plus the modified callback argument it was given, or + `nil` which is the same as returning `{[], original_callback_arg}`. + """ + @spec assign_triggers([%SatPerms.Assign{}], SchemaLoader.Version.t(), callback_fun()) :: + triggers() + def assign_triggers(assigns, schema_version, trigger_callback_fun) + when is_function(trigger_callback_fun, 3) do + evaluator = Eval.new(schema_version) + + assigns + |> Enum.map(&for_assign(&1, schema_version, evaluator, trigger_callback_fun)) + |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + end - @type assign_trigger_fun() :: - (Permissions.change(), Graph.impl(), Auth.t() -> [Permissions.Role.t()]) + def for_assign(assign, schema_version, trigger_callback_fun) do + for_assign(assign, schema_version, Eval.new(schema_version), trigger_callback_fun) + end - @spec for_assign(%SatPerms.Assign{}) :: [{Permissions.relation(), assign_trigger_fun()}] - def for_assign(assign) do + @doc false + @spec for_assign(%SatPerms.Assign{}, SchemaLoader.Version.t(), Eval.t(), callback_fun()) :: + {Electric.Postgres.relation(), trigger_fun()} + def for_assign(assign, schema_version, evaluator, trigger_callback_fun) + when is_function(trigger_callback_fun, 3) do %{table: %{schema: schema, name: name}} = assign - [ - {{schema, name}, &role_for_assign(&1, &2, &3, assign)} - ] + relation = {schema, name} + + {:ok, expression} = Eval.expression_context(evaluator, assign.if, relation) + + {:ok, fks} = + case assign do + %{scope: %{schema: scope_schema, name: scope_table}} -> + SchemaLoader.Version.foreign_keys( + schema_version, + relation, + {scope_schema, scope_table} + ) + + %{scope: nil} -> + {:ok, []} + end + + {:ok, pks} = SchemaLoader.Version.primary_keys(schema_version, relation) + + assign_data = + assign + |> Map.from_struct() + |> Map.put( + :watch_columns, + 
Enum.reject(fks ++ [assign.user_column, assign.role_column], &is_nil/1) + ) + |> Map.put(:where, expression) + + { + relation, + &change_trigger(&1, &2, assign_data, pks, fks, trigger_callback_fun) + } end - defp role_for_assign(%Changes.NewRecord{} = insert, graph, auth, assign) do - scopes = role_scopes(insert, assign, graph) + @doc """ + Apply the triggers to the given change. - for {role, id} <- build_roles(insert, graph, auth, assign, scopes) do - {:insert, {insert.relation, id}, role} - end + The `fallback` function is called when no trigger exists for the given relation. + + """ + @spec apply(Changes.change(), triggers(), callback_arg()) :: callback_result() + def apply(%{relation: relation} = change, triggers, callback_arg) do + {effects, callback_arg} = + triggers + |> Map.get(relation, [&null_trigger/2]) + |> Enum.flat_map_reduce(callback_arg, fn trigger_fun, arg -> + trigger_fun.(change, arg) || {[], arg} + end) + + {[change | effects], callback_arg} end - defp role_for_assign(%Changes.UpdatedRecord{} = update, graph, auth, assign) do - scopes = role_scopes(update, assign, graph) + # just ignore changes with no relation + def apply(_change, _triggers, callback_arg) do + {[], callback_arg} + end - for {role, id} <- build_roles(update, graph, auth, assign, scopes) do - {:update, {update.relation, id}, role} - end + defp null_trigger(_change, arg) do + {[], arg} end - defp role_for_assign(%Changes.DeletedRecord{} = delete, graph, auth, assign) do - # for deletes we need to know about the upstream graph because the local graph will already - # have the record as deleted, so we won't get scope information - upstream_graph = Electric.Satellite.Permissions.WriteBuffer.upstream_graph(graph) - scopes = role_scopes(delete, assign, upstream_graph) + defp change_trigger(%Changes.NewRecord{} = change, loader, assign, pks, fks, callback_fun) do + %{record: record} = change - id = Graph.primary_key(graph, delete.relation, delete.old_record) + # only assign the role if 
the where expression passes + if validate_where(assign, change) do + role = role(record, assign, pks, fks) - # include a force delete for any roles in the buffer plus a delete for - # any roles in the underlying shape data - [ - {:delete, {delete.relation, id}} - | for( - {role, id} <- build_roles(delete, graph, auth, assign, scopes), - do: {:delete, {delete.relation, id}, role} - ) - ] + callback_fun.({:insert, role}, change, loader) + else + callback_fun.(:passthrough, change, loader) + end end - defp build_roles(change, graph, auth, assign, scopes) do - record = - case change do - %Changes.DeletedRecord{old_record: record} -> record - %{record: record} -> record + defp change_trigger(%Changes.UpdatedRecord{} = change, loader, assign, pks, fks, callback_fun) do + %{old_record: old, record: new, changed_columns: changed_columns} = change + + if MapSet.size(changed_columns) > 0 do + # if role as been detatched, e.g. by a fk with delete action "SET NULL" or the role value has + # been nulled, then delete the role + role_nulled? = + assign.watch_columns + |> Enum.filter(&MapSet.member?(changed_columns, &1)) + |> Enum.map(&Map.fetch!(new, &1)) + |> Enum.any?(&is_nil/1) + + r = &role(&1, assign, pks, fks) + + if role_nulled? 
do + callback_fun.({:delete, r.(old)}, change, loader) + else + event = + case validate_where(assign, change) do + {true, true} -> + # - old: t, new: t -> update: row still has a matching role, but that role may have changed + {:update, r.(old), r.(new)} + + {true, false} -> + # - old: t, new: f -> delete: old row did create role before but now shouldn't + {:delete, r.(old)} + + {false, true} -> + # - old: f, new: t -> insert: old row didn't create a role, but should now + {:insert, r.(new)} + + {false, false} -> + # - old: f, new: f -> passthrough: no role existed before, none should be created + :passthrough + end + + callback_fun.(event, change, loader) end - - %{user_id: user_id} = auth - %{user_column: user_column} = assign - - with ^user_id <- Map.get(record, user_column, nil), - role_name = role_name(record, assign) do - id = Graph.primary_key(graph, change.relation, record) - - Enum.map(scopes, fn scope -> - {%Permissions.Role{ - id: id, - role: role_name, - user_id: user_id, - assign_id: assign.id, - scope: scope - }, id} - end) - else - _ -> [] end end - defp role_name(record, assign) do - case assign do - %{role_name: role_name, role_column: column} - when role_name in [nil, ""] and is_binary(column) -> - Map.fetch!(record, column) + defp change_trigger(%Changes.DeletedRecord{} = change, loader, assign, pks, fks, callback_fun) do + %{old_record: record} = change - %{role_name: name, role_column: role_column} - when role_column in [nil, ""] and is_binary(name) -> - name - end + role = role(record, assign, pks, fks) + + # send a delete even if say the row doesn't pass the where clause because + # we lose nothing by deleting something that isn't there. 
+ # the callbacks should be able to handle a delete op on a non-existant role + callback_fun.({:delete, role}, change, loader) end - defp role_scopes(change, assign, graph) do - case assign do - %{scope: nil} -> - [nil] + defp validate_where(%{where: nil}, %Changes.UpdatedRecord{} = _change) do + {true, true} + end - %{scope: %{schema: schema, name: name}} -> - root = {schema, name} + defp validate_where( + %{where: %Eval.ExpressionContext{} = expr}, + %Changes.UpdatedRecord{} = change + ) do + %{old_record: old, record: new} = change + {Eval.evaluate!(expr, old), Eval.evaluate!(expr, new)} + end - graph - |> Graph.scope_id(root, change) - |> Enum.map(fn {id, _} -> {root, id} end) - end + defp validate_where(%{where: nil}, _change) do + true + end + + defp validate_where(%{where: %Eval.ExpressionContext{} = expr}, change) do + Eval.execute!(expr, change) + end + + defp role(record, assign, pks, fks) do + %SatPerms.Role{ + row_id: Enum.map(pks, &Map.fetch!(record, &1)), + role: role_name(record, assign), + user_id: Map.fetch!(record, assign.user_column), + assign_id: assign.id, + scope: role_scope(fks, record, assign) + } + end + + defp role_name(_record, %{role_column: nil, role_name: role_name}) when is_binary(role_name) do + role_name + end + + defp role_name(record, %{role_column: role_column}) when is_binary(role_column) do + Map.fetch!(record, role_column) + end + + defp role_scope(_fks, _record, %{scope: nil}) do + nil + end + + defp role_scope(fks, record, %{scope: %{schema: sname, name: tname}}) do + %SatPerms.Scope{table: role_table(sname, tname), id: Enum.map(fks, &Map.fetch!(record, &1))} + end + + defp role_table(schema, name) do + %SatPerms.Table{schema: schema, name: name} end end diff --git a/components/electric/lib/electric/satellite/permissions/write_buffer.ex b/components/electric/lib/electric/satellite/permissions/write_buffer.ex index a35abf11a4..180a6f9148 100644 --- a/components/electric/lib/electric/satellite/permissions/write_buffer.ex +++ 
b/components/electric/lib/electric/satellite/permissions/write_buffer.ex @@ -107,22 +107,26 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do {__MODULE__, state(state, roles: roles, deleted_roles: deleted, role_grants: role_grants)} end - defp update_intermediate_role({:insert, {relation, id}, role}, {roles, deleted}) do + defp update_intermediate_role({:insert, role}, {roles, deleted}) do + key = role_key(role) + { - Map.put(roles, {relation, id}, role), - MapSet.delete(deleted, role_key(role)) + Map.put(roles, key, role), + MapSet.delete(deleted, key) } end - defp update_intermediate_role({:update, {relation, id}, role}, {roles, deleted}) do - {Map.put(roles, {relation, id}, role), deleted} + defp update_intermediate_role({:update, role}, {roles, deleted}) do + {Map.put(roles, role_key(role), role), deleted} end - defp update_intermediate_role({:delete, {relation, id}, role}, {roles, deleted}) do - case Map.pop(roles, {relation, id}) do + defp update_intermediate_role({:delete, role}, {roles, deleted}) do + key = role_key(role) + + case Map.pop(roles, key) do {nil, roles} -> # deleting a role that we haven't just written - {roles, MapSet.put(deleted, role_key(role))} + {roles, MapSet.put(deleted, key)} {%{}, roles} -> {roles, deleted} @@ -134,7 +138,7 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do end defp role_key(role) do - {role.scope, role.assign_id, role.user_id, role.role} + {role.assign_id, role.id} end @moduledoc """ @@ -258,9 +262,4 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do def modified_fks(state(upstream: upstream), root, update) do Permissions.Graph.modified_fks(upstream, root, update) end - - @impl Permissions.Graph - def relation_path(state(upstream: upstream), root, relation) do - Permissions.Graph.relation_path(upstream, root, relation) - end end diff --git a/components/electric/lib/electric/satellite/protobuf_messages.ex b/components/electric/lib/electric/satellite/protobuf_messages.ex index 
ddf7de4a3d..65ca4abcc7 100644 --- a/components/electric/lib/electric/satellite/protobuf_messages.ex +++ b/components/electric/lib/electric/satellite/protobuf_messages.ex @@ -2715,9 +2715,9 @@ end ) end, - defmodule Electric.Satellite.SatSubsDataError.ShapeReqError do + defmodule Electric.Satellite.SatPerms.DDLX do @moduledoc false - defstruct code: :CODE_UNSPECIFIED, message: "", request_id: "" + defstruct grants: [], revokes: [], assigns: [], unassigns: [], sqlite: [] ( ( @@ -2732,53 +2732,111 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_code(msg) |> encode_message(msg) |> encode_request_id(msg) + [] + |> encode_grants(msg) + |> encode_revokes(msg) + |> encode_assigns(msg) + |> encode_unassigns(msg) + |> encode_sqlite(msg) end ) [] [ - defp encode_code(acc, msg) do + defp encode_grants(acc, msg) do try do - if msg.code == :CODE_UNSPECIFIED do - acc - else - [ - acc, - "\b", - msg.code - |> Electric.Satellite.SatSubsDataError.ShapeReqError.Code.encode() - |> Protox.Encode.encode_enum() - ] + case msg.grants do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\n", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:code, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:grants, "invalid field value"), __STACKTRACE__ end end, - defp encode_message(acc, msg) do + defp encode_revokes(acc, msg) do try do - if msg.message == "" do - acc - else - [acc, "\x12", Protox.Encode.encode_string(msg.message)] + case msg.revokes do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:message, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:revokes, "invalid field value"), __STACKTRACE__ end end, - defp encode_request_id(acc, msg) do + defp 
encode_assigns(acc, msg) do try do - if msg.request_id == "" do - acc - else - [acc, "\x1A", Protox.Encode.encode_string(msg.request_id)] + case msg.assigns do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x1A", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:request_id, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:assigns, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_unassigns(acc, msg) do + try do + case msg.unassigns do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\"", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:unassigns, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_sqlite(acc, msg) do + try do + case msg.sqlite do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "*", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:sqlite, "invalid field value"), __STACKTRACE__ end end ] @@ -2801,7 +2859,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatSubsDataError.ShapeReqError)) + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.DDLX)) end ) ) @@ -2819,23 +2877,43 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> - {value, rest} = - Protox.Decode.parse_enum( - bytes, - Electric.Satellite.SatSubsDataError.ShapeReqError.Code - ) + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[code: value], rest} + {[grants: msg.grants ++ [Electric.Satellite.SatPerms.Grant.decode!(delimited)]], + rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[message: 
Protox.Decode.validate_string(delimited)], rest} + + {[ + revokes: msg.revokes ++ [Electric.Satellite.SatPerms.Revoke.decode!(delimited)] + ], rest} {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[request_id: Protox.Decode.validate_string(delimited)], rest} + + {[ + assigns: msg.assigns ++ [Electric.Satellite.SatPerms.Assign.decode!(delimited)] + ], rest} + + {4, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + unassigns: + msg.unassigns ++ [Electric.Satellite.SatPerms.Unassign.decode!(delimited)] + ], rest} + + {5, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[sqlite: msg.sqlite ++ [Electric.Satellite.SatPerms.Sqlite.decode!(delimited)]], + rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -2866,7 +2944,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatSubsDataError.ShapeReqError, + Electric.Satellite.SatPerms.DDLX, &json_library_wrapper.decode!(json_library, &1) ) end @@ -2894,11 +2972,11 @@ } def defs() do %{ - 1 => - {:code, {:scalar, :CODE_UNSPECIFIED}, - {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code}}, - 2 => {:message, {:scalar, ""}, :string}, - 3 => {:request_id, {:scalar, ""}, :string} + 1 => {:grants, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + 2 => {:revokes, :unpacked, {:message, Electric.Satellite.SatPerms.Revoke}}, + 3 => {:assigns, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}}, + 4 => {:unassigns, :unpacked, {:message, Electric.Satellite.SatPerms.Unassign}}, + 5 => {:sqlite, :unpacked, {:message, Electric.Satellite.SatPerms.Sqlite}} } end @@ -2908,11 +2986,11 @@ } def defs_by_name() do %{ - code: - {1, {:scalar, :CODE_UNSPECIFIED}, - {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code}}, - 
message: {2, {:scalar, ""}, :string}, - request_id: {3, {:scalar, ""}, :string} + assigns: {3, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}}, + grants: {1, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + revokes: {2, :unpacked, {:message, Electric.Satellite.SatPerms.Revoke}}, + sqlite: {5, :unpacked, {:message, Electric.Satellite.SatPerms.Sqlite}}, + unassigns: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Unassign}} } end ) @@ -2923,30 +3001,48 @@ [ %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, :CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "grants", + kind: :unpacked, + label: :repeated, + name: :grants, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} + type: {:message, Electric.Satellite.SatPerms.Grant} }, %{ __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, + json_name: "revokes", + kind: :unpacked, + label: :repeated, + name: :revokes, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Revoke} }, %{ __struct__: Protox.Field, - json_name: "requestId", - kind: {:scalar, ""}, - label: :optional, - name: :request_id, + json_name: "assigns", + kind: :unpacked, + label: :repeated, + name: :assigns, tag: 3, - type: :string + type: {:message, Electric.Satellite.SatPerms.Assign} + }, + %{ + __struct__: Protox.Field, + json_name: "unassigns", + kind: :unpacked, + label: :repeated, + name: :unassigns, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Unassign} + }, + %{ + __struct__: Protox.Field, + json_name: "sqlite", + kind: :unpacked, + label: :repeated, + name: :sqlite, + tag: 5, + type: {:message, Electric.Satellite.SatPerms.Sqlite} } ] end @@ -2954,102 +3050,149 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:code) do + def field_def(:grants) do {:ok, %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, 
:CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "grants", + kind: :unpacked, + label: :repeated, + name: :grants, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} + type: {:message, Electric.Satellite.SatPerms.Grant} }} end - def field_def("code") do + def field_def("grants") do {:ok, %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, :CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "grants", + kind: :unpacked, + label: :repeated, + name: :grants, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} + type: {:message, Electric.Satellite.SatPerms.Grant} }} end [] ), ( - def field_def(:message) do + def field_def(:revokes) do {:ok, %{ __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, + json_name: "revokes", + kind: :unpacked, + label: :repeated, + name: :revokes, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Revoke} }} end - def field_def("message") do + def field_def("revokes") do {:ok, %{ __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, + json_name: "revokes", + kind: :unpacked, + label: :repeated, + name: :revokes, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Revoke} }} end [] ), ( - def field_def(:request_id) do + def field_def(:assigns) do {:ok, %{ __struct__: Protox.Field, - json_name: "requestId", - kind: {:scalar, ""}, - label: :optional, - name: :request_id, + json_name: "assigns", + kind: :unpacked, + label: :repeated, + name: :assigns, tag: 3, - type: :string + type: {:message, Electric.Satellite.SatPerms.Assign} }} end - def field_def("requestId") do + def field_def("assigns") do {:ok, %{ __struct__: Protox.Field, - json_name: "requestId", - kind: {:scalar, ""}, - label: :optional, - name: :request_id, + json_name: "assigns", + kind: :unpacked, + label: :repeated, + name: 
:assigns, tag: 3, - type: :string + type: {:message, Electric.Satellite.SatPerms.Assign} }} end - def field_def("request_id") do + [] + ), + ( + def field_def(:unassigns) do {:ok, %{ __struct__: Protox.Field, - json_name: "requestId", - kind: {:scalar, ""}, - label: :optional, - name: :request_id, - tag: 3, - type: :string - }} - end + json_name: "unassigns", + kind: :unpacked, + label: :repeated, + name: :unassigns, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Unassign} + }} + end + + def field_def("unassigns") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "unassigns", + kind: :unpacked, + label: :repeated, + name: :unassigns, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Unassign} + }} + end + + [] + ), + ( + def field_def(:sqlite) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "sqlite", + kind: :unpacked, + label: :repeated, + name: :sqlite, + tag: 5, + type: {:message, Electric.Satellite.SatPerms.Sqlite} + }} + end + + def field_def("sqlite") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "sqlite", + kind: :unpacked, + label: :repeated, + name: :sqlite, + tag: 5, + type: {:message, Electric.Satellite.SatPerms.Sqlite} + }} + end + + [] ), def field_def(_) do {:error, :no_such_field} @@ -3075,14 +3218,20 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:code) do - {:ok, :CODE_UNSPECIFIED} + def default(:grants) do + {:error, :no_default_value} end, - def default(:message) do - {:ok, ""} + def default(:revokes) do + {:error, :no_default_value} end, - def default(:request_id) do - {:ok, ""} + def default(:assigns) do + {:error, :no_default_value} + end, + def default(:unassigns) do + {:error, :no_default_value} + end, + def default(:sqlite) do + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -3096,7 +3245,7 @@ end ) end, - defmodule Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError do + defmodule 
Electric.Satellite.SatSubsDataError.ShapeReqError do @moduledoc false defstruct code: :CODE_UNSPECIFIED, message: "", request_id: "" @@ -3129,7 +3278,7 @@ acc, "\b", msg.code - |> Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code.encode() + |> Electric.Satellite.SatSubsDataError.ShapeReqError.Code.encode() |> Protox.Encode.encode_enum() ] end @@ -3182,10 +3331,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value( - bytes, - struct(Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError) - ) + parse_key_value(bytes, struct(Electric.Satellite.SatSubsDataError.ShapeReqError)) end ) ) @@ -3206,7 +3352,7 @@ {value, rest} = Protox.Decode.parse_enum( bytes, - Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code + Electric.Satellite.SatSubsDataError.ShapeReqError.Code ) {[code: value], rest} @@ -3250,7 +3396,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError, + Electric.Satellite.SatSubsDataError.ShapeReqError, &json_library_wrapper.decode!(json_library, &1) ) end @@ -3280,7 +3426,7 @@ %{ 1 => {:code, {:scalar, :CODE_UNSPECIFIED}, - {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code}}, + {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code}}, 2 => {:message, {:scalar, ""}, :string}, 3 => {:request_id, {:scalar, ""}, :string} } @@ -3294,7 +3440,7 @@ %{ code: {1, {:scalar, :CODE_UNSPECIFIED}, - {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code}}, + {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code}}, message: {2, {:scalar, ""}, :string}, request_id: {3, {:scalar, ""}, :string} } @@ -3312,7 +3458,7 @@ label: :optional, name: :code, tag: 1, - type: {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} + type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} }, %{ __struct__: Protox.Field, @@ -3347,7 +3493,7 @@ label: :optional, name: :code, tag: 1, - type: {:enum, 
Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} + type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} }} end @@ -3360,7 +3506,7 @@ label: :optional, name: :code, tag: 1, - type: {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} + type: {:enum, Electric.Satellite.SatSubsDataError.ShapeReqError.Code} }} end @@ -3480,13 +3626,9 @@ end ) end, - defmodule Electric.Satellite.SatInStartReplicationReq do + defmodule Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError do @moduledoc false - defstruct lsn: "", - options: [], - subscription_ids: [], - schema_version: nil, - observed_transaction_data: [] + defstruct code: :CODE_UNSPECIFIED, message: "", request_id: "" ( ( @@ -3501,119 +3643,53 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] - |> encode_schema_version(msg) - |> encode_lsn(msg) - |> encode_options(msg) - |> encode_subscription_ids(msg) - |> encode_observed_transaction_data(msg) + [] |> encode_code(msg) |> encode_message(msg) |> encode_request_id(msg) end ) [] [ - defp encode_lsn(acc, msg) do + defp encode_code(acc, msg) do try do - if msg.lsn == "" do + if msg.code == :CODE_UNSPECIFIED do acc else - [acc, "\n", Protox.Encode.encode_bytes(msg.lsn)] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:lsn, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_options(acc, msg) do - try do - case msg.options do - [] -> - acc - - values -> - [ - acc, - "\x12", - ( - {bytes, len} = - Enum.reduce(values, {[], 0}, fn value, {acc, len} -> - value_bytes = - :binary.list_to_bin([ - value - |> Electric.Satellite.SatInStartReplicationReq.Option.encode() - |> Protox.Encode.encode_enum() - ]) - - {[acc, value_bytes], len + byte_size(value_bytes)} - end) - - [Protox.Varint.encode(len), bytes] - ) - ] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:options, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_subscription_ids(acc, 
msg) do - try do - case msg.subscription_ids do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\"", Protox.Encode.encode_string(value)] - end) - ] + [ + acc, + "\b", + msg.code + |> Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code.encode() + |> Protox.Encode.encode_enum() + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:subscription_ids, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:code, "invalid field value"), __STACKTRACE__ end end, - defp encode_schema_version(acc, msg) do + defp encode_message(acc, msg) do try do - case msg.schema_version do - nil -> [acc] - child_field_value -> [acc, "*", Protox.Encode.encode_string(child_field_value)] + if msg.message == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.message)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:schema_version, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:message, "invalid field value"), __STACKTRACE__ end end, - defp encode_observed_transaction_data(acc, msg) do + defp encode_request_id(acc, msg) do try do - case msg.observed_transaction_data do - [] -> - acc - - values -> - [ - acc, - "2", - ( - {bytes, len} = - Enum.reduce(values, {[], 0}, fn value, {acc, len} -> - value_bytes = :binary.list_to_bin([Protox.Encode.encode_uint64(value)]) - {[acc, value_bytes], len + byte_size(value_bytes)} - end) - - [Protox.Varint.encode(len), bytes] - ) - ] + if msg.request_id == "" do + acc + else + [acc, "\x1A", Protox.Encode.encode_string(msg.request_id)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:observed_transaction_data, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:request_id, "invalid field value"), __STACKTRACE__ end end ] @@ -3636,7 +3712,10 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, 
struct(Electric.Satellite.SatInStartReplicationReq)) + parse_key_value( + bytes, + struct(Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError) + ) end ) ) @@ -3654,60 +3733,23 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[lsn: delimited], rest} - - {2, 2, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - options: - msg.options ++ - Protox.Decode.parse_repeated_enum( - [], - delimited, - Electric.Satellite.SatInStartReplicationReq.Option - ) - ], rest} - - {2, _, bytes} -> {value, rest} = Protox.Decode.parse_enum( bytes, - Electric.Satellite.SatInStartReplicationReq.Option + Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code ) - {[options: msg.options ++ [value]], rest} - - {4, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - subscription_ids: - msg.subscription_ids ++ [Protox.Decode.validate_string(delimited)] - ], rest} + {[code: value], rest} - {5, _, bytes} -> + {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[schema_version: Protox.Decode.validate_string(delimited)], rest} + {[message: Protox.Decode.validate_string(delimited)], rest} - {6, 2, bytes} -> + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - observed_transaction_data: - msg.observed_transaction_data ++ - Protox.Decode.parse_repeated_uint64([], delimited) - ], rest} - - {6, _, bytes} -> - {value, rest} = Protox.Decode.parse_uint64(bytes) - {[observed_transaction_data: msg.observed_transaction_data ++ [value]], rest} + {[request_id: Protox.Decode.validate_string(delimited)], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, 
wire_type, rest) @@ -3738,7 +3780,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatInStartReplicationReq, + Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError, &json_library_wrapper.decode!(json_library, &1) ) end @@ -3766,11 +3808,11 @@ } def defs() do %{ - 1 => {:lsn, {:scalar, ""}, :bytes}, - 2 => {:options, :packed, {:enum, Electric.Satellite.SatInStartReplicationReq.Option}}, - 4 => {:subscription_ids, :unpacked, :string}, - 5 => {:schema_version, {:oneof, :_schema_version}, :string}, - 6 => {:observed_transaction_data, :packed, :uint64} + 1 => + {:code, {:scalar, :CODE_UNSPECIFIED}, + {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code}}, + 2 => {:message, {:scalar, ""}, :string}, + 3 => {:request_id, {:scalar, ""}, :string} } end @@ -3780,11 +3822,11 @@ } def defs_by_name() do %{ - lsn: {1, {:scalar, ""}, :bytes}, - observed_transaction_data: {6, :packed, :uint64}, - options: {2, :packed, {:enum, Electric.Satellite.SatInStartReplicationReq.Option}}, - schema_version: {5, {:oneof, :_schema_version}, :string}, - subscription_ids: {4, :unpacked, :string} + code: + {1, {:scalar, :CODE_UNSPECIFIED}, + {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code}}, + message: {2, {:scalar, ""}, :string}, + request_id: {3, {:scalar, ""}, :string} } end ) @@ -3795,48 +3837,30 @@ [ %{ __struct__: Protox.Field, - json_name: "lsn", - kind: {:scalar, ""}, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, label: :optional, - name: :lsn, + name: :code, tag: 1, - type: :bytes + type: {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} }, %{ __struct__: Protox.Field, - json_name: "options", - kind: :packed, - label: :repeated, - name: :options, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, tag: 2, - type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} - }, - %{ - __struct__: Protox.Field, - json_name: "subscriptionIds", - kind: :unpacked, - 
label: :repeated, - name: :subscription_ids, - tag: 4, type: :string }, %{ __struct__: Protox.Field, - json_name: "schemaVersion", - kind: {:oneof, :_schema_version}, - label: :proto3_optional, - name: :schema_version, - tag: 5, + json_name: "requestId", + kind: {:scalar, ""}, + label: :optional, + name: :request_id, + tag: 3, type: :string - }, - %{ - __struct__: Protox.Field, - json_name: "observedTransactionData", - kind: :packed, - label: :repeated, - name: :observed_transaction_data, - tag: 6, - type: :uint64 } ] end @@ -3844,183 +3868,103 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:lsn) do + def field_def(:code) do {:ok, %{ __struct__: Protox.Field, - json_name: "lsn", - kind: {:scalar, ""}, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, label: :optional, - name: :lsn, + name: :code, tag: 1, - type: :bytes + type: {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} }} end - def field_def("lsn") do + def field_def("code") do {:ok, %{ __struct__: Protox.Field, - json_name: "lsn", - kind: {:scalar, ""}, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, label: :optional, - name: :lsn, + name: :code, tag: 1, - type: :bytes + type: {:enum, Electric.Satellite.SatSubsResp.SatSubsError.ShapeReqError.Code} }} end [] ), ( - def field_def(:options) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "options", - kind: :packed, - label: :repeated, - name: :options, - tag: 2, - type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} - }} - end - - def field_def("options") do + def field_def(:message) do {:ok, %{ __struct__: Protox.Field, - json_name: "options", - kind: :packed, - label: :repeated, - name: :options, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, tag: 2, - type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} - }} - end - - [] - ), - ( - def field_def(:subscription_ids) do - {:ok, - %{ - 
__struct__: Protox.Field, - json_name: "subscriptionIds", - kind: :unpacked, - label: :repeated, - name: :subscription_ids, - tag: 4, type: :string }} end - def field_def("subscriptionIds") do + def field_def("message") do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionIds", - kind: :unpacked, - label: :repeated, - name: :subscription_ids, - tag: 4, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, + tag: 2, type: :string }} end - def field_def("subscription_ids") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "subscriptionIds", - kind: :unpacked, - label: :repeated, - name: :subscription_ids, - tag: 4, - type: :string - }} - end + [] ), ( - def field_def(:schema_version) do + def field_def(:request_id) do {:ok, %{ __struct__: Protox.Field, - json_name: "schemaVersion", - kind: {:oneof, :_schema_version}, - label: :proto3_optional, - name: :schema_version, - tag: 5, + json_name: "requestId", + kind: {:scalar, ""}, + label: :optional, + name: :request_id, + tag: 3, type: :string }} end - def field_def("schemaVersion") do + def field_def("requestId") do {:ok, %{ __struct__: Protox.Field, - json_name: "schemaVersion", - kind: {:oneof, :_schema_version}, - label: :proto3_optional, - name: :schema_version, - tag: 5, + json_name: "requestId", + kind: {:scalar, ""}, + label: :optional, + name: :request_id, + tag: 3, type: :string }} end - def field_def("schema_version") do + def field_def("request_id") do {:ok, %{ __struct__: Protox.Field, - json_name: "schemaVersion", - kind: {:oneof, :_schema_version}, - label: :proto3_optional, - name: :schema_version, - tag: 5, + json_name: "requestId", + kind: {:scalar, ""}, + label: :optional, + name: :request_id, + tag: 3, type: :string }} end ), - ( - def field_def(:observed_transaction_data) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "observedTransactionData", - kind: :packed, - label: :repeated, - name: :observed_transaction_data, - tag: 6, - type: :uint64 
- }} - end - - def field_def("observedTransactionData") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "observedTransactionData", - kind: :packed, - label: :repeated, - name: :observed_transaction_data, - tag: 6, - type: :uint64 - }} - end - - def field_def("observed_transaction_data") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "observedTransactionData", - kind: :packed, - label: :repeated, - name: :observed_transaction_data, - tag: 6, - type: :uint64 - }} - end - ), def field_def(_) do {:error, :no_such_field} end @@ -4045,20 +3989,14 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:lsn) do - {:ok, ""} - end, - def default(:options) do - {:error, :no_default_value} - end, - def default(:subscription_ids) do - {:error, :no_default_value} + def default(:code) do + {:ok, :CODE_UNSPECIFIED} end, - def default(:schema_version) do - {:error, :no_default_value} + def default(:message) do + {:ok, ""} end, - def default(:observed_transaction_data) do - {:error, :no_default_value} + def default(:request_id) do + {:ok, ""} end, def default(_) do {:error, :no_such_field} @@ -4072,9 +4010,9 @@ end ) end, - defmodule Electric.Satellite.SatAuthReq do + defmodule Electric.Satellite.SatPerms.Revoke do @moduledoc false - defstruct id: "", token: "", headers: [] + defstruct id: "", table: nil, role: nil, privilege: :DELETE, scope: nil, path: nil ( ( @@ -4089,7 +4027,13 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_id(msg) |> encode_token(msg) |> encode_headers(msg) + [] + |> encode_scope(msg) + |> encode_path(msg) + |> encode_id(msg) + |> encode_table(msg) + |> encode_role(msg) + |> encode_privilege(msg) end ) @@ -4108,35 +4052,68 @@ reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ end end, - defp encode_token(acc, msg) do + defp encode_table(acc, msg) do try do - if msg.token == "" do + if msg.table == nil do acc else - [acc, "\x12", 
Protox.Encode.encode_string(msg.token)] + [acc, "\x12", Protox.Encode.encode_message(msg.table)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:token, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:table, "invalid field value"), __STACKTRACE__ end end, - defp encode_headers(acc, msg) do + defp encode_role(acc, msg) do try do - case msg.headers do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\x1A", Protox.Encode.encode_message(value)] - end) - ] + if msg.role == nil do + acc + else + [acc, "\x1A", Protox.Encode.encode_message(msg.role)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:headers, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:role, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_privilege(acc, msg) do + try do + if msg.privilege == :DELETE do + acc + else + [ + acc, + " ", + msg.privilege + |> Electric.Satellite.SatPerms.Privilege.encode() + |> Protox.Encode.encode_enum() + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:privilege, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_scope(acc, msg) do + try do + case msg.scope do + nil -> [acc] + child_field_value -> [acc, "2", Protox.Encode.encode_message(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:scope, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_path(acc, msg) do + try do + case msg.path do + nil -> [acc] + child_field_value -> [acc, ":", Protox.Encode.encode_message(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:path, "invalid field value"), __STACKTRACE__ end end ] @@ -4159,7 +4136,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatAuthReq)) + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.Revoke)) end ) ) @@ 
-4184,15 +4161,67 @@ {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[token: Protox.Decode.validate_string(delimited)], rest} + + {[ + table: + Protox.MergeMessage.merge( + msg.table, + Electric.Satellite.SatPerms.Table.decode!(delimited) + ) + ], rest} {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) {[ - headers: - msg.headers ++ [Electric.Satellite.SatAuthHeaderPair.decode!(delimited)] + role: + Protox.MergeMessage.merge( + msg.role, + Electric.Satellite.SatPerms.RoleName.decode!(delimited) + ) + ], rest} + + {4, _, bytes} -> + {value, rest} = + Protox.Decode.parse_enum(bytes, Electric.Satellite.SatPerms.Privilege) + + {[privilege: value], rest} + + {6, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + case msg.scope do + {:scope, previous_value} -> + {:scope, + Protox.MergeMessage.merge( + previous_value, + Electric.Satellite.SatPerms.Table.decode!(delimited) + )} + + _ -> + {:scope, Electric.Satellite.SatPerms.Table.decode!(delimited)} + end + ], rest} + + {7, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + case msg.path do + {:path, previous_value} -> + {:path, + Protox.MergeMessage.merge( + previous_value, + Electric.Satellite.SatPerms.Path.decode!(delimited) + )} + + _ -> + {:path, Electric.Satellite.SatPerms.Path.decode!(delimited)} + end ], rest} {tag, wire_type, rest} -> @@ -4224,7 +4253,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatAuthReq, + Electric.Satellite.SatPerms.Revoke, &json_library_wrapper.decode!(json_library, &1) ) end @@ -4253,8 +4282,11 @@ def defs() do %{ 1 => {:id, {:scalar, ""}, :string}, - 2 => {:token, {:scalar, ""}, :string}, - 3 => {:headers, :unpacked, {:message, Electric.Satellite.SatAuthHeaderPair}} + 2 
=> {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, + 3 => {:role, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, + 4 => {:privilege, {:scalar, :DELETE}, {:enum, Electric.Satellite.SatPerms.Privilege}}, + 6 => {:scope, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, + 7 => {:path, {:oneof, :_path}, {:message, Electric.Satellite.SatPerms.Path}} } end @@ -4264,9 +4296,12 @@ } def defs_by_name() do %{ - headers: {3, :unpacked, {:message, Electric.Satellite.SatAuthHeaderPair}}, id: {1, {:scalar, ""}, :string}, - token: {2, {:scalar, ""}, :string} + path: {7, {:oneof, :_path}, {:message, Electric.Satellite.SatPerms.Path}}, + privilege: {4, {:scalar, :DELETE}, {:enum, Electric.Satellite.SatPerms.Privilege}}, + role: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, + scope: {6, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, + table: {2, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}} } end ) @@ -4286,21 +4321,48 @@ }, %{ __struct__: Protox.Field, - json_name: "token", - kind: {:scalar, ""}, + json_name: "table", + kind: {:scalar, nil}, label: :optional, - name: :token, + name: :table, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Table} }, %{ __struct__: Protox.Field, - json_name: "headers", - kind: :unpacked, - label: :repeated, - name: :headers, + json_name: "role", + kind: {:scalar, nil}, + label: :optional, + name: :role, tag: 3, - type: {:message, Electric.Satellite.SatAuthHeaderPair} + type: {:message, Electric.Satellite.SatPerms.RoleName} + }, + %{ + __struct__: Protox.Field, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, + tag: 4, + type: {:enum, Electric.Satellite.SatPerms.Privilege} + }, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} 
+ }, + %{ + __struct__: Protox.Field, + json_name: "path", + kind: {:oneof, :_path}, + label: :proto3_optional, + name: :path, + tag: 7, + type: {:message, Electric.Satellite.SatPerms.Path} } ] end @@ -4337,83 +4399,170 @@ [] ), ( - def field_def(:token) do + def field_def(:table) do {:ok, %{ __struct__: Protox.Field, - json_name: "token", - kind: {:scalar, ""}, + json_name: "table", + kind: {:scalar, nil}, label: :optional, - name: :token, + name: :table, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Table} }} end - def field_def("token") do + def field_def("table") do {:ok, %{ __struct__: Protox.Field, - json_name: "token", - kind: {:scalar, ""}, + json_name: "table", + kind: {:scalar, nil}, label: :optional, - name: :token, + name: :table, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatPerms.Table} }} end [] ), ( - def field_def(:headers) do + def field_def(:role) do {:ok, %{ __struct__: Protox.Field, - json_name: "headers", - kind: :unpacked, - label: :repeated, - name: :headers, + json_name: "role", + kind: {:scalar, nil}, + label: :optional, + name: :role, tag: 3, - type: {:message, Electric.Satellite.SatAuthHeaderPair} + type: {:message, Electric.Satellite.SatPerms.RoleName} }} end - def field_def("headers") do + def field_def("role") do {:ok, %{ __struct__: Protox.Field, - json_name: "headers", - kind: :unpacked, - label: :repeated, - name: :headers, + json_name: "role", + kind: {:scalar, nil}, + label: :optional, + name: :role, tag: 3, - type: {:message, Electric.Satellite.SatAuthHeaderPair} + type: {:message, Electric.Satellite.SatPerms.RoleName} }} end [] ), - def field_def(_) do - {:error, :no_such_field} - end - ] - ) + ( + def field_def(:privilege) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, + tag: 4, + type: {:enum, Electric.Satellite.SatPerms.Privilege} + }} + end - [] + def field_def("privilege") do + {:ok, + %{ + 
__struct__: Protox.Field, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, + tag: 4, + type: {:enum, Electric.Satellite.SatPerms.Privilege} + }} + end - ( - @spec required_fields() :: [] - def required_fields() do - [] - end - ) + [] + ), + ( + def field_def(:scope) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end - ( - @spec syntax() :: atom() - def syntax() do - :proto3 - end + def field_def("scope") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end + + [] + ), + ( + def field_def(:path) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "path", + kind: {:oneof, :_path}, + label: :proto3_optional, + name: :path, + tag: 7, + type: {:message, Electric.Satellite.SatPerms.Path} + }} + end + + def field_def("path") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "path", + kind: {:oneof, :_path}, + label: :proto3_optional, + name: :path, + tag: 7, + type: {:message, Electric.Satellite.SatPerms.Path} + }} + end + + [] + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end ) [ @@ -4421,10 +4570,19 @@ def default(:id) do {:ok, ""} end, - def default(:token) do - {:ok, ""} + def default(:table) do + {:ok, nil} end, - def default(:headers) do + def default(:role) do + {:ok, nil} + end, + def default(:privilege) do + {:ok, :DELETE} + end, + def default(:scope) do + {:error, :no_default_value} + end, + def default(:path) do {:error, :no_default_value} end, def default(_) do @@ -4439,9 +4597,13 @@ end ) end, - defmodule 
Electric.Satellite.SatShapeDef.Relation do + defmodule Electric.Satellite.SatInStartReplicationReq do @moduledoc false - defstruct foreign_key: [], select: nil + defstruct lsn: "", + options: [], + subscription_ids: [], + schema_version: nil, + observed_transaction_data: [] ( ( @@ -4456,16 +4618,65 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_foreign_key(msg) |> encode_select(msg) + [] + |> encode_schema_version(msg) + |> encode_lsn(msg) + |> encode_options(msg) + |> encode_subscription_ids(msg) + |> encode_observed_transaction_data(msg) end ) [] [ - defp encode_foreign_key(acc, msg) do + defp encode_lsn(acc, msg) do try do - case msg.foreign_key do + if msg.lsn == "" do + acc + else + [acc, "\n", Protox.Encode.encode_bytes(msg.lsn)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:lsn, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_options(acc, msg) do + try do + case msg.options do + [] -> + acc + + values -> + [ + acc, + "\x12", + ( + {bytes, len} = + Enum.reduce(values, {[], 0}, fn value, {acc, len} -> + value_bytes = + :binary.list_to_bin([ + value + |> Electric.Satellite.SatInStartReplicationReq.Option.encode() + |> Protox.Encode.encode_enum() + ]) + + {[acc, value_bytes], len + byte_size(value_bytes)} + end) + + [Protox.Varint.encode(len), bytes] + ) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:options, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_subscription_ids(acc, msg) do + try do + case msg.subscription_ids do [] -> acc @@ -4473,26 +4684,53 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\n", Protox.Encode.encode_string(value)] + [acc, "\"", Protox.Encode.encode_string(value)] end) ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:foreign_key, "invalid field value"), + reraise Protox.EncodingError.new(:subscription_ids, "invalid field value"), __STACKTRACE__ end end, - defp encode_select(acc, msg) do 
+ defp encode_schema_version(acc, msg) do try do - if msg.select == nil do - acc - else - [acc, "\x12", Protox.Encode.encode_message(msg.select)] + case msg.schema_version do + nil -> [acc] + child_field_value -> [acc, "*", Protox.Encode.encode_string(child_field_value)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:select, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:schema_version, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_observed_transaction_data(acc, msg) do + try do + case msg.observed_transaction_data do + [] -> + acc + + values -> + [ + acc, + "2", + ( + {bytes, len} = + Enum.reduce(values, {[], 0}, fn value, {acc, len} -> + value_bytes = :binary.list_to_bin([Protox.Encode.encode_uint64(value)]) + {[acc, value_bytes], len + byte_size(value_bytes)} + end) + + [Protox.Varint.encode(len), bytes] + ) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:observed_transaction_data, "invalid field value"), + __STACKTRACE__ end end ] @@ -4515,7 +4753,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatShapeDef.Relation)) + parse_key_value(bytes, struct(Electric.Satellite.SatInStartReplicationReq)) end ) ) @@ -4535,22 +4773,59 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[lsn: delimited], rest} - {[foreign_key: msg.foreign_key ++ [Protox.Decode.validate_string(delimited)]], - rest} + {2, 2, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + options: + msg.options ++ + Protox.Decode.parse_repeated_enum( + [], + delimited, + Electric.Satellite.SatInStartReplicationReq.Option + ) + ], rest} {2, _, bytes} -> + {value, rest} = + Protox.Decode.parse_enum( + bytes, + Electric.Satellite.SatInStartReplicationReq.Option + ) + + {[options: msg.options ++ 
[value]], rest} + + {4, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) {[ - select: - Protox.MergeMessage.merge( - msg.select, - Electric.Satellite.SatShapeDef.Select.decode!(delimited) - ) + subscription_ids: + msg.subscription_ids ++ [Protox.Decode.validate_string(delimited)] + ], rest} + + {5, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[schema_version: Protox.Decode.validate_string(delimited)], rest} + + {6, 2, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + observed_transaction_data: + msg.observed_transaction_data ++ + Protox.Decode.parse_repeated_uint64([], delimited) ], rest} + {6, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[observed_transaction_data: msg.observed_transaction_data ++ [value]], rest} + {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) {[], rest} @@ -4580,7 +4855,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatShapeDef.Relation, + Electric.Satellite.SatInStartReplicationReq, &json_library_wrapper.decode!(json_library, &1) ) end @@ -4608,8 +4883,11 @@ } def defs() do %{ - 1 => {:foreign_key, :unpacked, :string}, - 2 => {:select, {:scalar, nil}, {:message, Electric.Satellite.SatShapeDef.Select}} + 1 => {:lsn, {:scalar, ""}, :bytes}, + 2 => {:options, :packed, {:enum, Electric.Satellite.SatInStartReplicationReq.Option}}, + 4 => {:subscription_ids, :unpacked, :string}, + 5 => {:schema_version, {:oneof, :_schema_version}, :string}, + 6 => {:observed_transaction_data, :packed, :uint64} } end @@ -4619,8 +4897,11 @@ } def defs_by_name() do %{ - foreign_key: {1, :unpacked, :string}, - select: {2, {:scalar, nil}, {:message, Electric.Satellite.SatShapeDef.Select}} + lsn: {1, {:scalar, ""}, :bytes}, + observed_transaction_data: {6, :packed, :uint64}, 
+ options: {2, :packed, {:enum, Electric.Satellite.SatInStartReplicationReq.Option}}, + schema_version: {5, {:oneof, :_schema_version}, :string}, + subscription_ids: {4, :unpacked, :string} } end ) @@ -4631,21 +4912,48 @@ [ %{ __struct__: Protox.Field, - json_name: "foreignKey", - kind: :unpacked, - label: :repeated, - name: :foreign_key, + json_name: "lsn", + kind: {:scalar, ""}, + label: :optional, + name: :lsn, tag: 1, - type: :string + type: :bytes }, %{ __struct__: Protox.Field, - json_name: "select", - kind: {:scalar, nil}, - label: :optional, - name: :select, + json_name: "options", + kind: :packed, + label: :repeated, + name: :options, tag: 2, - type: {:message, Electric.Satellite.SatShapeDef.Select} + type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} + }, + %{ + __struct__: Protox.Field, + json_name: "subscriptionIds", + kind: :unpacked, + label: :repeated, + name: :subscription_ids, + tag: 4, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "schemaVersion", + kind: {:oneof, :_schema_version}, + label: :proto3_optional, + name: :schema_version, + tag: 5, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "observedTransactionData", + kind: :packed, + label: :repeated, + name: :observed_transaction_data, + tag: 6, + type: :uint64 } ] end @@ -4653,73 +4961,182 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:foreign_key) do + def field_def(:lsn) do {:ok, %{ __struct__: Protox.Field, - json_name: "foreignKey", + json_name: "lsn", + kind: {:scalar, ""}, + label: :optional, + name: :lsn, + tag: 1, + type: :bytes + }} + end + + def field_def("lsn") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "lsn", + kind: {:scalar, ""}, + label: :optional, + name: :lsn, + tag: 1, + type: :bytes + }} + end + + [] + ), + ( + def field_def(:options) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "options", + kind: :packed, + label: :repeated, + name: 
:options, + tag: 2, + type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} + }} + end + + def field_def("options") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "options", + kind: :packed, + label: :repeated, + name: :options, + tag: 2, + type: {:enum, Electric.Satellite.SatInStartReplicationReq.Option} + }} + end + + [] + ), + ( + def field_def(:subscription_ids) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionIds", kind: :unpacked, label: :repeated, - name: :foreign_key, - tag: 1, + name: :subscription_ids, + tag: 4, type: :string }} end - def field_def("foreignKey") do + def field_def("subscriptionIds") do {:ok, %{ __struct__: Protox.Field, - json_name: "foreignKey", + json_name: "subscriptionIds", kind: :unpacked, label: :repeated, - name: :foreign_key, - tag: 1, + name: :subscription_ids, + tag: 4, type: :string }} end - def field_def("foreign_key") do + def field_def("subscription_ids") do {:ok, %{ __struct__: Protox.Field, - json_name: "foreignKey", + json_name: "subscriptionIds", kind: :unpacked, label: :repeated, - name: :foreign_key, - tag: 1, + name: :subscription_ids, + tag: 4, type: :string }} end ), ( - def field_def(:select) do + def field_def(:schema_version) do {:ok, %{ __struct__: Protox.Field, - json_name: "select", - kind: {:scalar, nil}, - label: :optional, - name: :select, - tag: 2, - type: {:message, Electric.Satellite.SatShapeDef.Select} + json_name: "schemaVersion", + kind: {:oneof, :_schema_version}, + label: :proto3_optional, + name: :schema_version, + tag: 5, + type: :string }} end - def field_def("select") do + def field_def("schemaVersion") do {:ok, %{ __struct__: Protox.Field, - json_name: "select", - kind: {:scalar, nil}, - label: :optional, - name: :select, - tag: 2, - type: {:message, Electric.Satellite.SatShapeDef.Select} + json_name: "schemaVersion", + kind: {:oneof, :_schema_version}, + label: :proto3_optional, + name: :schema_version, + tag: 5, + type: :string }} end - [] + def 
field_def("schema_version") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "schemaVersion", + kind: {:oneof, :_schema_version}, + label: :proto3_optional, + name: :schema_version, + tag: 5, + type: :string + }} + end + ), + ( + def field_def(:observed_transaction_data) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "observedTransactionData", + kind: :packed, + label: :repeated, + name: :observed_transaction_data, + tag: 6, + type: :uint64 + }} + end + + def field_def("observedTransactionData") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "observedTransactionData", + kind: :packed, + label: :repeated, + name: :observed_transaction_data, + tag: 6, + type: :uint64 + }} + end + + def field_def("observed_transaction_data") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "observedTransactionData", + kind: :packed, + label: :repeated, + name: :observed_transaction_data, + tag: 6, + type: :uint64 + }} + end ), def field_def(_) do {:error, :no_such_field} @@ -4745,11 +5162,20 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:foreign_key) do + def default(:lsn) do + {:ok, ""} + end, + def default(:options) do {:error, :no_default_value} end, - def default(:select) do - {:ok, nil} + def default(:subscription_ids) do + {:error, :no_default_value} + end, + def default(:schema_version) do + {:error, :no_default_value} + end, + def default(:observed_transaction_data) do + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -4763,9 +5189,9 @@ end ) end, - defmodule Electric.Satellite.SatSubsReq do + defmodule Electric.Satellite.SatAuthReq do @moduledoc false - defstruct subscription_id: "", shape_requests: [] + defstruct id: "", token: "", headers: [] ( ( @@ -4780,29 +5206,2150 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_subscription_id(msg) |> encode_shape_requests(msg) + [] |> encode_id(msg) |> encode_token(msg) |> 
encode_headers(msg) end ) [] [ - defp encode_subscription_id(acc, msg) do + defp encode_id(acc, msg) do + try do + if msg.id == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_token(acc, msg) do + try do + if msg.token == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.token)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:token, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_headers(acc, msg) do + try do + case msg.headers do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x1A", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:headers, "invalid field value"), __STACKTRACE__ + end + end + ] + + [] + ) + + ( + ( + @spec decode(binary) :: {:ok, struct} | {:error, any} + def decode(bytes) do + try do + {:ok, decode!(bytes)} + rescue + e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] -> + {:error, e} + end + end + + ( + @spec decode!(binary) :: struct | no_return + def decode!(bytes) do + parse_key_value(bytes, struct(Electric.Satellite.SatAuthReq)) + end + ) + ) + + ( + @spec parse_key_value(binary, struct) :: struct + defp parse_key_value(<<>>, msg) do + msg + end + + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} + + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[id: Protox.Decode.validate_string(delimited)], rest} + + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[token: Protox.Decode.validate_string(delimited)], rest} + + {3, _, bytes} -> + {len, 
bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + headers: + msg.headers ++ [Electric.Satellite.SatAuthHeaderPair.decode!(delimited)] + ], rest} + + {tag, wire_type, rest} -> + {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) + {[], rest} + end + + msg_updated = struct(msg, field) + parse_key_value(rest, msg_updated) + end + ) + + [] + ) + + ( + @spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()} + def json_decode(input, opts \\ []) do + try do + {:ok, json_decode!(input, opts)} + rescue + e in Protox.JsonDecodingError -> {:error, e} + end + end + + @spec json_decode!(iodata(), keyword()) :: struct() | no_return() + def json_decode!(input, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode) + + Protox.JsonDecode.decode!( + input, + Electric.Satellite.SatAuthReq, + &json_library_wrapper.decode!(json_library, &1) + ) + end + + @spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()} + def json_encode(msg, opts \\ []) do + try do + {:ok, json_encode!(msg, opts)} + rescue + e in Protox.JsonEncodingError -> {:error, e} + end + end + + @spec json_encode!(struct(), keyword()) :: iodata() | no_return() + def json_encode!(msg, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode) + Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1)) + end + ) + + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => {:id, {:scalar, ""}, :string}, + 2 => {:token, {:scalar, ""}, :string}, + 3 => {:headers, :unpacked, {:message, Electric.Satellite.SatAuthHeaderPair}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} + } 
+ def defs_by_name() do + %{ + headers: {3, :unpacked, {:message, Electric.Satellite.SatAuthHeaderPair}}, + id: {1, {:scalar, ""}, :string}, + token: {2, {:scalar, ""}, :string} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "token", + kind: {:scalar, ""}, + label: :optional, + name: :token, + tag: 2, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "headers", + kind: :unpacked, + label: :repeated, + name: :headers, + tag: 3, + type: {:message, Electric.Satellite.SatAuthHeaderPair} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + [] + ), + ( + def field_def(:token) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "token", + kind: {:scalar, ""}, + label: :optional, + name: :token, + tag: 2, + type: :string + }} + end + + def field_def("token") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "token", + kind: {:scalar, ""}, + label: :optional, + name: :token, + tag: 2, + type: :string + }} + end + + [] + ), + ( + def field_def(:headers) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "headers", + kind: :unpacked, + label: :repeated, + name: :headers, + tag: 3, + type: {:message, Electric.Satellite.SatAuthHeaderPair} + }} + end + + def field_def("headers") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "headers", + kind: :unpacked, + label: :repeated, + name: :headers, + tag: 3, 
+ type: {:message, Electric.Satellite.SatAuthHeaderPair} + }} + end + + [] + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ + @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, ""} + end, + def default(:token) do + {:ok, ""} + end, + def default(:headers) do + {:error, :no_default_value} + end, + def default(_) do + {:error, :no_such_field} + end + ] + + ( + @spec file_options() :: nil + def file_options() do + nil + end + ) + end, + defmodule Electric.Satellite.SatShapeDef.Relation do + @moduledoc false + defstruct foreign_key: [], select: nil + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, encode!(msg)} + rescue + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} + end + end + + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] |> encode_foreign_key(msg) |> encode_select(msg) + end + ) + + [] + + [ + defp encode_foreign_key(acc, msg) do + try do + case msg.foreign_key do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\n", Protox.Encode.encode_string(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:foreign_key, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_select(acc, msg) do + try do + if msg.select == nil do + acc + else + [acc, "\x12", Protox.Encode.encode_message(msg.select)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:select, "invalid field value"), __STACKTRACE__ + end + end + ] + + [] + ) + + ( + ( + @spec decode(binary) :: {:ok, struct} | {:error, any} + def decode(bytes) do + try do + {:ok, decode!(bytes)} + rescue + e in [Protox.DecodingError, Protox.IllegalTagError, 
Protox.RequiredFieldsError] -> + {:error, e} + end + end + + ( + @spec decode!(binary) :: struct | no_return + def decode!(bytes) do + parse_key_value(bytes, struct(Electric.Satellite.SatShapeDef.Relation)) + end + ) + ) + + ( + @spec parse_key_value(binary, struct) :: struct + defp parse_key_value(<<>>, msg) do + msg + end + + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} + + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[foreign_key: msg.foreign_key ++ [Protox.Decode.validate_string(delimited)]], + rest} + + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + select: + Protox.MergeMessage.merge( + msg.select, + Electric.Satellite.SatShapeDef.Select.decode!(delimited) + ) + ], rest} + + {tag, wire_type, rest} -> + {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) + {[], rest} + end + + msg_updated = struct(msg, field) + parse_key_value(rest, msg_updated) + end + ) + + [] + ) + + ( + @spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()} + def json_decode(input, opts \\ []) do + try do + {:ok, json_decode!(input, opts)} + rescue + e in Protox.JsonDecodingError -> {:error, e} + end + end + + @spec json_decode!(iodata(), keyword()) :: struct() | no_return() + def json_decode!(input, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode) + + Protox.JsonDecode.decode!( + input, + Electric.Satellite.SatShapeDef.Relation, + &json_library_wrapper.decode!(json_library, &1) + ) + end + + @spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()} + def json_encode(msg, opts \\ []) do + try do + {:ok, json_encode!(msg, opts)} + rescue + e in Protox.JsonEncodingError -> {:error, e} + end + end + + @spec 
json_encode!(struct(), keyword()) :: iodata() | no_return() + def json_encode!(msg, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode) + Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1)) + end + ) + + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => {:foreign_key, :unpacked, :string}, + 2 => {:select, {:scalar, nil}, {:message, Electric.Satellite.SatShapeDef.Select}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} + } + def defs_by_name() do + %{ + foreign_key: {1, :unpacked, :string}, + select: {2, {:scalar, nil}, {:message, Electric.Satellite.SatShapeDef.Select}} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "foreignKey", + kind: :unpacked, + label: :repeated, + name: :foreign_key, + tag: 1, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "select", + kind: {:scalar, nil}, + label: :optional, + name: :select, + tag: 2, + type: {:message, Electric.Satellite.SatShapeDef.Select} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:foreign_key) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "foreignKey", + kind: :unpacked, + label: :repeated, + name: :foreign_key, + tag: 1, + type: :string + }} + end + + def field_def("foreignKey") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "foreignKey", + kind: :unpacked, + label: :repeated, + name: :foreign_key, + tag: 1, + type: :string + }} + end + + def field_def("foreign_key") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "foreignKey", + kind: :unpacked, + label: :repeated, + name: 
:foreign_key, + tag: 1, + type: :string + }} + end + ), + ( + def field_def(:select) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "select", + kind: {:scalar, nil}, + label: :optional, + name: :select, + tag: 2, + type: {:message, Electric.Satellite.SatShapeDef.Select} + }} + end + + def field_def("select") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "select", + kind: {:scalar, nil}, + label: :optional, + name: :select, + tag: 2, + type: {:message, Electric.Satellite.SatShapeDef.Select} + }} + end + + [] + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ + @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:foreign_key) do + {:error, :no_default_value} + end, + def default(:select) do + {:ok, nil} + end, + def default(_) do + {:error, :no_such_field} + end + ] + + ( + @spec file_options() :: nil + def file_options() do + nil + end + ) + end, + defmodule Electric.Satellite.SatSubsReq do + @moduledoc false + defstruct subscription_id: "", shape_requests: [] + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, encode!(msg)} + rescue + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} + end + end + + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] |> encode_subscription_id(msg) |> encode_shape_requests(msg) + end + ) + + [] + + [ + defp encode_subscription_id(acc, msg) do + try do + if msg.subscription_id == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.subscription_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:subscription_id, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_shape_requests(acc, msg) do + try do + case msg.shape_requests do + [] -> + acc + + 
values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:shape_requests, "invalid field value"), + __STACKTRACE__ + end + end + ] + + [] + ) + + ( + ( + @spec decode(binary) :: {:ok, struct} | {:error, any} + def decode(bytes) do + try do + {:ok, decode!(bytes)} + rescue + e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] -> + {:error, e} + end + end + + ( + @spec decode!(binary) :: struct | no_return + def decode!(bytes) do + parse_key_value(bytes, struct(Electric.Satellite.SatSubsReq)) + end + ) + ) + + ( + @spec parse_key_value(binary, struct) :: struct + defp parse_key_value(<<>>, msg) do + msg + end + + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} + + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[subscription_id: Protox.Decode.validate_string(delimited)], rest} + + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + shape_requests: + msg.shape_requests ++ [Electric.Satellite.SatShapeReq.decode!(delimited)] + ], rest} + + {tag, wire_type, rest} -> + {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) + {[], rest} + end + + msg_updated = struct(msg, field) + parse_key_value(rest, msg_updated) + end + ) + + [] + ) + + ( + @spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()} + def json_decode(input, opts \\ []) do + try do + {:ok, json_decode!(input, opts)} + rescue + e in Protox.JsonDecodingError -> {:error, e} + end + end + + @spec json_decode!(iodata(), keyword()) :: struct() | no_return() + def json_decode!(input, opts \\ []) do + {json_library_wrapper, json_library} = 
Protox.JsonLibrary.get_library(opts, :decode) + + Protox.JsonDecode.decode!( + input, + Electric.Satellite.SatSubsReq, + &json_library_wrapper.decode!(json_library, &1) + ) + end + + @spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()} + def json_encode(msg, opts \\ []) do + try do + {:ok, json_encode!(msg, opts)} + rescue + e in Protox.JsonEncodingError -> {:error, e} + end + end + + @spec json_encode!(struct(), keyword()) :: iodata() | no_return() + def json_encode!(msg, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode) + Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1)) + end + ) + + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => {:subscription_id, {:scalar, ""}, :string}, + 2 => {:shape_requests, :unpacked, {:message, Electric.Satellite.SatShapeReq}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} + } + def defs_by_name() do + %{ + shape_requests: {2, :unpacked, {:message, Electric.Satellite.SatShapeReq}}, + subscription_id: {1, {:scalar, ""}, :string} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 1, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "shapeRequests", + kind: :unpacked, + label: :repeated, + name: :shape_requests, + tag: 2, + type: {:message, Electric.Satellite.SatShapeReq} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:subscription_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: 
{:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 1, + type: :string + }} + end + + def field_def("subscriptionId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 1, + type: :string + }} + end + + def field_def("subscription_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 1, + type: :string + }} + end + ), + ( + def field_def(:shape_requests) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequests", + kind: :unpacked, + label: :repeated, + name: :shape_requests, + tag: 2, + type: {:message, Electric.Satellite.SatShapeReq} + }} + end + + def field_def("shapeRequests") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequests", + kind: :unpacked, + label: :repeated, + name: :shape_requests, + tag: 2, + type: {:message, Electric.Satellite.SatShapeReq} + }} + end + + def field_def("shape_requests") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequests", + kind: :unpacked, + label: :repeated, + name: :shape_requests, + tag: 2, + type: {:message, Electric.Satellite.SatShapeReq} + }} + end + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ + @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:subscription_id) do + {:ok, ""} + end, + def default(:shape_requests) do + {:error, :no_default_value} + end, + def default(_) do + {:error, :no_such_field} + end + ] + + ( + @spec file_options() :: nil + def file_options() do + nil + end + ) + end, + defmodule Electric.Satellite.SatSubsDataError do + @moduledoc false + defstruct code: :CODE_UNSPECIFIED, message: "", subscription_id: 
"", shape_request_error: [] + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, encode!(msg)} + rescue + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} + end + end + + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] + |> encode_code(msg) + |> encode_message(msg) + |> encode_subscription_id(msg) + |> encode_shape_request_error(msg) + end + ) + + [] + + [ + defp encode_code(acc, msg) do + try do + if msg.code == :CODE_UNSPECIFIED do + acc + else + [ + acc, + "\b", + msg.code + |> Electric.Satellite.SatSubsDataError.Code.encode() + |> Protox.Encode.encode_enum() + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:code, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_message(acc, msg) do + try do + if msg.message == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.message)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:message, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_subscription_id(acc, msg) do + try do + if msg.subscription_id == "" do + acc + else + [acc, "\x1A", Protox.Encode.encode_string(msg.subscription_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:subscription_id, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_shape_request_error(acc, msg) do + try do + case msg.shape_request_error do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\"", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:shape_request_error, "invalid field value"), + __STACKTRACE__ + end + end + ] + + [] + ) + + ( + ( + @spec decode(binary) :: {:ok, struct} | {:error, any} + def decode(bytes) do + try do + {:ok, decode!(bytes)} + rescue + e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] -> + {:error, e} + 
end + end + + ( + @spec decode!(binary) :: struct | no_return + def decode!(bytes) do + parse_key_value(bytes, struct(Electric.Satellite.SatSubsDataError)) + end + ) + ) + + ( + @spec parse_key_value(binary, struct) :: struct + defp parse_key_value(<<>>, msg) do + msg + end + + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} + + {1, _, bytes} -> + {value, rest} = + Protox.Decode.parse_enum(bytes, Electric.Satellite.SatSubsDataError.Code) + + {[code: value], rest} + + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[message: Protox.Decode.validate_string(delimited)], rest} + + {3, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[subscription_id: Protox.Decode.validate_string(delimited)], rest} + + {4, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + shape_request_error: + msg.shape_request_error ++ + [Electric.Satellite.SatSubsDataError.ShapeReqError.decode!(delimited)] + ], rest} + + {tag, wire_type, rest} -> + {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) + {[], rest} + end + + msg_updated = struct(msg, field) + parse_key_value(rest, msg_updated) + end + ) + + [] + ) + + ( + @spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()} + def json_decode(input, opts \\ []) do + try do + {:ok, json_decode!(input, opts)} + rescue + e in Protox.JsonDecodingError -> {:error, e} + end + end + + @spec json_decode!(iodata(), keyword()) :: struct() | no_return() + def json_decode!(input, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode) + + Protox.JsonDecode.decode!( + input, + Electric.Satellite.SatSubsDataError, + &json_library_wrapper.decode!(json_library, &1) 
+ ) + end + + @spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()} + def json_encode(msg, opts \\ []) do + try do + {:ok, json_encode!(msg, opts)} + rescue + e in Protox.JsonEncodingError -> {:error, e} + end + end + + @spec json_encode!(struct(), keyword()) :: iodata() | no_return() + def json_encode!(msg, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode) + Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1)) + end + ) + + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => + {:code, {:scalar, :CODE_UNSPECIFIED}, + {:enum, Electric.Satellite.SatSubsDataError.Code}}, + 2 => {:message, {:scalar, ""}, :string}, + 3 => {:subscription_id, {:scalar, ""}, :string}, + 4 => + {:shape_request_error, :unpacked, + {:message, Electric.Satellite.SatSubsDataError.ShapeReqError}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} + } + def defs_by_name() do + %{ + code: + {1, {:scalar, :CODE_UNSPECIFIED}, {:enum, Electric.Satellite.SatSubsDataError.Code}}, + message: {2, {:scalar, ""}, :string}, + shape_request_error: + {4, :unpacked, {:message, Electric.Satellite.SatSubsDataError.ShapeReqError}}, + subscription_id: {3, {:scalar, ""}, :string} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, + label: :optional, + name: :code, + tag: 1, + type: {:enum, Electric.Satellite.SatSubsDataError.Code} + }, + %{ + __struct__: Protox.Field, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, + tag: 2, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + 
kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 3, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "shapeRequestError", + kind: :unpacked, + label: :repeated, + name: :shape_request_error, + tag: 4, + type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:code) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, + label: :optional, + name: :code, + tag: 1, + type: {:enum, Electric.Satellite.SatSubsDataError.Code} + }} + end + + def field_def("code") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "code", + kind: {:scalar, :CODE_UNSPECIFIED}, + label: :optional, + name: :code, + tag: 1, + type: {:enum, Electric.Satellite.SatSubsDataError.Code} + }} + end + + [] + ), + ( + def field_def(:message) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, + tag: 2, + type: :string + }} + end + + def field_def("message") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "message", + kind: {:scalar, ""}, + label: :optional, + name: :message, + tag: 2, + type: :string + }} + end + + [] + ), + ( + def field_def(:subscription_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 3, + type: :string + }} + end + + def field_def("subscriptionId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 3, + type: :string + }} + end + + def field_def("subscription_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "subscriptionId", + kind: {:scalar, ""}, + label: :optional, + name: :subscription_id, + tag: 3, + type: :string + }} + end + ), + ( + def 
field_def(:shape_request_error) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequestError", + kind: :unpacked, + label: :repeated, + name: :shape_request_error, + tag: 4, + type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + }} + end + + def field_def("shapeRequestError") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequestError", + kind: :unpacked, + label: :repeated, + name: :shape_request_error, + tag: 4, + type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + }} + end + + def field_def("shape_request_error") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "shapeRequestError", + kind: :unpacked, + label: :repeated, + name: :shape_request_error, + tag: 4, + type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + }} + end + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ + @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:code) do + {:ok, :CODE_UNSPECIFIED} + end, + def default(:message) do + {:ok, ""} + end, + def default(:subscription_id) do + {:ok, ""} + end, + def default(:shape_request_error) do + {:error, :no_default_value} + end, + def default(_) do + {:error, :no_such_field} + end + ] + + ( + @spec file_options() :: nil + def file_options() do + nil + end + ) + end, + defmodule Electric.Satellite.SatPerms.Unassign do + @moduledoc false + defstruct id: "", table: nil, user_column: nil, role_column: nil, role_name: nil, scope: nil + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, encode!(msg)} + rescue + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} + end + end + + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] + |> 
encode_user_column(msg) + |> encode_role_column(msg) + |> encode_role_name(msg) + |> encode_scope(msg) + |> encode_id(msg) + |> encode_table(msg) + end + ) + + [] + + [ + defp encode_id(acc, msg) do + try do + if msg.id == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_table(acc, msg) do + try do + if msg.table == nil do + acc + else + [acc, "\x12", Protox.Encode.encode_message(msg.table)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:table, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_user_column(acc, msg) do + try do + case msg.user_column do + nil -> [acc] + child_field_value -> [acc, "\x1A", Protox.Encode.encode_string(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:user_column, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_role_column(acc, msg) do + try do + case msg.role_column do + nil -> [acc] + child_field_value -> [acc, "\"", Protox.Encode.encode_string(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:role_column, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_role_name(acc, msg) do + try do + case msg.role_name do + nil -> [acc] + child_field_value -> [acc, "*", Protox.Encode.encode_string(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:role_name, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_scope(acc, msg) do + try do + case msg.scope do + nil -> [acc] + child_field_value -> [acc, "2", Protox.Encode.encode_message(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:scope, "invalid field value"), __STACKTRACE__ + end + end + ] + + [] + ) + + ( + ( + @spec decode(binary) :: {:ok, struct} | {:error, any} + def decode(bytes) do + 
try do + {:ok, decode!(bytes)} + rescue + e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] -> + {:error, e} + end + end + + ( + @spec decode!(binary) :: struct | no_return + def decode!(bytes) do + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.Unassign)) + end + ) + ) + + ( + @spec parse_key_value(binary, struct) :: struct + defp parse_key_value(<<>>, msg) do + msg + end + + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} + + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[id: Protox.Decode.validate_string(delimited)], rest} + + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + table: + Protox.MergeMessage.merge( + msg.table, + Electric.Satellite.SatPerms.Table.decode!(delimited) + ) + ], rest} + + {3, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[user_column: Protox.Decode.validate_string(delimited)], rest} + + {4, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[role_column: Protox.Decode.validate_string(delimited)], rest} + + {5, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[role_name: Protox.Decode.validate_string(delimited)], rest} + + {6, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + case msg.scope do + {:scope, previous_value} -> + {:scope, + Protox.MergeMessage.merge( + previous_value, + Electric.Satellite.SatPerms.Table.decode!(delimited) + )} + + _ -> + {:scope, Electric.Satellite.SatPerms.Table.decode!(delimited)} + end + ], rest} + + 
{tag, wire_type, rest} -> + {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) + {[], rest} + end + + msg_updated = struct(msg, field) + parse_key_value(rest, msg_updated) + end + ) + + [] + ) + + ( + @spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()} + def json_decode(input, opts \\ []) do + try do + {:ok, json_decode!(input, opts)} + rescue + e in Protox.JsonDecodingError -> {:error, e} + end + end + + @spec json_decode!(iodata(), keyword()) :: struct() | no_return() + def json_decode!(input, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode) + + Protox.JsonDecode.decode!( + input, + Electric.Satellite.SatPerms.Unassign, + &json_library_wrapper.decode!(json_library, &1) + ) + end + + @spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()} + def json_encode(msg, opts \\ []) do + try do + {:ok, json_encode!(msg, opts)} + rescue + e in Protox.JsonEncodingError -> {:error, e} + end + end + + @spec json_encode!(struct(), keyword()) :: iodata() | no_return() + def json_encode!(msg, opts \\ []) do + {json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode) + Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1)) + end + ) + + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => {:id, {:scalar, ""}, :string}, + 2 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, + 3 => {:user_column, {:oneof, :_user_column}, :string}, + 4 => {:role_column, {:oneof, :_role_column}, :string}, + 5 => {:role_name, {:oneof, :_role_name}, :string}, + 6 => {:scope, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), 
Protox.Types.type()} + } + def defs_by_name() do + %{ + id: {1, {:scalar, ""}, :string}, + role_column: {4, {:oneof, :_role_column}, :string}, + role_name: {5, {:oneof, :_role_name}, :string}, + scope: {6, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, + table: {2, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, + user_column: {3, {:oneof, :_user_column}, :string} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:scalar, nil}, + label: :optional, + name: :table, + tag: 2, + type: {:message, Electric.Satellite.SatPerms.Table} + }, + %{ + __struct__: Protox.Field, + json_name: "userColumn", + kind: {:oneof, :_user_column}, + label: :proto3_optional, + name: :user_column, + tag: 3, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "roleColumn", + kind: {:oneof, :_role_column}, + label: :proto3_optional, + name: :role_column, + tag: 4, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "roleName", + kind: {:oneof, :_role_name}, + label: :proto3_optional, + name: :role_name, + tag: 5, + type: :string + }, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + [] + ), + ( 
+ def field_def(:table) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:scalar, nil}, + label: :optional, + name: :table, + tag: 2, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end + + def field_def("table") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:scalar, nil}, + label: :optional, + name: :table, + tag: 2, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end + + [] + ), + ( + def field_def(:user_column) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userColumn", + kind: {:oneof, :_user_column}, + label: :proto3_optional, + name: :user_column, + tag: 3, + type: :string + }} + end + + def field_def("userColumn") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userColumn", + kind: {:oneof, :_user_column}, + label: :proto3_optional, + name: :user_column, + tag: 3, + type: :string + }} + end + + def field_def("user_column") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userColumn", + kind: {:oneof, :_user_column}, + label: :proto3_optional, + name: :user_column, + tag: 3, + type: :string + }} + end + ), + ( + def field_def(:role_column) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleColumn", + kind: {:oneof, :_role_column}, + label: :proto3_optional, + name: :role_column, + tag: 4, + type: :string + }} + end + + def field_def("roleColumn") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleColumn", + kind: {:oneof, :_role_column}, + label: :proto3_optional, + name: :role_column, + tag: 4, + type: :string + }} + end + + def field_def("role_column") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleColumn", + kind: {:oneof, :_role_column}, + label: :proto3_optional, + name: :role_column, + tag: 4, + type: :string + }} + end + ), + ( + def field_def(:role_name) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleName", + kind: {:oneof, :_role_name}, + label: :proto3_optional, + name: :role_name, + tag: 
5, + type: :string + }} + end + + def field_def("roleName") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleName", + kind: {:oneof, :_role_name}, + label: :proto3_optional, + name: :role_name, + tag: 5, + type: :string + }} + end + + def field_def("role_name") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roleName", + kind: {:oneof, :_role_name}, + label: :proto3_optional, + name: :role_name, + tag: 5, + type: :string + }} + end + ), + ( + def field_def(:scope) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end + + def field_def("scope") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "scope", + kind: {:oneof, :_scope}, + label: :proto3_optional, + name: :scope, + tag: 6, + type: {:message, Electric.Satellite.SatPerms.Table} + }} + end + + [] + ), + def field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ + @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, ""} + end, + def default(:table) do + {:ok, nil} + end, + def default(:user_column) do + {:error, :no_default_value} + end, + def default(:role_column) do + {:error, :no_default_value} + end, + def default(:role_name) do + {:error, :no_default_value} + end, + def default(:scope) do + {:error, :no_default_value} + end, + def default(_) do + {:error, :no_such_field} + end + ] + + ( + @spec file_options() :: nil + def file_options() do + nil + end + ) + end, + defmodule Electric.Satellite.SatOpMigrate.Table do + @moduledoc false + defstruct name: "", columns: [], fks: [], pks: [] + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, 
encode!(msg)} + rescue + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} + end + end + + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] |> encode_name(msg) |> encode_columns(msg) |> encode_fks(msg) |> encode_pks(msg) + end + ) + + [] + + [ + defp encode_name(acc, msg) do + try do + if msg.name == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.name)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:name, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_columns(acc, msg) do + try do + case msg.columns do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_message(value)] + end) + ] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:columns, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_fks(acc, msg) do try do - if msg.subscription_id == "" do - acc - else - [acc, "\n", Protox.Encode.encode_string(msg.subscription_id)] + case msg.fks do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x1A", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:subscription_id, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:fks, "invalid field value"), __STACKTRACE__ end end, - defp encode_shape_requests(acc, msg) do + defp encode_pks(acc, msg) do try do - case msg.shape_requests do + case msg.pks do [] -> acc @@ -4810,14 +7357,13 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] + [acc, "\"", Protox.Encode.encode_string(value)] end) ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:shape_requests, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:pks, "invalid field value"), __STACKTRACE__ end end ] @@ -4840,7 +7386,7 @@ ( @spec decode!(binary) :: struct | 
no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatSubsReq)) + parse_key_value(bytes, struct(Electric.Satellite.SatOpMigrate.Table)) end ) ) @@ -4860,17 +7406,30 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[subscription_id: Protox.Decode.validate_string(delimited)], rest} + {[name: Protox.Decode.validate_string(delimited)], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) {[ - shape_requests: - msg.shape_requests ++ [Electric.Satellite.SatShapeReq.decode!(delimited)] + columns: + msg.columns ++ [Electric.Satellite.SatOpMigrate.Column.decode!(delimited)] + ], rest} + + {3, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + fks: msg.fks ++ [Electric.Satellite.SatOpMigrate.ForeignKey.decode!(delimited)] ], rest} + {4, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[pks: msg.pks ++ [Protox.Decode.validate_string(delimited)]], rest} + {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) {[], rest} @@ -4900,7 +7459,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatSubsReq, + Electric.Satellite.SatOpMigrate.Table, &json_library_wrapper.decode!(json_library, &1) ) end @@ -4928,8 +7487,10 @@ } def defs() do %{ - 1 => {:subscription_id, {:scalar, ""}, :string}, - 2 => {:shape_requests, :unpacked, {:message, Electric.Satellite.SatShapeReq}} + 1 => {:name, {:scalar, ""}, :string}, + 2 => {:columns, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Column}}, + 3 => {:fks, :unpacked, {:message, Electric.Satellite.SatOpMigrate.ForeignKey}}, + 4 => {:pks, :unpacked, :string} } end @@ -4939,8 +7500,10 @@ } def defs_by_name() do %{ - shape_requests: {2, :unpacked, {:message, 
Electric.Satellite.SatShapeReq}}, - subscription_id: {1, {:scalar, ""}, :string} + columns: {2, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Column}}, + fks: {3, :unpacked, {:message, Electric.Satellite.SatOpMigrate.ForeignKey}}, + name: {1, {:scalar, ""}, :string}, + pks: {4, :unpacked, :string} } end ) @@ -4951,21 +7514,39 @@ [ %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "name", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, + name: :name, tag: 1, type: :string }, %{ __struct__: Protox.Field, - json_name: "shapeRequests", + json_name: "columns", kind: :unpacked, label: :repeated, - name: :shape_requests, + name: :columns, tag: 2, - type: {:message, Electric.Satellite.SatShapeReq} + type: {:message, Electric.Satellite.SatOpMigrate.Column} + }, + %{ + __struct__: Protox.Field, + json_name: "fks", + kind: :unpacked, + label: :repeated, + name: :fks, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} + }, + %{ + __struct__: Protox.Field, + json_name: "pks", + kind: :unpacked, + label: :repeated, + name: :pks, + tag: 4, + type: :string } ] end @@ -4973,84 +7554,120 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:subscription_id) do + def field_def(:name) do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "name", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, + name: :name, tag: 1, type: :string }} end - def field_def("subscriptionId") do + def field_def("name") do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "name", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, + name: :name, tag: 1, type: :string }} end - def field_def("subscription_id") do + [] + ), + ( + def field_def(:columns) do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", - kind: {:scalar, ""}, - label: :optional, - name: :subscription_id, - tag: 1, - type: 
:string + json_name: "columns", + kind: :unpacked, + label: :repeated, + name: :columns, + tag: 2, + type: {:message, Electric.Satellite.SatOpMigrate.Column} + }} + end + + def field_def("columns") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "columns", + kind: :unpacked, + label: :repeated, + name: :columns, + tag: 2, + type: {:message, Electric.Satellite.SatOpMigrate.Column} }} end + + [] ), ( - def field_def(:shape_requests) do + def field_def(:fks) do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequests", + json_name: "fks", kind: :unpacked, label: :repeated, - name: :shape_requests, - tag: 2, - type: {:message, Electric.Satellite.SatShapeReq} + name: :fks, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} }} end - def field_def("shapeRequests") do + def field_def("fks") do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequests", + json_name: "fks", kind: :unpacked, label: :repeated, - name: :shape_requests, - tag: 2, - type: {:message, Electric.Satellite.SatShapeReq} + name: :fks, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} }} end - def field_def("shape_requests") do + [] + ), + ( + def field_def(:pks) do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequests", + json_name: "pks", kind: :unpacked, label: :repeated, - name: :shape_requests, - tag: 2, - type: {:message, Electric.Satellite.SatShapeReq} + name: :pks, + tag: 4, + type: :string + }} + end + + def field_def("pks") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "pks", + kind: :unpacked, + label: :repeated, + name: :pks, + tag: 4, + type: :string }} end + + [] ), def field_def(_) do {:error, :no_such_field} @@ -5076,10 +7693,16 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:subscription_id) do + def default(:name) do {:ok, ""} end, - def default(:shape_requests) do + def default(:columns) do + {:error, :no_default_value} + end, + def 
default(:fks) do + {:error, :no_default_value} + end, + def default(:pks) do {:error, :no_default_value} end, def default(_) do @@ -5094,9 +7717,9 @@ end ) end, - defmodule Electric.Satellite.SatSubsDataError do + defmodule Electric.Satellite.SatOpMigrate.ForeignKey do @moduledoc false - defstruct code: :CODE_UNSPECIFIED, message: "", subscription_id: "", shape_request_error: [] + defstruct fk_cols: [], pk_table: "", pk_cols: [] ( ( @@ -5111,63 +7734,47 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] - |> encode_code(msg) - |> encode_message(msg) - |> encode_subscription_id(msg) - |> encode_shape_request_error(msg) + [] |> encode_fk_cols(msg) |> encode_pk_table(msg) |> encode_pk_cols(msg) end ) [] [ - defp encode_code(acc, msg) do - try do - if msg.code == :CODE_UNSPECIFIED do - acc - else - [ - acc, - "\b", - msg.code - |> Electric.Satellite.SatSubsDataError.Code.encode() - |> Protox.Encode.encode_enum() - ] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:code, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_message(acc, msg) do + defp encode_fk_cols(acc, msg) do try do - if msg.message == "" do - acc - else - [acc, "\x12", Protox.Encode.encode_string(msg.message)] + case msg.fk_cols do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\n", Protox.Encode.encode_string(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:message, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:fk_cols, "invalid field value"), __STACKTRACE__ end end, - defp encode_subscription_id(acc, msg) do + defp encode_pk_table(acc, msg) do try do - if msg.subscription_id == "" do + if msg.pk_table == "" do acc else - [acc, "\x1A", Protox.Encode.encode_string(msg.subscription_id)] + [acc, "\x12", Protox.Encode.encode_string(msg.pk_table)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:subscription_id, "invalid field 
value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:pk_table, "invalid field value"), __STACKTRACE__ end end, - defp encode_shape_request_error(acc, msg) do + defp encode_pk_cols(acc, msg) do try do - case msg.shape_request_error do + case msg.pk_cols do [] -> acc @@ -5175,14 +7782,13 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\"", Protox.Encode.encode_message(value)] + [acc, "\x1A", Protox.Encode.encode_string(value)] end) ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:shape_request_error, "invalid field value"), - __STACKTRACE__ + reraise Protox.EncodingError.new(:pk_cols, "invalid field value"), __STACKTRACE__ end end ] @@ -5205,7 +7811,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatSubsDataError)) + parse_key_value(bytes, struct(Electric.Satellite.SatOpMigrate.ForeignKey)) end ) ) @@ -5223,30 +7829,19 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> - {value, rest} = - Protox.Decode.parse_enum(bytes, Electric.Satellite.SatSubsDataError.Code) - - {[code: value], rest} - - {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[message: Protox.Decode.validate_string(delimited)], rest} + {[fk_cols: msg.fk_cols ++ [Protox.Decode.validate_string(delimited)]], rest} - {3, _, bytes} -> + {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[subscription_id: Protox.Decode.validate_string(delimited)], rest} + {[pk_table: Protox.Decode.validate_string(delimited)], rest} - {4, _, bytes} -> + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - shape_request_error: - msg.shape_request_error ++ - [Electric.Satellite.SatSubsDataError.ShapeReqError.decode!(delimited)] - ], rest} + {[pk_cols: msg.pk_cols ++ 
[Protox.Decode.validate_string(delimited)]], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -5277,7 +7872,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatSubsDataError, + Electric.Satellite.SatOpMigrate.ForeignKey, &json_library_wrapper.decode!(json_library, &1) ) end @@ -5305,14 +7900,9 @@ } def defs() do %{ - 1 => - {:code, {:scalar, :CODE_UNSPECIFIED}, - {:enum, Electric.Satellite.SatSubsDataError.Code}}, - 2 => {:message, {:scalar, ""}, :string}, - 3 => {:subscription_id, {:scalar, ""}, :string}, - 4 => - {:shape_request_error, :unpacked, - {:message, Electric.Satellite.SatSubsDataError.ShapeReqError}} + 1 => {:fk_cols, :unpacked, :string}, + 2 => {:pk_table, {:scalar, ""}, :string}, + 3 => {:pk_cols, :unpacked, :string} } end @@ -5322,12 +7912,9 @@ } def defs_by_name() do %{ - code: - {1, {:scalar, :CODE_UNSPECIFIED}, {:enum, Electric.Satellite.SatSubsDataError.Code}}, - message: {2, {:scalar, ""}, :string}, - shape_request_error: - {4, :unpacked, {:message, Electric.Satellite.SatSubsDataError.ShapeReqError}}, - subscription_id: {3, {:scalar, ""}, :string} + fk_cols: {1, :unpacked, :string}, + pk_cols: {3, :unpacked, :string}, + pk_table: {2, {:scalar, ""}, :string} } end ) @@ -5338,39 +7925,30 @@ [ %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, :CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "fkCols", + kind: :unpacked, + label: :repeated, + name: :fk_cols, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.Code} - }, - %{ - __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, - tag: 2, type: :string }, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "pkTable", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, - tag: 3, + name: :pk_table, + tag: 2, type: :string }, %{ __struct__: Protox.Field, - json_name: "shapeRequestError", + json_name: "pkCols", 
kind: :unpacked, label: :repeated, - name: :shape_request_error, - tag: 4, - type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + name: :pk_cols, + tag: 3, + type: :string } ] end @@ -5378,140 +7956,122 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:code) do + def field_def(:fk_cols) do {:ok, %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, :CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "fkCols", + kind: :unpacked, + label: :repeated, + name: :fk_cols, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.Code} + type: :string }} end - def field_def("code") do + def field_def("fkCols") do {:ok, %{ __struct__: Protox.Field, - json_name: "code", - kind: {:scalar, :CODE_UNSPECIFIED}, - label: :optional, - name: :code, + json_name: "fkCols", + kind: :unpacked, + label: :repeated, + name: :fk_cols, tag: 1, - type: {:enum, Electric.Satellite.SatSubsDataError.Code} - }} - end - - [] - ), - ( - def field_def(:message) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, - tag: 2, type: :string }} end - def field_def("message") do + def field_def("fk_cols") do {:ok, %{ __struct__: Protox.Field, - json_name: "message", - kind: {:scalar, ""}, - label: :optional, - name: :message, - tag: 2, + json_name: "fkCols", + kind: :unpacked, + label: :repeated, + name: :fk_cols, + tag: 1, type: :string }} end - - [] ), ( - def field_def(:subscription_id) do + def field_def(:pk_table) do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "pkTable", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, - tag: 3, + name: :pk_table, + tag: 2, type: :string }} end - def field_def("subscriptionId") do + def field_def("pkTable") do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "pkTable", kind: {:scalar, ""}, label: :optional, - 
name: :subscription_id, - tag: 3, + name: :pk_table, + tag: 2, type: :string }} end - def field_def("subscription_id") do + def field_def("pk_table") do {:ok, %{ __struct__: Protox.Field, - json_name: "subscriptionId", + json_name: "pkTable", kind: {:scalar, ""}, label: :optional, - name: :subscription_id, - tag: 3, + name: :pk_table, + tag: 2, type: :string }} end ), ( - def field_def(:shape_request_error) do + def field_def(:pk_cols) do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequestError", + json_name: "pkCols", kind: :unpacked, label: :repeated, - name: :shape_request_error, - tag: 4, - type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + name: :pk_cols, + tag: 3, + type: :string }} end - def field_def("shapeRequestError") do + def field_def("pkCols") do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequestError", + json_name: "pkCols", kind: :unpacked, label: :repeated, - name: :shape_request_error, - tag: 4, - type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + name: :pk_cols, + tag: 3, + type: :string }} end - def field_def("shape_request_error") do + def field_def("pk_cols") do {:ok, %{ __struct__: Protox.Field, - json_name: "shapeRequestError", + json_name: "pkCols", kind: :unpacked, label: :repeated, - name: :shape_request_error, - tag: 4, - type: {:message, Electric.Satellite.SatSubsDataError.ShapeReqError} + name: :pk_cols, + tag: 3, + type: :string }} end ), @@ -5539,16 +8099,13 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:code) do - {:ok, :CODE_UNSPECIFIED} - end, - def default(:message) do - {:ok, ""} + def default(:fk_cols) do + {:error, :no_default_value} end, - def default(:subscription_id) do + def default(:pk_table) do {:ok, ""} end, - def default(:shape_request_error) do + def default(:pk_cols) do {:error, :no_default_value} end, def default(_) do @@ -5563,97 +8120,52 @@ end ) end, - defmodule 
Electric.Satellite.SatOpMigrate.Table do + defmodule Electric.Satellite.SatPerms.Table do @moduledoc false - defstruct name: "", columns: [], fks: [], pks: [] - - ( - ( - @spec encode(struct) :: {:ok, iodata} | {:error, any} - def encode(msg) do - try do - {:ok, encode!(msg)} - rescue - e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} - end - end - - @spec encode!(struct) :: iodata | no_return - def encode!(msg) do - [] |> encode_name(msg) |> encode_columns(msg) |> encode_fks(msg) |> encode_pks(msg) - end - ) - - [] - - [ - defp encode_name(acc, msg) do - try do - if msg.name == "" do - acc - else - [acc, "\n", Protox.Encode.encode_string(msg.name)] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:name, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_columns(acc, msg) do - try do - case msg.columns do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] - end) - ] - end + defstruct schema: "", name: "" + + ( + ( + @spec encode(struct) :: {:ok, iodata} | {:error, any} + def encode(msg) do + try do + {:ok, encode!(msg)} rescue - ArgumentError -> - reraise Protox.EncodingError.new(:columns, "invalid field value"), __STACKTRACE__ + e in [Protox.EncodingError, Protox.RequiredFieldsError] -> {:error, e} end - end, - defp encode_fks(acc, msg) do - try do - case msg.fks do - [] -> - acc + end - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\x1A", Protox.Encode.encode_message(value)] - end) - ] + @spec encode!(struct) :: iodata | no_return + def encode!(msg) do + [] |> encode_schema(msg) |> encode_name(msg) + end + ) + + [] + + [ + defp encode_schema(acc, msg) do + try do + if msg.schema == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.schema)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:fks, "invalid field value"), __STACKTRACE__ + reraise 
Protox.EncodingError.new(:schema, "invalid field value"), __STACKTRACE__ end end, - defp encode_pks(acc, msg) do + defp encode_name(acc, msg) do try do - case msg.pks do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\"", Protox.Encode.encode_string(value)] - end) - ] + if msg.name == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.name)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:pks, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:name, "invalid field value"), __STACKTRACE__ end end ] @@ -5676,7 +8188,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatOpMigrate.Table)) + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.Table)) end ) ) @@ -5696,29 +8208,12 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[name: Protox.Decode.validate_string(delimited)], rest} + {[schema: Protox.Decode.validate_string(delimited)], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - columns: - msg.columns ++ [Electric.Satellite.SatOpMigrate.Column.decode!(delimited)] - ], rest} - - {3, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - fks: msg.fks ++ [Electric.Satellite.SatOpMigrate.ForeignKey.decode!(delimited)] - ], rest} - - {4, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[pks: msg.pks ++ [Protox.Decode.validate_string(delimited)]], rest} + {[name: Protox.Decode.validate_string(delimited)], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -5749,7 +8244,7 @@ Protox.JsonDecode.decode!( input, - 
Electric.Satellite.SatOpMigrate.Table, + Electric.Satellite.SatPerms.Table, &json_library_wrapper.decode!(json_library, &1) ) end @@ -5776,12 +8271,7 @@ required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} } def defs() do - %{ - 1 => {:name, {:scalar, ""}, :string}, - 2 => {:columns, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Column}}, - 3 => {:fks, :unpacked, {:message, Electric.Satellite.SatOpMigrate.ForeignKey}}, - 4 => {:pks, :unpacked, :string} - } + %{1 => {:schema, {:scalar, ""}, :string}, 2 => {:name, {:scalar, ""}, :string}} end @deprecated "Use fields_defs()/0 instead" @@ -5789,12 +8279,7 @@ required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} } def defs_by_name() do - %{ - columns: {2, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Column}}, - fks: {3, :unpacked, {:message, Electric.Satellite.SatOpMigrate.ForeignKey}}, - name: {1, {:scalar, ""}, :string}, - pks: {4, :unpacked, :string} - } + %{name: {2, {:scalar, ""}, :string}, schema: {1, {:scalar, ""}, :string}} end ) @@ -5804,38 +8289,20 @@ [ %{ __struct__: Protox.Field, - json_name: "name", + json_name: "schema", kind: {:scalar, ""}, label: :optional, - name: :name, + name: :schema, tag: 1, type: :string }, %{ __struct__: Protox.Field, - json_name: "columns", - kind: :unpacked, - label: :repeated, - name: :columns, + json_name: "name", + kind: {:scalar, ""}, + label: :optional, + name: :name, tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Column} - }, - %{ - __struct__: Protox.Field, - json_name: "fks", - kind: :unpacked, - label: :repeated, - name: :fks, - tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} - }, - %{ - __struct__: Protox.Field, - json_name: "pks", - kind: :unpacked, - label: :repeated, - name: :pks, - tag: 4, type: :string } ] @@ -5844,27 +8311,27 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:name) do + def field_def(:schema) do 
{:ok, %{ __struct__: Protox.Field, - json_name: "name", + json_name: "schema", kind: {:scalar, ""}, label: :optional, - name: :name, + name: :schema, tag: 1, type: :string }} end - def field_def("name") do + def field_def("schema") do {:ok, %{ __struct__: Protox.Field, - json_name: "name", + json_name: "schema", kind: {:scalar, ""}, label: :optional, - name: :name, + name: :schema, tag: 1, type: :string }} @@ -5873,86 +8340,28 @@ [] ), ( - def field_def(:columns) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "columns", - kind: :unpacked, - label: :repeated, - name: :columns, - tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Column} - }} - end - - def field_def("columns") do + def field_def(:name) do {:ok, %{ __struct__: Protox.Field, - json_name: "columns", - kind: :unpacked, - label: :repeated, - name: :columns, + json_name: "name", + kind: {:scalar, ""}, + label: :optional, + name: :name, tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Column} - }} - end - - [] - ), - ( - def field_def(:fks) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "fks", - kind: :unpacked, - label: :repeated, - name: :fks, - tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} - }} - end - - def field_def("fks") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "fks", - kind: :unpacked, - label: :repeated, - name: :fks, - tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.ForeignKey} - }} - end - - [] - ), - ( - def field_def(:pks) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "pks", - kind: :unpacked, - label: :repeated, - name: :pks, - tag: 4, type: :string }} end - def field_def("pks") do + def field_def("name") do {:ok, %{ __struct__: Protox.Field, - json_name: "pks", - kind: :unpacked, - label: :repeated, - name: :pks, - tag: 4, + json_name: "name", + kind: {:scalar, ""}, + label: :optional, + name: :name, + tag: 2, type: :string }} end @@ -5983,17 +8392,11 @@ [ @spec(default(atom) :: {:ok, boolean 
| integer | String.t() | float} | {:error, atom}), - def default(:name) do + def default(:schema) do {:ok, ""} end, - def default(:columns) do - {:error, :no_default_value} - end, - def default(:fks) do - {:error, :no_default_value} - end, - def default(:pks) do - {:error, :no_default_value} + def default(:name) do + {:ok, ""} end, def default(_) do {:error, :no_such_field} @@ -6007,9 +8410,9 @@ end ) end, - defmodule Electric.Satellite.SatOpMigrate.ForeignKey do + defmodule Electric.Satellite.SatOpUpdate do @moduledoc false - defstruct fk_cols: [], pk_table: "", pk_cols: [] + defstruct relation_id: 0, row_data: nil, old_row_data: nil, tags: [] ( ( @@ -6024,47 +8427,58 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_fk_cols(msg) |> encode_pk_table(msg) |> encode_pk_cols(msg) + [] + |> encode_relation_id(msg) + |> encode_row_data(msg) + |> encode_old_row_data(msg) + |> encode_tags(msg) end ) [] [ - defp encode_fk_cols(acc, msg) do + defp encode_relation_id(acc, msg) do + try do + if msg.relation_id == 0 do + acc + else + [acc, "\b", Protox.Encode.encode_uint32(msg.relation_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:relation_id, "invalid field value"), + __STACKTRACE__ + end + end, + defp encode_row_data(acc, msg) do try do - case msg.fk_cols do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\n", Protox.Encode.encode_string(value)] - end) - ] + if msg.row_data == nil do + acc + else + [acc, "\x12", Protox.Encode.encode_message(msg.row_data)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:fk_cols, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:row_data, "invalid field value"), __STACKTRACE__ end end, - defp encode_pk_table(acc, msg) do + defp encode_old_row_data(acc, msg) do try do - if msg.pk_table == "" do + if msg.old_row_data == nil do acc else - [acc, "\x12", Protox.Encode.encode_string(msg.pk_table)] + [acc, 
"\x1A", Protox.Encode.encode_message(msg.old_row_data)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:pk_table, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:old_row_data, "invalid field value"), + __STACKTRACE__ end end, - defp encode_pk_cols(acc, msg) do + defp encode_tags(acc, msg) do try do - case msg.pk_cols do + case msg.tags do [] -> acc @@ -6072,13 +8486,13 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\x1A", Protox.Encode.encode_string(value)] + [acc, "\"", Protox.Encode.encode_string(value)] end) ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:pk_cols, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:tags, "invalid field value"), __STACKTRACE__ end end ] @@ -6101,7 +8515,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatOpMigrate.ForeignKey)) + parse_key_value(bytes, struct(Electric.Satellite.SatOpUpdate)) end ) ) @@ -6119,19 +8533,37 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[fk_cols: msg.fk_cols ++ [Protox.Decode.validate_string(delimited)]], rest} + {value, rest} = Protox.Decode.parse_uint32(bytes) + {[relation_id: value], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[pk_table: Protox.Decode.validate_string(delimited)], rest} + + {[ + row_data: + Protox.MergeMessage.merge( + msg.row_data, + Electric.Satellite.SatOpRow.decode!(delimited) + ) + ], rest} {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[pk_cols: msg.pk_cols ++ [Protox.Decode.validate_string(delimited)]], rest} + + {[ + old_row_data: + Protox.MergeMessage.merge( + msg.old_row_data, + Electric.Satellite.SatOpRow.decode!(delimited) + 
) + ], rest} + + {4, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[tags: msg.tags ++ [Protox.Decode.validate_string(delimited)]], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -6162,7 +8594,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatOpMigrate.ForeignKey, + Electric.Satellite.SatOpUpdate, &json_library_wrapper.decode!(json_library, &1) ) end @@ -6190,9 +8622,10 @@ } def defs() do %{ - 1 => {:fk_cols, :unpacked, :string}, - 2 => {:pk_table, {:scalar, ""}, :string}, - 3 => {:pk_cols, :unpacked, :string} + 1 => {:relation_id, {:scalar, 0}, :uint32}, + 2 => {:row_data, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, + 3 => {:old_row_data, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, + 4 => {:tags, :unpacked, :string} } end @@ -6202,9 +8635,10 @@ } def defs_by_name() do %{ - fk_cols: {1, :unpacked, :string}, - pk_cols: {3, :unpacked, :string}, - pk_table: {2, {:scalar, ""}, :string} + old_row_data: {3, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, + relation_id: {1, {:scalar, 0}, :uint32}, + row_data: {2, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, + tags: {4, :unpacked, :string} } end ) @@ -6215,29 +8649,38 @@ [ %{ __struct__: Protox.Field, - json_name: "fkCols", - kind: :unpacked, - label: :repeated, - name: :fk_cols, + json_name: "relationId", + kind: {:scalar, 0}, + label: :optional, + name: :relation_id, tag: 1, - type: :string + type: :uint32 }, %{ __struct__: Protox.Field, - json_name: "pkTable", - kind: {:scalar, ""}, + json_name: "rowData", + kind: {:scalar, nil}, label: :optional, - name: :pk_table, + name: :row_data, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpRow} }, %{ __struct__: Protox.Field, - json_name: "pkCols", + json_name: "oldRowData", + kind: {:scalar, nil}, + label: :optional, + name: :old_row_data, + tag: 3, + type: {:message, 
Electric.Satellite.SatOpRow} + }, + %{ + __struct__: Protox.Field, + json_name: "tags", kind: :unpacked, label: :repeated, - name: :pk_cols, - tag: 3, + name: :tags, + tag: 4, type: :string } ] @@ -6246,124 +8689,153 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:fk_cols) do + def field_def(:relation_id) do {:ok, %{ __struct__: Protox.Field, - json_name: "fkCols", - kind: :unpacked, - label: :repeated, - name: :fk_cols, + json_name: "relationId", + kind: {:scalar, 0}, + label: :optional, + name: :relation_id, tag: 1, - type: :string + type: :uint32 }} end - def field_def("fkCols") do + def field_def("relationId") do {:ok, %{ __struct__: Protox.Field, - json_name: "fkCols", - kind: :unpacked, - label: :repeated, - name: :fk_cols, + json_name: "relationId", + kind: {:scalar, 0}, + label: :optional, + name: :relation_id, tag: 1, - type: :string + type: :uint32 }} end - def field_def("fk_cols") do + def field_def("relation_id") do {:ok, %{ __struct__: Protox.Field, - json_name: "fkCols", - kind: :unpacked, - label: :repeated, - name: :fk_cols, + json_name: "relationId", + kind: {:scalar, 0}, + label: :optional, + name: :relation_id, tag: 1, - type: :string + type: :uint32 }} end ), ( - def field_def(:pk_table) do + def field_def(:row_data) do {:ok, %{ __struct__: Protox.Field, - json_name: "pkTable", - kind: {:scalar, ""}, + json_name: "rowData", + kind: {:scalar, nil}, label: :optional, - name: :pk_table, + name: :row_data, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpRow} }} end - def field_def("pkTable") do + def field_def("rowData") do {:ok, %{ __struct__: Protox.Field, - json_name: "pkTable", - kind: {:scalar, ""}, + json_name: "rowData", + kind: {:scalar, nil}, label: :optional, - name: :pk_table, + name: :row_data, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpRow} }} end - def field_def("pk_table") do + def field_def("row_data") do {:ok, %{ __struct__: 
Protox.Field, - json_name: "pkTable", - kind: {:scalar, ""}, + json_name: "rowData", + kind: {:scalar, nil}, label: :optional, - name: :pk_table, + name: :row_data, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpRow} }} end ), ( - def field_def(:pk_cols) do + def field_def(:old_row_data) do {:ok, %{ __struct__: Protox.Field, - json_name: "pkCols", - kind: :unpacked, - label: :repeated, - name: :pk_cols, + json_name: "oldRowData", + kind: {:scalar, nil}, + label: :optional, + name: :old_row_data, tag: 3, - type: :string + type: {:message, Electric.Satellite.SatOpRow} }} end - def field_def("pkCols") do + def field_def("oldRowData") do {:ok, %{ __struct__: Protox.Field, - json_name: "pkCols", + json_name: "oldRowData", + kind: {:scalar, nil}, + label: :optional, + name: :old_row_data, + tag: 3, + type: {:message, Electric.Satellite.SatOpRow} + }} + end + + def field_def("old_row_data") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "oldRowData", + kind: {:scalar, nil}, + label: :optional, + name: :old_row_data, + tag: 3, + type: {:message, Electric.Satellite.SatOpRow} + }} + end + ), + ( + def field_def(:tags) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "tags", kind: :unpacked, label: :repeated, - name: :pk_cols, - tag: 3, + name: :tags, + tag: 4, type: :string }} end - def field_def("pk_cols") do + def field_def("tags") do {:ok, %{ __struct__: Protox.Field, - json_name: "pkCols", + json_name: "tags", kind: :unpacked, label: :repeated, - name: :pk_cols, - tag: 3, + name: :tags, + tag: 4, type: :string }} end + + [] ), def field_def(_) do {:error, :no_such_field} @@ -6389,13 +8861,16 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:fk_cols) do - {:error, :no_default_value} + def default(:relation_id) do + {:ok, 0} end, - def default(:pk_table) do - {:ok, ""} + def default(:row_data) do + {:ok, nil} end, - def default(:pk_cols) do + def default(:old_row_data) do + 
{:ok, nil} + end, + def default(:tags) do {:error, :no_default_value} end, def default(_) do @@ -6410,9 +8885,9 @@ end ) end, - defmodule Electric.Satellite.SatPerms.Table do + defmodule Electric.Satellite.SatOpMigrate do @moduledoc false - defstruct schema: "", name: "" + defstruct version: "", stmts: [], table: nil ( ( @@ -6427,35 +8902,53 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_schema(msg) |> encode_name(msg) + [] |> encode_table(msg) |> encode_version(msg) |> encode_stmts(msg) end ) [] [ - defp encode_schema(acc, msg) do + defp encode_version(acc, msg) do try do - if msg.schema == "" do + if msg.version == "" do acc else - [acc, "\n", Protox.Encode.encode_string(msg.schema)] + [acc, "\n", Protox.Encode.encode_string(msg.version)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:schema, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:version, "invalid field value"), __STACKTRACE__ end end, - defp encode_name(acc, msg) do + defp encode_stmts(acc, msg) do try do - if msg.name == "" do - acc - else - [acc, "\x12", Protox.Encode.encode_string(msg.name)] + case msg.stmts do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:name, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:stmts, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_table(acc, msg) do + try do + case msg.table do + nil -> [acc] + child_field_value -> [acc, "\x1A", Protox.Encode.encode_message(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:table, "invalid field value"), __STACKTRACE__ end end ] @@ -6478,7 +8971,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatPerms.Table)) + parse_key_value(bytes, 
struct(Electric.Satellite.SatOpMigrate)) end ) ) @@ -6498,12 +8991,32 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[schema: Protox.Decode.validate_string(delimited)], rest} + {[version: Protox.Decode.validate_string(delimited)], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[name: Protox.Decode.validate_string(delimited)], rest} + + {[stmts: msg.stmts ++ [Electric.Satellite.SatOpMigrate.Stmt.decode!(delimited)]], + rest} + + {3, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + + {[ + case msg.table do + {:table, previous_value} -> + {:table, + Protox.MergeMessage.merge( + previous_value, + Electric.Satellite.SatOpMigrate.Table.decode!(delimited) + )} + + _ -> + {:table, Electric.Satellite.SatOpMigrate.Table.decode!(delimited)} + end + ], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -6534,7 +9047,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatPerms.Table, + Electric.Satellite.SatOpMigrate, &json_library_wrapper.decode!(json_library, &1) ) end @@ -6561,7 +9074,11 @@ required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} } def defs() do - %{1 => {:schema, {:scalar, ""}, :string}, 2 => {:name, {:scalar, ""}, :string}} + %{ + 1 => {:version, {:scalar, ""}, :string}, + 2 => {:stmts, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Stmt}}, + 3 => {:table, {:oneof, :_table}, {:message, Electric.Satellite.SatOpMigrate.Table}} + } end @deprecated "Use fields_defs()/0 instead" @@ -6569,7 +9086,11 @@ required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} } def defs_by_name() do - %{name: {2, {:scalar, ""}, :string}, schema: {1, {:scalar, ""}, :string}} + %{ + stmts: {2, :unpacked, {:message, 
Electric.Satellite.SatOpMigrate.Stmt}}, + table: {3, {:oneof, :_table}, {:message, Electric.Satellite.SatOpMigrate.Table}}, + version: {1, {:scalar, ""}, :string} + } end ) @@ -6579,21 +9100,30 @@ [ %{ __struct__: Protox.Field, - json_name: "schema", + json_name: "version", kind: {:scalar, ""}, label: :optional, - name: :schema, + name: :version, tag: 1, type: :string }, %{ __struct__: Protox.Field, - json_name: "name", - kind: {:scalar, ""}, - label: :optional, - name: :name, + json_name: "stmts", + kind: :unpacked, + label: :repeated, + name: :stmts, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpMigrate.Stmt} + }, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:oneof, :_table}, + label: :proto3_optional, + name: :table, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.Table} } ] end @@ -6601,27 +9131,27 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:schema) do + def field_def(:version) do {:ok, %{ __struct__: Protox.Field, - json_name: "schema", + json_name: "version", kind: {:scalar, ""}, label: :optional, - name: :schema, + name: :version, tag: 1, type: :string }} end - def field_def("schema") do + def field_def("version") do {:ok, %{ __struct__: Protox.Field, - json_name: "schema", + json_name: "version", kind: {:scalar, ""}, label: :optional, - name: :schema, + name: :version, tag: 1, type: :string }} @@ -6630,29 +9160,58 @@ [] ), ( - def field_def(:name) do + def field_def(:stmts) do {:ok, %{ __struct__: Protox.Field, - json_name: "name", - kind: {:scalar, ""}, - label: :optional, - name: :name, + json_name: "stmts", + kind: :unpacked, + label: :repeated, + name: :stmts, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpMigrate.Stmt} }} end - def field_def("name") do + def field_def("stmts") do {:ok, %{ __struct__: Protox.Field, - json_name: "name", - kind: {:scalar, ""}, - label: :optional, - name: :name, + json_name: "stmts", + kind: 
:unpacked, + label: :repeated, + name: :stmts, tag: 2, - type: :string + type: {:message, Electric.Satellite.SatOpMigrate.Stmt} + }} + end + + [] + ), + ( + def field_def(:table) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:oneof, :_table}, + label: :proto3_optional, + name: :table, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.Table} + }} + end + + def field_def("table") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "table", + kind: {:oneof, :_table}, + label: :proto3_optional, + name: :table, + tag: 3, + type: {:message, Electric.Satellite.SatOpMigrate.Table} }} end @@ -6682,11 +9241,14 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:schema) do + def default(:version) do {:ok, ""} end, - def default(:name) do - {:ok, ""} + def default(:stmts) do + {:error, :no_default_value} + end, + def default(:table) do + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -6700,9 +9262,9 @@ end ) end, - defmodule Electric.Satellite.SatOpUpdate do + defmodule Electric.Satellite.SatPerms.ColumnList do @moduledoc false - defstruct relation_id: 0, row_data: nil, old_row_data: nil, tags: [] + defstruct names: [] ( ( @@ -6717,58 +9279,16 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] - |> encode_relation_id(msg) - |> encode_row_data(msg) - |> encode_old_row_data(msg) - |> encode_tags(msg) + [] |> encode_names(msg) end ) [] [ - defp encode_relation_id(acc, msg) do - try do - if msg.relation_id == 0 do - acc - else - [acc, "\b", Protox.Encode.encode_uint32(msg.relation_id)] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:relation_id, "invalid field value"), - __STACKTRACE__ - end - end, - defp encode_row_data(acc, msg) do - try do - if msg.row_data == nil do - acc - else - [acc, "\x12", Protox.Encode.encode_message(msg.row_data)] - end - rescue - ArgumentError -> - reraise 
Protox.EncodingError.new(:row_data, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_old_row_data(acc, msg) do - try do - if msg.old_row_data == nil do - acc - else - [acc, "\x1A", Protox.Encode.encode_message(msg.old_row_data)] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:old_row_data, "invalid field value"), - __STACKTRACE__ - end - end, - defp encode_tags(acc, msg) do + defp encode_names(acc, msg) do try do - case msg.tags do + case msg.names do [] -> acc @@ -6776,13 +9296,13 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\"", Protox.Encode.encode_string(value)] + [acc, "*", Protox.Encode.encode_string(value)] end) ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:tags, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:names, "invalid field value"), __STACKTRACE__ end end ] @@ -6805,55 +9325,27 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatOpUpdate)) + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.ColumnList)) end ) ) ( @spec parse_key_value(binary, struct) :: struct - defp parse_key_value(<<>>, msg) do - msg - end - - defp parse_key_value(bytes, msg) do - {field, rest} = - case Protox.Decode.parse_key(bytes) do - {0, _, _} -> - raise %Protox.IllegalTagError{} - - {1, _, bytes} -> - {value, rest} = Protox.Decode.parse_uint32(bytes) - {[relation_id: value], rest} - - {2, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - row_data: - Protox.MergeMessage.merge( - msg.row_data, - Electric.Satellite.SatOpRow.decode!(delimited) - ) - ], rest} - - {3, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + defp parse_key_value(<<>>, msg) do + msg + end - {[ - old_row_data: - Protox.MergeMessage.merge( - msg.old_row_data, - 
Electric.Satellite.SatOpRow.decode!(delimited) - ) - ], rest} + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} - {4, _, bytes} -> + {5, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[tags: msg.tags ++ [Protox.Decode.validate_string(delimited)]], rest} + {[names: msg.names ++ [Protox.Decode.validate_string(delimited)]], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -6884,7 +9376,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatOpUpdate, + Electric.Satellite.SatPerms.ColumnList, &json_library_wrapper.decode!(json_library, &1) ) end @@ -6911,12 +9403,7 @@ required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} } def defs() do - %{ - 1 => {:relation_id, {:scalar, 0}, :uint32}, - 2 => {:row_data, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, - 3 => {:old_row_data, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, - 4 => {:tags, :unpacked, :string} - } + %{5 => {:names, :unpacked, :string}} end @deprecated "Use fields_defs()/0 instead" @@ -6924,12 +9411,7 @@ required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} } def defs_by_name() do - %{ - old_row_data: {3, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, - relation_id: {1, {:scalar, 0}, :uint32}, - row_data: {2, {:scalar, nil}, {:message, Electric.Satellite.SatOpRow}}, - tags: {4, :unpacked, :string} - } + %{names: {5, :unpacked, :string}} end ) @@ -6939,38 +9421,11 @@ [ %{ __struct__: Protox.Field, - json_name: "relationId", - kind: {:scalar, 0}, - label: :optional, - name: :relation_id, - tag: 1, - type: :uint32 - }, - %{ - __struct__: Protox.Field, - json_name: "rowData", - kind: {:scalar, nil}, - label: :optional, - name: :row_data, - tag: 2, - type: {:message, Electric.Satellite.SatOpRow} - }, - %{ - 
__struct__: Protox.Field, - json_name: "oldRowData", - kind: {:scalar, nil}, - label: :optional, - name: :old_row_data, - tag: 3, - type: {:message, Electric.Satellite.SatOpRow} - }, - %{ - __struct__: Protox.Field, - json_name: "tags", + json_name: "names", kind: :unpacked, label: :repeated, - name: :tags, - tag: 4, + name: :names, + tag: 5, type: :string } ] @@ -6979,148 +9434,28 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:relation_id) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "relationId", - kind: {:scalar, 0}, - label: :optional, - name: :relation_id, - tag: 1, - type: :uint32 - }} - end - - def field_def("relationId") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "relationId", - kind: {:scalar, 0}, - label: :optional, - name: :relation_id, - tag: 1, - type: :uint32 - }} - end - - def field_def("relation_id") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "relationId", - kind: {:scalar, 0}, - label: :optional, - name: :relation_id, - tag: 1, - type: :uint32 - }} - end - ), - ( - def field_def(:row_data) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "rowData", - kind: {:scalar, nil}, - label: :optional, - name: :row_data, - tag: 2, - type: {:message, Electric.Satellite.SatOpRow} - }} - end - - def field_def("rowData") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "rowData", - kind: {:scalar, nil}, - label: :optional, - name: :row_data, - tag: 2, - type: {:message, Electric.Satellite.SatOpRow} - }} - end - - def field_def("row_data") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "rowData", - kind: {:scalar, nil}, - label: :optional, - name: :row_data, - tag: 2, - type: {:message, Electric.Satellite.SatOpRow} - }} - end - ), - ( - def field_def(:old_row_data) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "oldRowData", - kind: {:scalar, nil}, - label: :optional, - name: :old_row_data, - tag: 3, - type: {:message, 
Electric.Satellite.SatOpRow} - }} - end - - def field_def("oldRowData") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "oldRowData", - kind: {:scalar, nil}, - label: :optional, - name: :old_row_data, - tag: 3, - type: {:message, Electric.Satellite.SatOpRow} - }} - end - - def field_def("old_row_data") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "oldRowData", - kind: {:scalar, nil}, - label: :optional, - name: :old_row_data, - tag: 3, - type: {:message, Electric.Satellite.SatOpRow} - }} - end - ), - ( - def field_def(:tags) do + def field_def(:names) do {:ok, %{ __struct__: Protox.Field, - json_name: "tags", + json_name: "names", kind: :unpacked, label: :repeated, - name: :tags, - tag: 4, + name: :names, + tag: 5, type: :string }} end - def field_def("tags") do + def field_def("names") do {:ok, %{ __struct__: Protox.Field, - json_name: "tags", + json_name: "names", kind: :unpacked, label: :repeated, - name: :tags, - tag: 4, + name: :names, + tag: 5, type: :string }} end @@ -7151,16 +9486,7 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:relation_id) do - {:ok, 0} - end, - def default(:row_data) do - {:ok, nil} - end, - def default(:old_row_data) do - {:ok, nil} - end, - def default(:tags) do + def default(:names) do {:error, :no_default_value} end, def default(_) do @@ -7175,9 +9501,9 @@ end ) end, - defmodule Electric.Satellite.SatOpMigrate do + defmodule Electric.Satellite.SatPerms.Sqlite do @moduledoc false - defstruct version: "", stmts: [], table: nil + defstruct stmt: "" ( ( @@ -7192,53 +9518,23 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_table(msg) |> encode_version(msg) |> encode_stmts(msg) + [] |> encode_stmt(msg) end ) [] [ - defp encode_version(acc, msg) do + defp encode_stmt(acc, msg) do try do - if msg.version == "" do + if msg.stmt == "" do acc else - [acc, "\n", Protox.Encode.encode_string(msg.version)] - end - rescue - 
ArgumentError -> - reraise Protox.EncodingError.new(:version, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_stmts(acc, msg) do - try do - case msg.stmts do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] - end) - ] - end - rescue - ArgumentError -> - reraise Protox.EncodingError.new(:stmts, "invalid field value"), __STACKTRACE__ - end - end, - defp encode_table(acc, msg) do - try do - case msg.table do - nil -> [acc] - child_field_value -> [acc, "\x1A", Protox.Encode.encode_message(child_field_value)] + [acc, "\n", Protox.Encode.encode_string(msg.stmt)] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:table, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:stmt, "invalid field value"), __STACKTRACE__ end end ] @@ -7261,7 +9557,7 @@ ( @spec decode!(binary) :: struct | no_return def decode!(bytes) do - parse_key_value(bytes, struct(Electric.Satellite.SatOpMigrate)) + parse_key_value(bytes, struct(Electric.Satellite.SatPerms.Sqlite)) end ) ) @@ -7270,43 +9566,18 @@ @spec parse_key_value(binary, struct) :: struct defp parse_key_value(<<>>, msg) do msg - end - - defp parse_key_value(bytes, msg) do - {field, rest} = - case Protox.Decode.parse_key(bytes) do - {0, _, _} -> - raise %Protox.IllegalTagError{} - - {1, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[version: Protox.Decode.validate_string(delimited)], rest} - - {2, _, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + end - {[stmts: msg.stmts ++ [Electric.Satellite.SatOpMigrate.Stmt.decode!(delimited)]], - rest} + defp parse_key_value(bytes, msg) do + {field, rest} = + case Protox.Decode.parse_key(bytes) do + {0, _, _} -> + raise %Protox.IllegalTagError{} - {3, _, bytes} -> + {1, _, bytes} -> {len, bytes} = 
Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - case msg.table do - {:table, previous_value} -> - {:table, - Protox.MergeMessage.merge( - previous_value, - Electric.Satellite.SatOpMigrate.Table.decode!(delimited) - )} - - _ -> - {:table, Electric.Satellite.SatOpMigrate.Table.decode!(delimited)} - end - ], rest} + {[stmt: Protox.Decode.validate_string(delimited)], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -7337,7 +9608,7 @@ Protox.JsonDecode.decode!( input, - Electric.Satellite.SatOpMigrate, + Electric.Satellite.SatPerms.Sqlite, &json_library_wrapper.decode!(json_library, &1) ) end @@ -7364,11 +9635,7 @@ required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} } def defs() do - %{ - 1 => {:version, {:scalar, ""}, :string}, - 2 => {:stmts, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Stmt}}, - 3 => {:table, {:oneof, :_table}, {:message, Electric.Satellite.SatOpMigrate.Table}} - } + %{1 => {:stmt, {:scalar, ""}, :string}} end @deprecated "Use fields_defs()/0 instead" @@ -7376,11 +9643,7 @@ required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} } def defs_by_name() do - %{ - stmts: {2, :unpacked, {:message, Electric.Satellite.SatOpMigrate.Stmt}}, - table: {3, {:oneof, :_table}, {:message, Electric.Satellite.SatOpMigrate.Table}}, - version: {1, {:scalar, ""}, :string} - } + %{stmt: {1, {:scalar, ""}, :string}} end ) @@ -7390,30 +9653,12 @@ [ %{ __struct__: Protox.Field, - json_name: "version", + json_name: "stmt", kind: {:scalar, ""}, label: :optional, - name: :version, + name: :stmt, tag: 1, type: :string - }, - %{ - __struct__: Protox.Field, - json_name: "stmts", - kind: :unpacked, - label: :repeated, - name: :stmts, - tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Stmt} - }, - %{ - __struct__: Protox.Field, - json_name: "table", - kind: {:oneof, :_table}, - label: :proto3_optional, - name: :table, - 
tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.Table} } ] end @@ -7421,27 +9666,27 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:version) do + def field_def(:stmt) do {:ok, %{ __struct__: Protox.Field, - json_name: "version", + json_name: "stmt", kind: {:scalar, ""}, label: :optional, - name: :version, + name: :stmt, tag: 1, type: :string }} end - def field_def("version") do + def field_def("stmt") do {:ok, %{ __struct__: Protox.Field, - json_name: "version", + json_name: "stmt", kind: {:scalar, ""}, label: :optional, - name: :version, + name: :stmt, tag: 1, type: :string }} @@ -7449,64 +9694,6 @@ [] ), - ( - def field_def(:stmts) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "stmts", - kind: :unpacked, - label: :repeated, - name: :stmts, - tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Stmt} - }} - end - - def field_def("stmts") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "stmts", - kind: :unpacked, - label: :repeated, - name: :stmts, - tag: 2, - type: {:message, Electric.Satellite.SatOpMigrate.Stmt} - }} - end - - [] - ), - ( - def field_def(:table) do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "table", - kind: {:oneof, :_table}, - label: :proto3_optional, - name: :table, - tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.Table} - }} - end - - def field_def("table") do - {:ok, - %{ - __struct__: Protox.Field, - json_name: "table", - kind: {:oneof, :_table}, - label: :proto3_optional, - name: :table, - tag: 3, - type: {:message, Electric.Satellite.SatOpMigrate.Table} - }} - end - - [] - ), def field_def(_) do {:error, :no_such_field} end @@ -7531,15 +9718,9 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:version) do + def default(:stmt) do {:ok, ""} end, - def default(:stmts) do - {:error, :no_default_value} - end, - def default(:table) do - {:error, :no_default_value} - end, def 
default(_) do {:error, :no_such_field} end @@ -8143,7 +10324,7 @@ end, defmodule Electric.Satellite.SatPerms.Roles do @moduledoc false - defstruct roles: [] + defstruct id: 0, parent_id: nil, rules_id: 0, roles: [] ( ( @@ -8158,13 +10339,52 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_roles(msg) + [] + |> encode_parent_id(msg) + |> encode_id(msg) + |> encode_rules_id(msg) + |> encode_roles(msg) end ) [] [ + defp encode_id(acc, msg) do + try do + if msg.id == 0 do + acc + else + [acc, "\b", Protox.Encode.encode_uint64(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_parent_id(acc, msg) do + try do + case msg.parent_id do + nil -> [acc] + child_field_value -> [acc, "\x10", Protox.Encode.encode_uint64(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:parent_id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_rules_id(acc, msg) do + try do + if msg.rules_id == 0 do + acc + else + [acc, "\x18", Protox.Encode.encode_uint64(msg.rules_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:rules_id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_roles(acc, msg) do try do case msg.roles do @@ -8175,7 +10395,7 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] + [acc, "\"", Protox.Encode.encode_message(value)] end) ] end @@ -8221,7 +10441,19 @@ {0, _, _} -> raise %Protox.IllegalTagError{} + {1, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[id: value], rest} + {2, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[parent_id: value], rest} + + {3, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[rules_id: value], rest} + + {4, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, 
len) @@ -8278,42 +10510,188 @@ end ) - ( - @deprecated "Use fields_defs()/0 instead" - @spec defs() :: %{ - required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} - } - def defs() do - %{2 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}} - end + ( + @deprecated "Use fields_defs()/0 instead" + @spec defs() :: %{ + required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} + } + def defs() do + %{ + 1 => {:id, {:scalar, 0}, :uint64}, + 2 => {:parent_id, {:oneof, :_parent_id}, :uint64}, + 3 => {:rules_id, {:scalar, 0}, :uint64}, + 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}} + } + end + + @deprecated "Use fields_defs()/0 instead" + @spec defs_by_name() :: %{ + required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} + } + def defs_by_name() do + %{ + id: {1, {:scalar, 0}, :uint64}, + parent_id: {2, {:oneof, :_parent_id}, :uint64}, + roles: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, + rules_id: {3, {:scalar, 0}, :uint64} + } + end + ) + + ( + @spec fields_defs() :: list(Protox.Field.t()) + def fields_defs() do + [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "roles", + kind: :unpacked, + label: :repeated, + name: :roles, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Role} + } + ] + end + + [ + @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + 
label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + [] + ), + ( + def field_def(:parent_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + + def field_def("parentId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end - @deprecated "Use fields_defs()/0 instead" - @spec defs_by_name() :: %{ - required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} - } - def defs_by_name() do - %{roles: {2, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}} - end - ) + def field_def("parent_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + ), + ( + def field_def(:rules_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end - ( - @spec fields_defs() :: list(Protox.Field.t()) - def fields_defs() do - [ - %{ - __struct__: Protox.Field, - json_name: "roles", - kind: :unpacked, - label: :repeated, - name: :roles, - tag: 2, - type: {:message, Electric.Satellite.SatPerms.Role} - } - ] - end + def field_def("rulesId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end - [ - @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + def field_def("rules_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: 
"rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end + ), ( def field_def(:roles) do {:ok, @@ -8323,7 +10701,7 @@ kind: :unpacked, label: :repeated, name: :roles, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -8336,7 +10714,7 @@ kind: :unpacked, label: :repeated, name: :roles, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -8367,6 +10745,15 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, 0} + end, + def default(:parent_id) do + {:error, :no_default_value} + end, + def default(:rules_id) do + {:ok, 0} + end, def default(:roles) do {:error, :no_default_value} end, @@ -9876,8 +12263,8 @@ defstruct id: "", table: nil, role: nil, - privileges: [], - columns: [], + privilege: :DELETE, + columns: nil, scope: nil, path: nil, check: nil @@ -9896,14 +12283,14 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do [] + |> encode_columns(msg) |> encode_scope(msg) |> encode_path(msg) |> encode_check(msg) |> encode_id(msg) |> encode_table(msg) |> encode_role(msg) - |> encode_privileges(msg) - |> encode_columns(msg) + |> encode_privilege(msg) end ) @@ -9946,51 +12333,29 @@ reraise Protox.EncodingError.new(:role, "invalid field value"), __STACKTRACE__ end end, - defp encode_privileges(acc, msg) do + defp encode_privilege(acc, msg) do try do - case msg.privileges do - [] -> - acc - - values -> - [ - acc, - "\"", - ( - {bytes, len} = - Enum.reduce(values, {[], 0}, fn value, {acc, len} -> - value_bytes = - :binary.list_to_bin([ - value - |> Electric.Satellite.SatPerms.Privilege.encode() - |> Protox.Encode.encode_enum() - ]) - - {[acc, value_bytes], len + byte_size(value_bytes)} - end) - - [Protox.Varint.encode(len), bytes] - ) - ] + if msg.privilege == :DELETE do + acc + else + [ + acc, + " ", + msg.privilege + |> Electric.Satellite.SatPerms.Privilege.encode() + |> 
Protox.Encode.encode_enum() + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:privileges, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:privilege, "invalid field value"), __STACKTRACE__ end end, defp encode_columns(acc, msg) do try do case msg.columns do - [] -> - acc - - values -> - [ - acc, - Enum.reduce(values, [], fn value, acc -> - [acc, "*", Protox.Encode.encode_string(value)] - end) - ] + nil -> [acc] + child_field_value -> [acc, "*", Protox.Encode.encode_message(child_field_value)] end rescue ArgumentError -> @@ -10096,30 +12461,29 @@ ) ], rest} - {4, 2, bytes} -> - {len, bytes} = Protox.Varint.decode(bytes) - {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - - {[ - privileges: - msg.privileges ++ - Protox.Decode.parse_repeated_enum( - [], - delimited, - Electric.Satellite.SatPerms.Privilege - ) - ], rest} - {4, _, bytes} -> {value, rest} = Protox.Decode.parse_enum(bytes, Electric.Satellite.SatPerms.Privilege) - {[privileges: msg.privileges ++ [value]], rest} + {[privilege: value], rest} {5, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[columns: msg.columns ++ [Protox.Decode.validate_string(delimited)]], rest} + + {[ + case msg.columns do + {:columns, previous_value} -> + {:columns, + Protox.MergeMessage.merge( + previous_value, + Electric.Satellite.SatPerms.ColumnList.decode!(delimited) + )} + + _ -> + {:columns, Electric.Satellite.SatPerms.ColumnList.decode!(delimited)} + end + ], rest} {6, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) @@ -10222,8 +12586,9 @@ 1 => {:id, {:scalar, ""}, :string}, 2 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, 3 => {:role, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, - 4 => {:privileges, :packed, {:enum, Electric.Satellite.SatPerms.Privilege}}, - 5 => {:columns, :unpacked, :string}, + 4 => {:privilege, {:scalar, :DELETE}, 
{:enum, Electric.Satellite.SatPerms.Privilege}}, + 5 => + {:columns, {:oneof, :_columns}, {:message, Electric.Satellite.SatPerms.ColumnList}}, 6 => {:scope, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, 7 => {:path, {:oneof, :_path}, {:message, Electric.Satellite.SatPerms.Path}}, 8 => {:check, {:oneof, :_check}, :string} @@ -10237,10 +12602,10 @@ def defs_by_name() do %{ check: {8, {:oneof, :_check}, :string}, - columns: {5, :unpacked, :string}, + columns: {5, {:oneof, :_columns}, {:message, Electric.Satellite.SatPerms.ColumnList}}, id: {1, {:scalar, ""}, :string}, path: {7, {:oneof, :_path}, {:message, Electric.Satellite.SatPerms.Path}}, - privileges: {4, :packed, {:enum, Electric.Satellite.SatPerms.Privilege}}, + privilege: {4, {:scalar, :DELETE}, {:enum, Electric.Satellite.SatPerms.Privilege}}, role: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, scope: {6, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, table: {2, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}} @@ -10281,21 +12646,21 @@ }, %{ __struct__: Protox.Field, - json_name: "privileges", - kind: :packed, - label: :repeated, - name: :privileges, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, tag: 4, type: {:enum, Electric.Satellite.SatPerms.Privilege} }, %{ __struct__: Protox.Field, json_name: "columns", - kind: :unpacked, - label: :repeated, + kind: {:oneof, :_columns}, + label: :proto3_optional, name: :columns, tag: 5, - type: :string + type: {:message, Electric.Satellite.SatPerms.ColumnList} }, %{ __struct__: Protox.Field, @@ -10417,27 +12782,27 @@ [] ), ( - def field_def(:privileges) do + def field_def(:privilege) do {:ok, %{ __struct__: Protox.Field, - json_name: "privileges", - kind: :packed, - label: :repeated, - name: :privileges, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, tag: 4, type: {:enum, 
Electric.Satellite.SatPerms.Privilege} }} end - def field_def("privileges") do + def field_def("privilege") do {:ok, %{ __struct__: Protox.Field, - json_name: "privileges", - kind: :packed, - label: :repeated, - name: :privileges, + json_name: "privilege", + kind: {:scalar, :DELETE}, + label: :optional, + name: :privilege, tag: 4, type: {:enum, Electric.Satellite.SatPerms.Privilege} }} @@ -10451,11 +12816,11 @@ %{ __struct__: Protox.Field, json_name: "columns", - kind: :unpacked, - label: :repeated, + kind: {:oneof, :_columns}, + label: :proto3_optional, name: :columns, tag: 5, - type: :string + type: {:message, Electric.Satellite.SatPerms.ColumnList} }} end @@ -10464,11 +12829,11 @@ %{ __struct__: Protox.Field, json_name: "columns", - kind: :unpacked, - label: :repeated, + kind: {:oneof, :_columns}, + label: :proto3_optional, name: :columns, tag: 5, - type: :string + type: {:message, Electric.Satellite.SatPerms.ColumnList} }} end @@ -10594,8 +12959,8 @@ def default(:role) do {:ok, nil} end, - def default(:privileges) do - {:error, :no_default_value} + def default(:privilege) do + {:ok, :DELETE} end, def default(:columns) do {:error, :no_default_value} @@ -10623,7 +12988,7 @@ end, defmodule Electric.Satellite.SatPerms do @moduledoc false - defstruct id: 0, rules: nil, roles: nil + defstruct id: 0, user_id: "", rules: nil, roles: [] ( ( @@ -10638,7 +13003,7 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_id(msg) |> encode_rules(msg) |> encode_roles(msg) + [] |> encode_id(msg) |> encode_user_id(msg) |> encode_rules(msg) |> encode_roles(msg) end ) @@ -10657,6 +13022,18 @@ reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ end end, + defp encode_user_id(acc, msg) do + try do + if msg.user_id == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.user_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:user_id, "invalid field value"), __STACKTRACE__ + end + end, defp 
encode_rules(acc, msg) do try do if msg.rules == nil do @@ -10671,10 +13048,17 @@ end, defp encode_roles(acc, msg) do try do - if msg.roles == nil do - acc - else - [acc, "\"", Protox.Encode.encode_message(msg.roles)] + case msg.roles do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\"", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> @@ -10722,6 +13106,11 @@ {value, rest} = Protox.Decode.parse_int64(bytes) {[id: value], rest} + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[user_id: Protox.Decode.validate_string(delimited)], rest} + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -10738,13 +13127,8 @@ {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[ - roles: - Protox.MergeMessage.merge( - msg.roles, - Electric.Satellite.SatPerms.Roles.decode!(delimited) - ) - ], rest} + {[roles: msg.roles ++ [Electric.Satellite.SatPerms.Role.decode!(delimited)]], + rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -10804,8 +13188,9 @@ def defs() do %{ 1 => {:id, {:scalar, 0}, :int64}, + 2 => {:user_id, {:scalar, ""}, :string}, 3 => {:rules, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, - 4 => {:roles, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Roles}} + 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}} } end @@ -10816,8 +13201,9 @@ def defs_by_name() do %{ id: {1, {:scalar, 0}, :int64}, - roles: {4, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Roles}}, - rules: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}} + roles: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, + rules: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, + user_id: {2, 
{:scalar, ""}, :string} } end ) @@ -10835,6 +13221,15 @@ tag: 1, type: :int64 }, + %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }, %{ __struct__: Protox.Field, json_name: "rules", @@ -10847,11 +13242,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} } ] end @@ -10859,33 +13254,73 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:id) do + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :int64 + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :int64 + }} + end + + [] + ), + ( + def field_def(:user_id) do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, 0}, + json_name: "userId", + kind: {:scalar, ""}, label: :optional, - name: :id, - tag: 1, - type: :int64 + name: :user_id, + tag: 2, + type: :string }} end - def field_def("id") do + def field_def("userId") do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, 0}, + json_name: "userId", + kind: {:scalar, ""}, label: :optional, - name: :id, - tag: 1, - type: :int64 + name: :user_id, + tag: 2, + type: :string }} end - [] + def field_def("user_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }} + end ), ( def field_def(:rules) do @@ -10922,11 +13357,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, 
name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -10935,11 +13370,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -10972,11 +13407,14 @@ def default(:id) do {:ok, 0} end, + def default(:user_id) do + {:ok, ""} + end, def default(:rules) do {:ok, nil} end, def default(:roles) do - {:ok, nil} + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -14225,7 +16663,7 @@ end, defmodule Electric.Satellite.SatPerms.Rules do @moduledoc false - defstruct grants: [], assigns: [] + defstruct id: 0, parent_id: nil, grants: [], assigns: [] ( ( @@ -14240,13 +16678,40 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_grants(msg) |> encode_assigns(msg) + [] + |> encode_parent_id(msg) + |> encode_id(msg) + |> encode_grants(msg) + |> encode_assigns(msg) end ) [] [ + defp encode_id(acc, msg) do + try do + if msg.id == 0 do + acc + else + [acc, "\b", Protox.Encode.encode_uint64(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_parent_id(acc, msg) do + try do + case msg.parent_id do + nil -> [acc] + child_field_value -> [acc, "\x10", Protox.Encode.encode_uint64(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:parent_id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_grants(acc, msg) do try do case msg.grants do @@ -14257,7 +16722,7 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\n", Protox.Encode.encode_message(value)] + [acc, "\x1A", Protox.Encode.encode_message(value)] end) ] end @@ -14276,7 +16741,7 @@ [ acc, Enum.reduce(values, [], fn value, acc 
-> - [acc, "\x12", Protox.Encode.encode_message(value)] + [acc, "\"", Protox.Encode.encode_message(value)] end) ] end @@ -14323,13 +16788,21 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[id: value], rest} + + {2, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[parent_id: value], rest} + + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) {[grants: msg.grants ++ [Electric.Satellite.SatPerms.Grant.decode!(delimited)]], rest} - {2, _, bytes} -> + {4, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -14394,8 +16867,10 @@ } def defs() do %{ - 1 => {:grants, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, - 2 => {:assigns, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}} + 1 => {:id, {:scalar, 0}, :uint64}, + 2 => {:parent_id, {:oneof, :_parent_id}, :uint64}, + 3 => {:grants, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + 4 => {:assigns, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}} } end @@ -14405,8 +16880,10 @@ } def defs_by_name() do %{ - assigns: {2, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}}, - grants: {1, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}} + assigns: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}}, + grants: {3, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + id: {1, {:scalar, 0}, :uint64}, + parent_id: {2, {:oneof, :_parent_id}, :uint64} } end ) @@ -14415,13 +16892,31 @@ @spec fields_defs() :: list(Protox.Field.t()) def fields_defs() do [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: 
:uint64 + }, %{ __struct__: Protox.Field, json_name: "grants", kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }, %{ @@ -14430,7 +16925,7 @@ kind: :unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} } ] @@ -14438,6 +16933,75 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + [] + ), + ( + def field_def(:parent_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + + def field_def("parentId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + + def field_def("parent_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + ), ( def field_def(:grants) do {:ok, @@ -14447,7 +17011,7 @@ kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }} end @@ -14460,7 +17024,7 @@ kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }} end @@ -14476,7 +17040,7 @@ kind: :unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} }} end @@ -14489,7 +17053,7 @@ kind: 
:unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} }} end @@ -14520,6 +17084,12 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, 0} + end, + def default(:parent_id) do + {:error, :no_default_value} + end, def default(:grants) do {:error, :no_default_value} end, @@ -14853,7 +17423,7 @@ end, defmodule Electric.Satellite.SatPerms.Scope do @moduledoc false - defstruct table: nil, id: "" + defstruct table: nil, id: [] ( ( @@ -14889,10 +17459,17 @@ end, defp encode_id(acc, msg) do try do - if msg.id == "" do - acc - else - [acc, "\x12", Protox.Encode.encode_string(msg.id)] + case msg.id do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_string(value)] + end) + ] end rescue ArgumentError -> @@ -14951,7 +17528,7 @@ {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[id: Protox.Decode.validate_string(delimited)], rest} + {[id: msg.id ++ [Protox.Decode.validate_string(delimited)]], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -15011,7 +17588,7 @@ def defs() do %{ 1 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, - 2 => {:id, {:scalar, ""}, :string} + 2 => {:id, :unpacked, :string} } end @@ -15021,7 +17598,7 @@ } def defs_by_name() do %{ - id: {2, {:scalar, ""}, :string}, + id: {2, :unpacked, :string}, table: {1, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}} } end @@ -15043,8 +17620,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :id, tag: 2, type: :string @@ -15089,8 +17666,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: 
:id, tag: 2, type: :string @@ -15102,8 +17679,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :id, tag: 2, type: :string @@ -15140,7 +17717,7 @@ {:ok, nil} end, def default(:id) do - {:ok, ""} + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -16530,7 +19107,7 @@ end, defmodule Electric.Satellite.SatPerms.Role do @moduledoc false - defstruct id: "", role: "", user_id: "", assign_id: "", scope: nil + defstruct row_id: [], role: "", user_id: "", assign_id: "", scope: nil ( ( @@ -16547,7 +19124,7 @@ def encode!(msg) do [] |> encode_scope(msg) - |> encode_id(msg) + |> encode_row_id(msg) |> encode_role(msg) |> encode_user_id(msg) |> encode_assign_id(msg) @@ -16557,16 +19134,23 @@ [] [ - defp encode_id(acc, msg) do + defp encode_row_id(acc, msg) do try do - if msg.id == "" do - acc - else - [acc, "\n", Protox.Encode.encode_string(msg.id)] + case msg.row_id do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\n", Protox.Encode.encode_string(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + reraise Protox.EncodingError.new(:row_id, "invalid field value"), __STACKTRACE__ end end, defp encode_role(acc, msg) do @@ -16656,7 +19240,7 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[id: Protox.Decode.validate_string(delimited)], rest} + {[row_id: msg.row_id ++ [Protox.Decode.validate_string(delimited)]], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) @@ -16748,7 +19332,7 @@ } def defs() do %{ - 1 => {:id, {:scalar, ""}, :string}, + 1 => {:row_id, :unpacked, :string}, 2 => {:role, {:scalar, ""}, :string}, 3 => {:user_id, {:scalar, ""}, :string}, 4 => {:assign_id, {:scalar, ""}, :string}, @@ -16763,8 +19347,8 @@ def defs_by_name() do %{ 
assign_id: {4, {:scalar, ""}, :string}, - id: {1, {:scalar, ""}, :string}, role: {2, {:scalar, ""}, :string}, + row_id: {1, :unpacked, :string}, scope: {5, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Scope}}, user_id: {3, {:scalar, ""}, :string} } @@ -16777,10 +19361,10 @@ [ %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }, @@ -16826,33 +19410,44 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:id) do + def field_def(:row_id) do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }} end - def field_def("id") do + def field_def("rowId") do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }} end - [] + def field_def("row_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, + tag: 1, + type: :string + }} + end ), ( def field_def(:role) do @@ -17016,8 +19611,8 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:id) do - {:ok, ""} + def default(:row_id) do + {:error, :no_default_value} end, def default(:role) do {:ok, ""} diff --git a/components/electric/lib/electric/satellite/protocol.ex b/components/electric/lib/electric/satellite/protocol.ex index f497583965..50e36af28e 100644 --- a/components/electric/lib/electric/satellite/protocol.ex +++ b/components/electric/lib/electric/satellite/protocol.ex @@ -564,6 +564,8 @@ defmodule Electric.Satellite.Protocol do def handle_outgoing_txs([{tx, offset} | events], 
%State{} = state, {msgs_acc, actions_acc}) when can_send_more_txs(state) do + {tx, state} = manage_permissions_changes(tx, state) + {%Transaction{} = filtered_tx, new_graph, actions} = process_transaction(tx, state.out_rep.sent_rows_graph, state) @@ -623,6 +625,33 @@ defmodule Electric.Satellite.Protocol do {msgs_acc, actions_acc, state} end + defp manage_permissions_changes(tx, state) do + %{auth: %{user_id: user_id}} = state + + {changes, state} = + Enum.flat_map_reduce( + tx.changes, + state, + fn + %Changes.UpdatedPermissions{type: :user, permissions: %{user_id: ^user_id}}, state -> + Logger.debug(fn -> "User permissions updated for connection" end) + {[], state} + + %Changes.UpdatedPermissions{type: :user}, state -> + {[], state} + + %Changes.UpdatedPermissions{type: :global}, state -> + Logger.debug(fn -> "Global permissions updated for connection" end) + {[], state} + + change, state -> + {[change], state} + end + ) + + {%{tx | changes: changes}, state} + end + # If the client received at least one migration during the initial sync, the value of # last_migration_xid_at_initial_sync is non-zero. 
And due to the lag between any changes getting committed to the # database and those same changes getting propagated through the cached WAL, we may be looking at the same migration diff --git a/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex b/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex deleted file mode 100644 index 2470fca85d..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex +++ /dev/null @@ -1,405 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.assign( - assign_table_full_name text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text, - if_fn text -) SECURITY DEFINER AS $$ - -DECLARE - assign_table TEXT; - assign_schema TEXT; - assignment_id uuid; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - if_fn_not_null TEXT; - role_def TEXT; - assignment_name TEXT; - user_column_type TEXT; - scope_key_count int; - user_key_count int; - scope_key RECORD; - user_key RECORD; - primary_key RECORD; - -BEGIN - - -- return types for the introspection of foreign keys - CREATE TEMP TABLE scope_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE user_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE pkeys - ( - columns name[10], - types information_schema.character_data[10] - ); - - SELECT n[1], n[2] INTO assign_schema, assign_table FROM parse_ident(assign_table_full_name) n; - - -- gets the columns and types for the assign_table's primary key - INSERT INTO pkeys SELECT * from <%= schema() %>.find_pk(assign_schema, assign_table); - SELECT * FROM pkeys LIMIT 1 INTO primary_key; - - - -- gets the foreign key pointing 
to the user - INSERT INTO user_fkeys SELECT * from <%= schema() %>.find_fk_for_column(assign_schema,assign_table, user_column_name); - SELECT COUNT(*) FROM user_fkeys INTO user_key_count; - - IF user_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not find a foreign key pointing to the user table'; - END IF; - - SELECT * FROM user_fkeys LIMIT 1 INTO user_key; - - SELECT data_type FROM information_schema.columns - WHERE table_name = user_key.to_table and column_name = user_key.to_columns[1] - INTO user_column_type; - - -- sets some things to default strings if the function args are null - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - IF if_fn IS NULL THEN if_fn_not_null = 'true'; ELSE if_fn_not_null = if_fn; END IF; - - IF role_name_string IS NULL AND role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name'; - END IF; - - IF NOT role_name_string IS NULL AND NOT role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name but not both'; - END IF; - - -- assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - IF role_name_string IS NULL THEN - role_name_not_null = '__none__'; - role_column_not_null = role_column_name; - role_def = format('NEW.%s', role_column_name); - ELSE - role_name_not_null = role_name_string; - role_column_not_null = '__none__'; - role_def = format(E'\'%s\'', role_name_string); - END IF; - - -- reads the foreign key for the scope if it exists - IF NOT scope IS NULL THEN - INSERT INTO scope_fkeys SELECT * from <%= schema() %>.find_fk_to_table(assign_schema,assign_table, scope); - SELECT COUNT(*) FROM scope_fkeys INTO scope_key_count; - - IF scope_key_count > 1 THEN - 
DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Too many foreign keys for the scope table'; - END IF; - - IF scope_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Could not find a foreign key pointing to the scope table'; - END IF; - - SELECT * FROM scope_fkeys LIMIT 1 INTO scope_key; - - END IF; - - -- Creates the assignment itself. - INSERT INTO <%= assignments_table() %> (table_name, scope_table, user_column, role_name, role_column, if_fn) - VALUES (assign_table_full_name, scope_table_not_null, user_column_name, role_name_not_null, role_column_not_null, if_fn) - RETURNING id INTO assignment_id; - - if assignment_id IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not create assignment'; - END IF; - - - -- this is a canonical name used by components owned by this assignment - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - /* - Creates big fat join table. Every time the assignment rule is used and a user is given a role a row will be created - in both this join table and in the table electric.roles. This table serves as a polymorphic join between the roles - table and the different types of both scope table and assignment table, and handles clean up correctly via fk cascade on delete. - - This table have 4 or 5 foreign keys - - It has foreign keys with ON DELETE CASCADE pointing to: - - The assignment created above. This assignment is the rule that causes all the entries in this join to be created in owns them. - - The user that the role has been given too. - - The assignment table item that assigned the role. - - The row in the scope table if one is specified. 
- - So that any of these being deleted will remove the join. - - And it has a foreign key pointing to the role in electric.roles which it will delete with a trigger. - */ - - EXECUTE format('CREATE TABLE IF NOT EXISTS <%= schema() %>.assignment_%s_join ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - user_id %s, - assignment_id uuid, - role_id uuid, - FOREIGN KEY(role_id) - REFERENCES <%= roles_table() %> (id), - FOREIGN KEY(user_id) - REFERENCES %s.%s(%s) - ON DELETE CASCADE, - FOREIGN KEY(assignment_id) - REFERENCES <%= assignments_table() %> (id) - ON DELETE CASCADE - );', - assignment_name, - user_key.to_types[1], - user_key.to_schema, - user_key.to_table, - user_key.to_columns[1] - ); - - -- Adds a foreign key to the join table pointing to the assign_table - for counter in 1..ARRAY_LENGTH(primary_key.columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema() %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s_%s %s;', - assignment_name, - assign_table, - primary_key.columns[counter], - primary_key.types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema() %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_%s_fk - FOREIGN KEY (%s_%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - assign_table, - assign_table, - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - assign_schema, - assign_table, - ARRAY_TO_STRING(primary_key.columns, ', ') - ); - - -- defines insert and update trigger functions for the assign_table - -- when there is no scope - IF scope IS NULL THEN - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - role_key uuid; - join_key uuid; - BEGIN - - SELECT id, role_id FROM <%= schema() %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table() %> 
(user_id, role) - VALUES (NEW.%9$s, %10$s) returning id INTO role_key; - INSERT INTO <%= schema() %>.assignment_%1$s_join (user_id, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema() %>.assignment_%1$s_join SET user_id = NEW.%9$s - WHERE id = join_key; - UPDATE <%= roles_table() %> SET (user_id, role) = (NEW.%9$s, %10s) - WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema() %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - '', - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope - ); - - -- and when there is a scope - ELSE - for counter in 1..ARRAY_LENGTH(scope_key.from_columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema() %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s %s;', - assignment_name, - scope_key.from_columns[counter], - scope_key.to_types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema() %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_scope_fk - FOREIGN KEY (%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - ARRAY_TO_STRING(scope_key.from_columns, ', '), - scope_key.to_schema, - scope_key.to_table, - ARRAY_TO_STRING(scope_key.to_columns, ', ') - ); - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - scope_key TEXT; - scope_list TEXT[]; - role_key uuid; - join_key uuid; - BEGIN - - scope_list := ARRAY[NEW.%3$s::text]; - scope_key := ARRAY_TO_STRING(scope_list, \', \' ); - - SELECT id, role_id FROM <%= schema() %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' 
AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table() %> (user_id, role, scope_table, scope_id) - VALUES (NEW.%9$s, %10$s, \'%11$s\', scope_key) returning id INTO role_key; - INSERT INTO <%= schema() %>.assignment_%1$s_join (user_id, %12$s, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%13$s, NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema() %>.assignment_%1$s_join SET (user_id, %12$s) - = (NEW.%9$s, NEW.%13$s) WHERE id = join_key; - UPDATE <%= roles_table() %> SET (user_id, role, scope_table, scope_id) - = (NEW.%9$s, %10$s, \'%11$s\', scope_key) WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema() %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - ARRAY_TO_STRING(scope_key.from_columns, '::text, NEW.'), - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope, - --12 - ARRAY_TO_STRING(scope_key.from_columns, ', '), - --13 - ARRAY_TO_STRING(scope_key.from_columns, ', NEW.') - ); - END IF; - - -- adds a trigger to the join table that deletes the role itself - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.cleanup_role_%s() RETURNS TRIGGER SECURITY DEFINER - AS $%s$ - BEGIN - DELETE FROM <%= roles_table() %> WHERE id = OLD.role_id; - RETURN OLD; - END; - $%s$ LANGUAGE plpgsql;', - assignment_name, - '', - '' - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_cleanup_role_%s - AFTER DELETE ON <%= schema() %>.assignment_%s_join - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>.cleanup_role_%s();', - assignment_name, - assignment_name, - assignment_name - ); - - -- adds the insert and 
update triggers functions to the assign_table - EXECUTE format('CREATE OR REPLACE TRIGGER electric_insert_role_%s - AFTER INSERT ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>.upsert_role_%s();', - assignment_name, - assign_table, - assignment_name - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_update_role_%s - AFTER UPDATE ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>.upsert_role_%s();', - assignment_name, - assign_table, - assignment_name - ); - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; -END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex b/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex deleted file mode 100644 index 0a539e67c3..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex +++ /dev/null @@ -1,24 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.grant( - privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text, - using_path text, - check_fn text -) SECURITY DEFINER AS $$ - - DECLARE - col TEXT; - - BEGIN - FOREACH col IN ARRAY columns - LOOP - INSERT INTO <%= grants_table() %> ( privilege, on_table, role , column_name, scope, using_path, check_fn) - VALUES (privilege_name, on_table_name, role_name, col, scope_name, using_path, check_fn) - ON CONFLICT ON CONSTRAINT grants_pkey DO UPDATE SET - (using_path, check_fn) = (EXCLUDED.using_path, EXCLUDED.check_fn); - END LOOP; - END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex b/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex deleted file mode 100644 index efc66eeb5c..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex +++ /dev/null @@ -1,68 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.unassign( - assign_table_full_name text, - scope text, - user_column_name text, - role_name_string 
text, - role_column_name text -) SECURITY DEFINER AS $$ -DECLARE - -- assign_schema TEXT; - -- assign_table TEXT; - assignment_id uuid; - assignment_name TEXT; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - -BEGIN - IF role_name_string IS NULL THEN role_name_not_null = '__none__'; ELSE role_name_not_null = role_name_string; END IF; - IF role_column_name IS NULL THEN role_column_not_null = '__none__'; ELSE role_column_not_null = role_column_name; END IF; - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - - -- SELECT n[1], n[2] INTO assign_schema, assign_table FROM parse_ident(assign_table_full_name) n; - - SELECT id FROM <%= assignments_table() %> - WHERE table_name = assign_table_full_name - AND scope_table = scope_table_not_null - AND user_column = user_column_name - AND role_name = role_name_not_null - AND role_column = role_column_not_null - INTO assignment_id; - - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - -- remove triggers - EXECUTE format('DROP TRIGGER IF EXISTS electric_cleanup_role_%s ON <%= schema() %>.assignment_%s_join;', - assignment_name, - assignment_name - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_insert_role_%s ON %s;', - assignment_name, - assign_table_full_name - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_update_role_%s ON %s;', - assignment_name, - assign_table_full_name - ); - - -- remove functions - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema() %>.cleanup_role_%s;', - assignment_name - ); - - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema() %>.upsert_role_%s;', - assignment_name - ); - - -- remove join table - EXECUTE format('DROP TABLE IF EXISTS <%= schema() %>.assignment_%s_join;', - assignment_name - ); - - -- remove assignment - DELETE FROM <%= assignments_table() %> WHERE id = assignment_id; -END; -$$ LANGUAGE plpgsql; - diff --git 
a/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex b/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex deleted file mode 100644 index 577120e134..0000000000 --- a/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex +++ /dev/null @@ -1,47 +0,0 @@ -CREATE OR REPLACE FUNCTION <%= schema() %>.find_fk_to_table( - src_schema text, - src_table text, - dst_table text -) RETURNS TABLE ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] -) SECURITY DEFINER AS $$ - DECLARE - dst_schema name; - dst_name name; - BEGIN - -- dst_table is a quoted, fully qualified table, e.g. '"public"."assignments"' - SELECT n[1], n[2] INTO dst_schema, dst_name FROM parse_ident(dst_table) n; - - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' - AND tbl.relname = src_table - AND f_tbl.relname 
= dst_name - AND sch.nspname = src_schema - AND f_sch.nspname = dst_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/src/electric_ddlx_parser.yrl b/components/electric/src/electric_ddlx_parser.yrl index 491e72a542..0141f8d4d3 100644 --- a/components/electric/src/electric_ddlx_parser.yrl +++ b/components/electric/src/electric_ddlx_parser.yrl @@ -9,6 +9,9 @@ Nonterminals sqlite_stmt table_ident identifier + record + field_access + type_cast scoped_role grant_scoped_role scope @@ -21,12 +24,11 @@ Nonterminals func_args permissions privilege - privileges using_clause scope_path column_list columns - check_clause + where_clause . % terminals are the outputs of the tokeniser, so e.g. the terminal @@ -36,10 +38,10 @@ Nonterminals Terminals '.' '(' ')' ',' ':' 'ALTER' 'TABLE' 'DISABLE' 'ENABLE' 'ELECTRIC' 'NULL' 'UNASSIGN' 'ASSIGN' 'TO' 'IF' - 'GRANT' 'ON' 'USING' 'SELECT' 'INSERT' 'UPDATE' 'DELETE' 'ALL' 'READ' 'WRITE' 'CHECK' + 'GRANT' 'ON' 'USING' 'SELECT' 'INSERT' 'UPDATE' 'DELETE' 'ALL' 'READ' 'WRITE' 'WHERE' 'REVOKE' 'FROM' 'SQLITE' 'AUTHENTICATED' 'ANYONE' 'PRIVILEGES' - string int float + string integer float unquoted_identifier quoted_identifier '=' '>' '<' '<=' '>=' '!=' '<>' '+' '/' '*' '-' 'AND' 'IS' 'NOT' 'OR' @@ -84,7 +86,7 @@ assign_stmt -> 'ELECTRIC' 'ASSIGN' scoped_role 'TO' column_ident 'IF' if_expr : unassign_stmt -> 'ELECTRIC' 'UNASSIGN' scoped_role 'FROM' column_ident : unassign_cmd('$3' ++ '$5'). % ELECTRIC GRANT -grant_stmt -> 'ELECTRIC' 'GRANT' permissions 'ON' table_ident 'TO' grant_scoped_role using_clause check_clause : grant_cmd('$3' ++ '$5' ++ '$7' ++ '$8' ++ '$9'). +grant_stmt -> 'ELECTRIC' 'GRANT' permissions 'ON' table_ident 'TO' grant_scoped_role using_clause where_clause : grant_cmd('$3' ++ '$5' ++ '$7' ++ '$8' ++ '$9'). 
% ELECTRIC REVOKE revoke_stmt -> 'ELECTRIC' 'REVOKE' permissions 'ON' table_ident 'FROM' grant_scoped_role : revoke_cmd('$3' ++ '$5' ++ '$7'). @@ -101,6 +103,10 @@ table_ident -> identifier '.' identifier : [{table_schema, '$1'}, {table_name, ' identifier -> unquoted_identifier : unquoted_identifier('$1'). identifier -> quoted_identifier : unwrap('$1'). +%% upcase the record name, so e.g. it's always `AUTH.user_id`, `NEW.field_name` etc +record -> unquoted_identifier : 'Elixir.String':upcase(unwrap('$1')). +record -> quoted_identifier : 'Elixir.String':upcase(unwrap('$1')). + grant_scoped_role -> 'AUTHENTICATED' : [{role_name, 'AUTHENTICATED'}]. grant_scoped_role -> 'ANYONE' : [{role_name, 'ANYONE'}]. grant_scoped_role -> scoped_role : '$1'. @@ -129,7 +135,9 @@ if_expr -> '(' expr ')' : [{'if', erlang:iolist_to_binary('$2')}]. expr -> '(' expr ')' : ["(", '$2', ")"]. expr -> expr op expr : ['$1', " ", '$2', " ", '$3']. %[{expr, [{op, '$2'}, {left, '$1'}, {right, '$3'}]}]. +expr -> field_access : ['$1']. expr -> identifier '(' func_args ')' : ['$1', "(", '$3', ")"]. % [{func_call, '$1', '$3'}]. +expr -> type_cast : ['$1']. expr -> identifier : ['$1']. % [{name, '$1'}]. expr -> const : ['$1']. % [{const, '$1'}]. @@ -149,28 +157,29 @@ op -> 'OR' : ["OR"]. op -> 'NOT' : ["NOT"]. op -> 'IS' : ["IS"]. +field_access -> record '.' identifier : ['$1', ".", '$3']. + +type_cast -> field_access ':' ':' identifier : ['$1', "::", '$4']. +type_cast -> identifier ':' ':' identifier : ['$1', "::", '$4']. + const -> string : ["'", unwrap('$1'), "'"]. -const -> int : erlang:integer_to_list(unwrap('$1')). +const -> integer : erlang:integer_to_list(unwrap('$1')). const -> float : erlang:float_to_list(unwrap('$1')). func_args -> '$empty' : []. func_args -> expr : ['$1']. func_args -> expr ',' func_args : ['$1', "," , '$3']. -permissions -> privileges column_list : [{privilege, '$1'}] ++ '$2'. - -privileges -> '$empty' : []. 
-privileges -> 'ALL' : [<<"select">>, <<"insert">>, <<"update">>, <<"delete">>]. -privileges -> 'ALL' 'PRIVILEGES' : [<<"select">>, <<"insert">>, <<"update">>, <<"delete">>]. -privileges -> privilege : '$1'. -privileges -> privilege ',' privileges : lists:uniq('$1' ++ '$3'). +permissions -> privilege column_list : [{privilege, '$1'}] ++ '$2'. -privilege -> 'SELECT' : [<<"select">>]. -privilege -> 'INSERT' : [<<"insert">>]. -privilege -> 'UPDATE' : [<<"update">>]. -privilege -> 'DELETE' : [<<"delete">>]. -privilege -> 'READ' : [<<"select">>]. -privilege -> 'WRITE' : [<<"insert">>, <<"update">>, <<"delete">>]. +privilege -> 'ALL' : ['SELECT', 'INSERT', 'UPDATE', 'DELETE']. +privilege -> 'ALL' 'PRIVILEGES' : ['SELECT', 'INSERT', 'UPDATE', 'DELETE']. +privilege -> 'SELECT' : ['SELECT']. +privilege -> 'INSERT' : ['INSERT']. +privilege -> 'UPDATE' : ['UPDATE']. +privilege -> 'DELETE' : ['DELETE']. +privilege -> 'READ' : ['SELECT']. +privilege -> 'WRITE' : ['INSERT', 'UPDATE', 'DELETE']. column_list -> '$empty' : []. column_list -> '(' columns ')' : [{column_names, '$2'}] . @@ -186,8 +195,8 @@ scope_path -> '$empty' : []. scope_path -> identifier : ['$1']. scope_path -> identifier '/' scope_path : ['$1' | '$3']. -check_clause -> '$empty' : []. -check_clause -> 'CHECK' '(' expr ')' : [{check, erlang:iolist_to_binary('$3')}]. +where_clause -> '$empty' : []. +where_clause -> 'WHERE' '(' expr ')' : [{check, erlang:iolist_to_binary('$3')}]. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Erlang code. 
diff --git a/components/electric/test/electric/ddlx/command_test.exs b/components/electric/test/electric/ddlx/command_test.exs new file mode 100644 index 0000000000..1c4b181ea8 --- /dev/null +++ b/components/electric/test/electric/ddlx/command_test.exs @@ -0,0 +1,107 @@ +defmodule Electric.DDLX.CommandTest do + use ExUnit.Case, async: true + + alias Electric.DDLX + alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms + + def parse(sql) do + assert {:ok, cmd} = DDLX.Parser.parse(sql) + cmd + end + + def pg_sql(ddlx) do + ddlx + |> parse() + |> Command.pg_sql() + end + + def parse_pb(hex) do + [hex, "::bytea" <> _] = String.split(hex, "'") + {:ok, bytes} = Base.decode16(hex) + assert {:ok, ddlx} = Protox.decode(bytes, SatPerms.DDLX) + ddlx + end + + describe "pg_sql/1" do + test "ELECTRIC ENABLE" do + ddlx = "ALTER TABLE my_table ENABLE ELECTRIC" + + assert pg_sql(ddlx) == [ + "CALL electric.enable('\"public\".\"my_table\"');\n" + ] + end + + test "ELECTRIC ASSIGN" do + ddlx = "ELECTRIC ASSIGN (projects, memberships.role) TO memberships.user_id" + + assert [ + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex + ] = pg_sql(ddlx) + + assert %SatPerms.DDLX{assigns: [assign]} = parse_pb(hex) + + assert %SatPerms.Assign{ + table: %{schema: "public", name: "memberships"}, + scope: %{schema: "public", name: "projects"}, + user_column: "user_id", + role_column: "role" + } = assign + end + + test "ELECTRIC UNASSIGN" do + ddlx = "ELECTRIC UNASSIGN (projects, memberships.role) FROM memberships.user_id" + + assert [ + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex + ] = pg_sql(ddlx) + + assert %SatPerms.DDLX{unassigns: [unassign]} = parse_pb(hex) + + assert %SatPerms.Unassign{ + table: %{schema: "public", name: "memberships"}, + scope: %{schema: "public", name: "projects"}, + user_column: "user_id", + role_column: "role" + } = unassign + end + + test "ELECTRIC GRANT" do + ddlx = "ELECTRIC GRANT INSERT ON issues TO (projects, 
'member')" + + assert [ + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex + ] = pg_sql(ddlx) + + assert %SatPerms.DDLX{grants: [grant]} = parse_pb(hex) + + assert %SatPerms.Grant{ + privilege: :INSERT, + table: %{schema: "public", name: "issues"}, + role: %SatPerms.RoleName{role: {:application, "member"}}, + columns: nil, + scope: %{schema: "public", name: "projects"}, + path: nil, + check: nil + } = grant + end + + test "ELECTRIC REVOKE" do + ddlx = "ELECTRIC REVOKE INSERT ON issues FROM (projects, 'member')" + + assert [ + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex + ] = pg_sql(ddlx) + + assert %SatPerms.DDLX{revokes: [revoke]} = parse_pb(hex) + + assert %SatPerms.Revoke{ + privilege: :INSERT, + table: %{schema: "public", name: "issues"}, + role: %SatPerms.RoleName{role: {:application, "member"}}, + scope: %{schema: "public", name: "projects"}, + path: nil + } = revoke + end + end +end diff --git a/components/electric/test/electric/ddlx/ddlx_commands_test.exs b/components/electric/test/electric/ddlx/ddlx_commands_test.exs deleted file mode 100644 index 47059f7721..0000000000 --- a/components/electric/test/electric/ddlx/ddlx_commands_test.exs +++ /dev/null @@ -1,388 +0,0 @@ -defmodule Electric.DDLX.DDLXCommandsTest do - use Electric.Extension.Case, async: false - import ElectricTest.DDLXHelpers - - alias Electric.DDLX - alias Electric.DDLX.Command - - @moduletag ddlx: true - - @electric_grants "electric.grants" - - describe "parsing statements" do - test "parse success" do - sql = - "ELECTRIC GRANT UPDATE ON thing.Köln_en$ts TO 'projects:house.admin' USING project_id CHECK (name = Paul);" - - {:ok, _} = DDLX.parse(sql) - end - - test "parse fail" do - sql = - "ELECTRIC GRANT JUNK ON thing.Köln_en$ts TO 'projects:house.admin' USING project_id CHECK (name = Paul);" - - {:error, %Command.Error{sql: ^sql, message: "syntax error before: <<\"JUNK\">>"}} = - DDLX.parse(sql) - end - end - - describe "creating rows in postgres 
from command structs" do - test_tx "adding a grant from electric", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil - } - - query(conn, Electric.DDLX.command_to_postgres(grant1)) - - assert_rows(conn, @electric_grants, [ - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "*", - quote_table(grant1.scope), - nil, - nil - ] - ]) - end - - test_tx "adding a grant from electric twice", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil - } - - sql = Electric.DDLX.command_to_postgres(grant1) - - {:ok, _, _} = query(conn, sql) - {:ok, _, _} = query(conn, sql) - end - - test_tx "adding a grant with multiple grant columns", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["name", "description"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil - } - - query(conn, Electric.DDLX.command_to_postgres(grant1)) - - assert_rows( - conn, - @electric_grants, - [ - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "name", - quote_table(grant1.scope), - nil, - nil - ], - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "description", - quote_table(grant1.scope), - nil, - nil - ] - ] - ) - end - - test_tx "adding and delete a grant", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil - } - - query(conn, Electric.DDLX.command_to_postgres(grant1)) - - assert_rows(conn, @electric_grants, [ - [ - "update", - quote_table(grant1.on_table), - 
"house.admin", - "*", - quote_table(grant1.scope), - nil, - nil - ] - ]) - - revoke = %Command.Revoke{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"} - } - - query(conn, Command.pg_sql(revoke)) - - assert_rows( - conn, - @electric_grants, - [] - ) - end - - test_tx "adding and delete a grant no op", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil - } - - query(conn, Electric.DDLX.command_to_postgres(grant1)) - - assert_rows(conn, @electric_grants, [ - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "*", - quote_table(grant1.scope), - nil, - nil - ] - ]) - - revoke = %Command.Revoke{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["name"], - scope: {"public", "projects"} - } - - query(conn, Electric.DDLX.command_to_postgres(revoke)) - - assert_rows(conn, @electric_grants, [ - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "*", - quote_table(grant1.scope), - nil, - nil - ] - ]) - end - - test_tx "adding a grant with using path", fn conn -> - grant1 = %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: "project_id", - check_fn: nil - } - - query(conn, Electric.DDLX.command_to_postgres(grant1)) - - assert_rows(conn, @electric_grants, [ - [ - "update", - quote_table(grant1.on_table), - "house.admin", - "*", - quote_table(grant1.scope), - "project_id", - nil - ] - ]) - end - - test_tx "assign creates an assignment", fn conn -> - # {:ok, conn} = init_helper_db() - # setup_ddlx(conn) - - projects_sql = """ - CREATE TABLE projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - 
""" - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id) - REFERENCES projects(id) - ); - """ - - query(conn, memberships_sql) - - assign = %Command.Assign{ - table_name: {"public", "memberships"}, - user_column: "user_id", - scope: {"public", "projects"}, - role_name: nil, - role_column: "role", - if_statement: "hello" - } - - query(conn, Electric.DDLX.command_to_postgres(assign)) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - quote_table(assign.table_name), - quote_table(assign.scope), - "user_id", - "__none__", - "role", - "hello" - ] - ], - 1..6 - ) - end - - test_tx "unassign", fn conn -> - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES public.users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id) - REFERENCES public.projects(id) - ); - """ - - query(conn, memberships_sql) - - assign = %Command.Assign{ - table_name: {"public", "memberships"}, - user_column: "user_id", - scope: {"public", "projects"}, - role_name: nil, - role_column: "role", - if_statement: "hello" - } - - query(conn, 
Electric.DDLX.command_to_postgres(assign)) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - quote_table(assign.table_name), - quote_table(assign.scope), - "user_id", - "__none__", - "role", - "hello" - ] - ], - 1..6 - ) - - unassign = %Command.Unassign{ - table_name: {"public", "memberships"}, - user_column: "user_id", - scope: {"public", "projects"}, - role_name: nil, - role_column: "role" - } - - query(conn, Electric.DDLX.command_to_postgres(unassign)) - - assert_rows_slice( - conn, - "electric.assignments", - [], - 1..6 - ) - end - - # test_tx "disable", fn conn -> - # disable = %Disable{ - # table_name: "test" - # } - # - # {:ok, _, _result} = query(conn, Electric.DDLX.command_to_postgres(disable)) - # end - # - # test_tx "sqlite", fn conn -> - # sqlite = %SQLite{ - # sqlite_statement: "--hello" - # } - # - # {:ok, _, _result} = query(conn, Electric.DDLX.command_to_postgres(sqlite)) - # end - end -end diff --git a/components/electric/test/electric/ddlx/ddlx_postgres_test.exs b/components/electric/test/electric/ddlx/ddlx_postgres_test.exs deleted file mode 100644 index 670b65ec51..0000000000 --- a/components/electric/test/electric/ddlx/ddlx_postgres_test.exs +++ /dev/null @@ -1,1116 +0,0 @@ -defmodule Electric.DDLX.DDLXPostgresTest do - use Electric.Extension.Case, async: false - import ElectricTest.DDLXHelpers - - @moduletag ddlx: true - - def list_tables(conn, schema \\ "public") do - {:ok, _cols, rows} = - query( - conn, - "select table_name from information_schema.tables WHERE table_schema = $1", - [schema] - ) - - for [col | _] <- rows, do: col - end - - def assert_tables(conn, table_names) do - existing = list_tables(conn) - assert MapSet.new(existing) == MapSet.new(table_names) - end - - def assert_table(conn, table_name, desired_columns) do - existing_columns = list_columns(conn, table_name) - - Enum.each(desired_columns, fn {column_name, assertions} -> - for {attribute_name, value} <- assertions do - # 
IO.inspect(existing_columns[column_name][attribute_name]) - # IO.inspect(value) - assert( - existing_columns[column_name][attribute_name] == value, - "Column assertion failed on #{table_name} #{column_name} #{attribute_name}, #{existing_columns[column_name][attribute_name]} != #{value}\n" - ) - end - end) - end - - def list_columns(conn, table_name) do - {:ok, columns, rows} = - query(conn, "select * from information_schema.columns WHERE table_name = $1", [table_name]) - - column_names = Enum.map(columns, &elem(&1, 1)) - column_name_index = Enum.find_index(column_names, &(&1 == "column_name")) - - for row <- rows, into: %{} do - column_name = Enum.at(row, column_name_index) - - attrs = - for {k, v} <- Enum.zip(column_names, row), into: %{} do - {k, v} - end - - {column_name, attrs} - end - end - - def get_foreign_keys(conn, table_name) do - query_str = """ - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition)::text[] AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition)::text[] AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = $1 and column_name = col.attname) ORDER BY f_u.attposition)::text[] AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = $2 - GROUP BY 
"from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - """ - - {:ok, _cols, rows} = query(conn, query_str, [table_name, table_name]) - - rows - end - - describe "testing creation of table and functions in postgres on init" do - test_tx "creates grants table", fn conn -> - grants_column_asserts = %{ - "privilege" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "on_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "column_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "using_path" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - }, - "check_fn" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - } - } - - assert_table(conn, "grants", grants_column_asserts) - end - - test_tx "creates assignments table", fn conn -> - assignments_column_asserts = %{ - "id" => %{ - "udt_name" => "uuid", - "is_nullable" => "NO" - }, - "table_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "user_column" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role_column" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "if_fn" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - } - } - - assert_table(conn, "assignments", assignments_column_asserts) - end - - test_tx "creates roles table", fn conn -> - roles_column_asserts = %{ - "id" => %{ - "udt_name" => "uuid", - "is_nullable" => "NO" - }, - "role" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "user_id" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "YES" - }, - "scope_id" => %{ - 
"udt_name" => "varchar", - "is_nullable" => "YES" - } - } - - assert_table(conn, "roles", roles_column_asserts) - end - - test_tx "add ddlx functions", fn conn -> - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type IN ('FUNCTION', 'PROCEDURE') - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - expected_funcs = [ - "enable", - "disable", - "grant", - "revoke", - "assign", - "unassign", - "sqlite", - "find_fk_to_table", - "find_fk_for_column", - "find_pk" - ] - - installed_funcs = List.flatten(rows) - - for f <- expected_funcs do - assert f in installed_funcs - end - end - end - - def set_up_assignment(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id) - REFERENCES projects(id) - ); - """ - - query(conn, memberships_sql) - end - - def set_up_assignment_compound(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL, - PRIMARY KEY (id, name) - ); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - project_name 
VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id, project_name) - REFERENCES projects(id, name) - ); - """ - - query(conn, memberships_sql) - end - - def set_up_assignment_compound_membership(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL, - PRIMARY KEY (id, name) - ); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - project_name VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id, project_name) - REFERENCES projects(id, name), - PRIMARY KEY (user_id, project_id, project_name) - ); - """ - - query(conn, memberships_sql) - end - - describe "testing postgres functions" do - test_tx "adding a grant", fn conn -> - pg_sql = """ - CALL electric.grant('update', 'things', 'admin' , ARRAY['one', 'two'], 'project', 'project_id', 'function body') - """ - - query(conn, pg_sql) - - assert_rows( - conn, - "electric.grants", - [ - ["update", "things", "admin", "one", "project", "project_id", "function body"], - ["update", "things", "admin", "two", "project", "project_id", "function body"] - ] - ) - end - - test_tx "removing a grant", fn conn -> - pg_sql = """ - CALL electric.grant('update', '"public"."things"', 'admin' , ARRAY['one', 'two'], '"public"."project"', 'project_id', 'function body') - """ - - query(conn, pg_sql) - - assert_rows( - conn, - "electric.grants", - [ - [ - "update", - ~s["public"."things"], - "admin", - "one", - ~s["public"."project"], - "project_id", - "function body" - ], - [ - "update", - 
~s["public"."things"], - "admin", - "two", - ~s["public"."project"], - "project_id", - "function body" - ] - ] - ) - - pg_sql2 = """ - CALL electric.revoke('update', '"public"."things"', 'admin' , ARRAY['one'], '"public"."project"') - """ - - query(conn, pg_sql2) - - assert_rows( - conn, - "electric.grants", - [ - [ - "update", - ~s["public"."things"], - "admin", - "two", - ~s["public"."project"], - "project_id", - "function body" - ] - ] - ) - end - - test_tx "assign creates an assignment", fn conn -> - set_up_assignment(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - "__none__", - "role", - "hello" - ] - ], - 1..6 - ) - end - - test_tx "assign with scope compound key makes join table", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - {:ok, _, rows} = query(conn, "select * from electric.assignments") - - row = List.first(rows) - - assert Enum.slice(row, 1..6) == [ - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - "__none__", - "role", - "hello" - ] - - ## checking the join table that is created - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - join_table_name = "assignment_#{uuid_string}_join" - - tables = list_tables(conn, "electric") - - assert join_table_name in tables - columns = list_columns(conn, join_table_name) - - assert %{ - 
"assignment_id" => _, - "id" => _, - "project_id" => _, - "project_name" => _, - "memberships_id" => _, - "user_id" => _ - } = columns - - fks = get_foreign_keys(conn, join_table_name) - - assert Enum.sort([ - [ - "electric", - join_table_name, - ["assignment_id"], - "electric", - "assignments", - ["id"], - ["uuid"] - ], - [ - "electric", - join_table_name, - ["role_id"], - "electric", - "roles", - ["id"], - ["uuid"] - ], - [ - "electric", - join_table_name, - ["memberships_id"], - "public", - "memberships", - ["id"], - ["uuid"] - ], - [ - "electric", - join_table_name, - ["project_id", "project_name"], - "public", - "projects", - ["id", "name"], - ["uuid", "character varying"] - ], - [ - "electric", - join_table_name, - ["user_id"], - "public", - "users", - ["id"], - ["uuid"] - ] - ]) == Enum.sort(fks) - end - - test_tx "assign makes functions and triggers", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - {:ok, _, rows} = query(conn, "select * from electric.assignments") - - row = List.first(rows) - - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert ["upsert_role_#{uuid_string}"] in rows - assert ["cleanup_role_#{uuid_string}"] in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] in rows - assert ["electric_update_role_#{uuid_string}"] in rows - 
- triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'assignment_#{uuid_string}_join'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - assert ["electric_cleanup_role_#{uuid_string}"] in rows - end - - test_tx "role assignment", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2); - """ - - {:ok, _, _rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "role assignment with compound membership pk", fn conn -> - set_up_assignment_compound_membership(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - 
{:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2); - """ - - {:ok, _, _rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "dupelicate assignment fails", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:error, {:error, :error, _code, :unique_violation, _message, params}} = query(conn, pg_sql) - assert params[:constraint_name] == "unique_assign" - end - - test_tx "role update", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, 
add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2) returning id; - """ - - {:ok, _, rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - membership_id = List.first(List.first(rows)) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "member", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "role removed by func", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => 'admin', - role_column_name => null, - if_fn => E'NEW.role = \\'admin\\''); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2) returning id; - """ 
- - {:ok, _, rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - membership_id = List.first(List.first(rows)) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - assert_rows_slice( - conn, - "electric.roles", - [], - 1..4 - ) - end - - test_tx "assign with no scope from string and update", fn conn -> - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id) - ); - """ - - query(conn, memberships_sql) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => null, - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => null); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - assert_rows_slice( - conn, - "electric.assignments", - [[~s["public"."memberships"], "__none__", "user_id", "__none__", "role", nil]], - 1..6 - ) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _cols, rows} = query(conn, add_user_sql) - - [[person_id | _] | _] = rows - - add_membership_sql = """ - INSERT INTO memberships ( role, user_id ) VALUES ( 'admin', $1) returning id; - """ - - {:ok, _, rows} = query(conn, add_membership_sql, [person_id]) - - [[membership_id | _] | _] = rows - - assert_rows_slice( - conn, - "electric.roles", - [["admin", person_id, nil, nil]], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET 
role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - assert_rows_slice( - conn, - "electric.roles", - [["member", person_id, nil, nil]], - 1..4 - ) - end - - test_tx "assign fails with bad scope", fn conn -> - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id) - ); - """ - - query(conn, memberships_sql) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => 'member', - role_column_name => null, - if_fn => null); - """ - - {:error, _error} = query(conn, pg_sql) - end - - test_tx "unassign cleans up", fn conn -> - set_up_assignment_compound(conn) - - pg_sql = """ - CALL electric.assign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - {:ok, _, rows} = query(conn, "select * from electric.assignments") - row = List.first(rows) - - assert Enum.slice(row, 1..6) == [ - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - "__none__", - "role", - "hello" - ] - - ## checking the join table that is created - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - - join_table_name = "assignment_#{uuid_string}_join" - - tables = 
list_tables(conn, "electric") - - assert join_table_name in tables - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert ["upsert_role_#{uuid_string}"] in rows - assert ["cleanup_role_#{uuid_string}"] in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] in rows - assert ["electric_update_role_#{uuid_string}"] in rows - - pg_sql = """ - CALL electric.unassign( - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - tables = list_tables(conn, "electric") - - assert join_table_name not in tables - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert ["upsert_role_#{uuid_string}"] not in rows - assert ["cleanup_role_#{uuid_string}"] not in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] not in rows - assert ["electric_update_role_#{uuid_string}"] not in rows - end - end -end diff --git a/components/electric/test/electric/ddlx/parser/tokenizer_test.exs b/components/electric/test/electric/ddlx/parser/tokenizer_test.exs index fce717285b..f6e853756c 100644 --- a/components/electric/test/electric/ddlx/parser/tokenizer_test.exs +++ b/components/electric/test/electric/ddlx/parser/tokenizer_test.exs @@ -181,6 +181,21 @@ 
defmodule Electric.DDLX.Parser.TokenizerTest do ] = tokens end + test "field accesses" do + tokens = + Tokenizer.tokens(~s[ROW.user_id = AUTH.user_id]) + + assert [ + {:unquoted_identifier, {1, 0, nil}, "ROW"}, + {:., {1, 3, nil}}, + {:unquoted_identifier, {1, 4, nil}, "user_id"}, + {:=, {1, 12, nil}}, + {:unquoted_identifier, {1, 14, nil}, "AUTH"}, + {:., {1, 18, nil}}, + {:unquoted_identifier, {1, 19, nil}, "user_id"} + ] = tokens + end + test "comments" do src = """ -- this is my first comment diff --git a/components/electric/test/electric/ddlx/parser_test.exs b/components/electric/test/electric/ddlx/parser_test.exs index c85933e37b..cef5645e62 100644 --- a/components/electric/test/electric/ddlx/parser_test.exs +++ b/components/electric/test/electric/ddlx/parser_test.exs @@ -4,34 +4,48 @@ defmodule Electric.DDLX.ParserTest do alias Electric.DDLX.Parser alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms + alias ElectricTest.PermissionsHelpers.Proto describe "ENABLE ELECTRIC" do test "parse enable" do sql = "ALTER TABLE things ENABLE ELECTRIC;" {:ok, result} = Parser.parse(sql) - assert result == %Command.Enable{ - table_name: {"public", "things"} - } + assert result == + %Command{ + action: %Command.Enable{ + table_name: {"public", "things"} + }, + stmt: sql, + tables: [{"public", "things"}], + tag: "ELECTRIC ENABLE" + } end test "parse enable with quoted names" do sql = ~s[ALTER TABLE "Private"."Items" ENABLE ELECTRIC;] - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Enable{ - table_name: {"Private", "Items"} + assert parse(sql) == %Command{ + action: %Command.Enable{ + table_name: {"Private", "Items"} + }, + stmt: sql, + tables: [{"Private", "Items"}], + tag: "ELECTRIC ENABLE" } end test "parse enable with unquoted uppercase names" do sql = ~s[ALTER TABLE Private.Items ENABLE ELECTRIC;] - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Enable{ - table_name: {"private", "items"} + assert parse(sql) == %Command{ + 
action: %Command.Enable{ + table_name: {"private", "items"} + }, + stmt: sql, + tables: [{"private", "items"}], + tag: "ELECTRIC ENABLE" } end @@ -41,148 +55,223 @@ defmodule Electric.DDLX.ParserTest do ddlx <- Electric.Postgres.SQLGenerator.DDLX.enable(table: table) ) do # IO.puts(ddlx) - assert {:ok, %Command.Enable{} = cmd} = Parser.parse(ddlx, default_schema: "my_default") + assert {:ok, %Command{action: %Command.Enable{} = cmd}} = + Parser.parse(ddlx, default_schema: "my_default") + assert cmd.table_name == normalise(table, "my_default") end end end + def parse(sql) do + assert {:ok, result} = Parser.parse(sql, default_schema: "my_default") + result + end + describe "ELECTRIC ASSIGN" do test "www example 1" do - assert {:ok, - %Command.Assign{ - table_name: {"my_default", "admin_users"}, - user_column: "user_id", - scope: nil, - role_name: "admin", - role_column: nil, - if_statement: nil - }} = - Parser.parse("ELECTRIC ASSIGN 'admin' TO admin_users.user_id;", - default_schema: "my_default" - ) + sql = "ELECTRIC ASSIGN 'admin' TO admin_users.user_id;" - assert {:ok, - %Command.Assign{ - table_name: {"application", "admin_users"}, - user_column: "user_id", - scope: nil, - role_name: "admin", - role_column: nil, - if_statement: nil - }} = - Parser.parse("ELECTRIC ASSIGN 'admin' TO application.admin_users.user_id;", - default_schema: "my_default" - ) + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + scope: nil, + role_name: "admin", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "admin_users"}], + tag: "ELECTRIC ASSIGN" + } + + sql = "ELECTRIC ASSIGN 'admin' TO admin_users.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", + table: Proto.table("my_default", "admin_users"), + 
user_column: "user_id", + scope: nil, + role_name: "admin", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "admin_users"}], + tag: "ELECTRIC ASSIGN" + } end test "www example 2" do - assert {:ok, - %Command.Assign{ - table_name: {"my_default", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil - }} = - Parser.parse("ELECTRIC ASSIGN user_roles.role_name TO user_roles.user_id;", - default_schema: "my_default" - ) + sql = "ELECTRIC ASSIGN user_roles.role_name TO user_roles.user_id;" - assert {:ok, - %Command.Assign{ - table_name: {"application", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN application.user_roles.role_name TO application.user_roles.user_id;", - default_schema: "my_default" - ) + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", + table: Proto.table("my_default", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "user_roles"}], + tag: "ELECTRIC ASSIGN" + } - assert {:ok, - %Command.Assign{ - table_name: {"application", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN (NuLl, application.user_roles.role_name) TO application.user_roles.user_id;", - default_schema: "my_default" - ) + sql = "ELECTRIC ASSIGN application.user_roles.role_name TO application.user_roles.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "tcdchugjrxs6o52wlikvoaoqz5gef7du", + table: Proto.table("application", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: 
"role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"application", "user_roles"}], + tag: "ELECTRIC ASSIGN" + } - assert {:ok, - %Command.Assign{ - table_name: {"Application", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN (NuLl, \"Application\".user_roles.role_name) TO \"Application\".user_roles.user_id;", - default_schema: "my_default" - ) + sql = + "ELECTRIC ASSIGN (NuLl, application.user_roles.role_name) TO application.user_roles.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "tcdchugjrxs6o52wlikvoaoqz5gef7du", + table: Proto.table("application", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"application", "user_roles"}], + tag: "ELECTRIC ASSIGN" + } - assert {:ok, - %Command.Assign{ - table_name: {"application", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN (NuLl, Application.User_roles.Role_name) TO Application.user_roles.User_id;", - default_schema: "my_default" - ) + sql = + "ELECTRIC ASSIGN (NuLl, \"Application\".user_roles.role_name) TO \"Application\".user_roles.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "fq7ybdqfg6mee6cdnhx2ciyhzjxtwj3a", + table: Proto.table("Application", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"Application", "user_roles"}], + tag: "ELECTRIC ASSIGN" + } + + sql = + "ELECTRIC ASSIGN (NuLl, Application.User_roles.Role_name) TO Application.user_roles.User_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: 
"tcdchugjrxs6o52wlikvoaoqz5gef7du", + table: Proto.table("application", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"application", "user_roles"}], + tag: "ELECTRIC ASSIGN" + } end test "www example 3" do - assert {:ok, - %Command.Assign{ - table_name: {"my_default", "project_members"}, - user_column: "user_id", - scope: {"my_default", "projects"}, - role_name: nil, - role_column: "role", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN ( projects, project_members.role) TO project_members.user_id;", - default_schema: "my_default" - ) + sql = "ELECTRIC ASSIGN (projects, project_members.role) TO project_members.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "rzs4jo7bvzfmj2a5pjjblypdy2kw5vzc", + table: Proto.table("my_default", "project_members"), + user_column: "user_id", + scope: Proto.table("my_default", "projects"), + role_name: nil, + role_column: "role", + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "project_members"}], + tag: "ELECTRIC ASSIGN" + } - assert {:ok, - %Command.Assign{ - table_name: {"application", "project_members"}, - user_column: "user_id", - scope: {"auth", "projects"}, - role_name: nil, - role_column: "role", - if_statement: nil - }} = - Parser.parse( - "ELECTRIC ASSIGN ( auth.projects, application.project_members.role) TO application.project_members.user_id;", - default_schema: "my_default" - ) + sql = + "ELECTRIC ASSIGN (auth.projects, application.project_members.role) TO application.project_members.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "fcld4tiw2qsr4yxlwmytijlj5tl4mklp", + table: Proto.table("application", "project_members"), + user_column: "user_id", + scope: Proto.table("auth", "projects"), + role_name: nil, + role_column: "role", + if: nil + } + ] + }, + stmt: sql, + tables: 
[{"application", "project_members"}], + tag: "ELECTRIC ASSIGN" + } end test "invalid examples" do stmts = [ - "electric assign 'projects:' to users.user_id", + "electric assign (projects, ) to users.user_id", "electric assign '' to users.user_id", - "electric assign ':' to users.user_id", - "electric assign ':admin' to users.user_id", "electric assign abusers.role to users.user_id" ] @@ -193,13 +282,23 @@ defmodule Electric.DDLX.ParserTest do end test "scope extraction" do - assert {:ok, %{scope: {"my_default", "bslaiqzpkkrql_ugfjog"}}} = + assert {:ok, + %{ + action: %{ + assigns: [%{scope: %{schema: "my_default", name: "bslaiqzpkkrql_ugfjog"}}] + } + }} = Parser.parse( ~s[ELECTRIC ASSIGN (bslaiqzpkkrql_ugfjog, 'mscuqqjmltikiblihlbizrdwfgxxbkzhiqznwnguehipzktiecxbw') TO lfqtmmgnkcawqqtayufujumxmkwsz_nbj_odyzhxjxomc_jicpmi_dzkkgozlednrqsspibjspgyabumzxoxhccnomssuzqf."BKxHbrgtmXdAeebwgDiGuLWt"."wGUBAoaXNAAxYJqtItIHckiflTvyKmCebTUYsYtbFxpekYhCKRyJMfbUaeiRnNHrOfKrrYIkdB"], default_schema: "my_default" ) - assert {:ok, %{scope: {"my_default", "aaa"}}} = + assert {:ok, + %{ + action: %{ + assigns: [%{scope: %{schema: "my_default", name: "aaa"}}] + } + }} = Parser.parse( ~s[ELECTRIC ASSIGN aaa:'gzp' TO "pTw"."cjd".twi], default_schema: "my_default" @@ -214,12 +313,13 @@ defmodule Electric.DDLX.ParserTest do ddlx <- Assign.generator(scope_user_role: scope_user_role) ) do # IO.puts(ddlx) - assert {:ok, assign} = Parser.parse(ddlx, default_schema: "my_default") + assert {:ok, %{action: %SatPerms.DDLX{assigns: [assign]}}} = + Parser.parse(ddlx, default_schema: "my_default") {user_table, user_column} = user_def - assert assign.table_name == normalise(user_table, "my_default") - assert assign.user_column == normalise(user_column) + assert assign.table == pbnormalise(user_table, "my_default") + assert assign.user_column == pbnormalise(user_column) case role_def do {{_, _} = _table, {_, _} = column} -> @@ -229,103 +329,166 @@ defmodule Electric.DDLX.ParserTest do assert 
assign.role_name == normalise(name) end - assert assign.scope == normalise(scope, "my_default") + assert assign.scope == pbnormalise(scope, "my_default") end end test "parse assign global named role" do sql = "ELECTRIC ASSIGN 'admin' TO admin_users.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == - %Command.Assign{ - table_name: {"public", "admin_users"}, - user_column: "user_id", - scope: nil, - role_name: "admin", - role_column: nil, - if_statement: nil - } + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + scope: nil, + role_name: "admin", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "admin_users"}], + tag: "ELECTRIC ASSIGN" + } sql = "ELECTRIC ASSIGN (NULL, 'admin') TO admin_users.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: {"public", "admin_users"}, - user_column: "user_id", - scope: nil, - role_name: "admin", - role_column: nil, - if_statement: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + scope: nil, + role_name: "admin", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "admin_users"}], + tag: "ELECTRIC ASSIGN" } sql = "ELECTRIC ASSIGN (NULL, admin) TO admin_users.user_id;" + {:error, _} = Parser.parse(sql) end test "parse assign global role with column name" do sql = "ELECTRIC ASSIGN user_roles.role_name TO user_roles.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: {"public", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil + assert parse(sql) == %Command{ + action: 
%SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", + table: Proto.table("my_default", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "user_roles"}], + tag: "ELECTRIC ASSIGN" } sql = "ELECTRIC ASSIGN (NULL, user_roles.role_name) TO user_roles.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: {"public", "user_roles"}, - user_column: "user_id", - scope: nil, - role_name: nil, - role_column: "role_name", - if_statement: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", + table: Proto.table("my_default", "user_roles"), + user_column: "user_id", + scope: nil, + role_name: nil, + role_column: "role_name", + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "user_roles"}], + tag: "ELECTRIC ASSIGN" } end test "parse assign scoped role with column name" do sql = "ELECTRIC ASSIGN ( projects, project_members.role ) TO project_members.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: {"public", "project_members"}, - user_column: "user_id", - scope: {"public", "projects"}, - role_name: nil, - role_column: "role", - if_statement: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "rzs4jo7bvzfmj2a5pjjblypdy2kw5vzc", + table: Proto.table("my_default", "project_members"), + user_column: "user_id", + scope: Proto.table("my_default", "projects"), + role_name: nil, + role_column: "role", + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "project_members"}], + tag: "ELECTRIC ASSIGN" } end test "parse assign scoped role with name" do sql = "ELECTRIC ASSIGN 'deliveries:driver' TO deliveries.driver_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: 
{"public", "deliveries"}, - user_column: "driver_id", - scope: {"public", "deliveries"}, - role_name: "driver", - role_column: nil, - if_statement: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "uk3vbzo7am3uxtlnjprioxwgv52aeojz", + table: Proto.table("my_default", "deliveries"), + user_column: "driver_id", + scope: Proto.table("my_default", "deliveries"), + role_name: "driver", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "deliveries"}], + tag: "ELECTRIC ASSIGN" } sql = "ELECTRIC ASSIGN 'other.deliveries:driver' TO other.deliveries.driver_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Assign{ - table_name: {"other", "deliveries"}, - user_column: "driver_id", - scope: {"other", "deliveries"}, - role_name: "driver", - role_column: nil, - if_statement: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "z5wssirjrctqy3zfbs25yfuuvou77gip", + table: Proto.table("other", "deliveries"), + user_column: "driver_id", + scope: Proto.table("other", "deliveries"), + role_name: "driver", + role_column: nil, + if: nil + } + ] + }, + stmt: sql, + tables: [{"other", "deliveries"}], + tag: "ELECTRIC ASSIGN" } sql = "ELECTRIC ASSIGN deliveries:driver TO deliveries.driver_id;" @@ -337,19 +500,49 @@ defmodule Electric.DDLX.ParserTest do sql = "ELECTRIC ASSIGN 'record.reader' TO user_permissions.user_id IF ( can_read_records() )" - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Assign{ - table_name: {"public", "user_permissions"}, - user_column: "user_id", - scope: nil, - role_name: "record.reader", - role_column: nil, - if_statement: "can_read_records()" + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + assigns: [ + %SatPerms.Assign{ + id: "o7iyzse5guwyxjwr367hpfbmcg2irbyi", + table: Proto.table("my_default", "user_permissions"), + user_column: "user_id", + scope: nil, + role_name: 
"record.reader", + role_column: nil, + if: "can_read_records()" + } + ] + }, + stmt: sql, + tables: [{"my_default", "user_permissions"}], + tag: "ELECTRIC ASSIGN" } end end + defp pbnormalise(nil, _default_schema) do + nil + end + + defp pbnormalise({{_, _} = schema, {_, _} = table}, _default_schema) do + Proto.table(normalise_case(schema), normalise_case(table)) + end + + defp pbnormalise({_, _} = table, default_schema) do + Proto.table(default_schema, normalise_case(table)) + end + + defp pbnormalise(nil) do + nil + end + + defp pbnormalise({_, _} = column) do + normalise_case(column) + end + + ### + defp normalise(nil, _default_schema) do nil end @@ -380,19 +573,31 @@ defmodule Electric.DDLX.ParserTest do describe "ELECTRIC GRANT" do test "parse grant" do + # using clauses are currently ignored, and not referenced in the docs, but I'm retaining + # support in the parser sql = - "ELECTRIC GRANT UPDATE (status, name) ON thing.\"Köln_en$ts\" TO 'projects:house.admin' USING issue_id;" + "ELECTRIC GRANT UPDATE (status, name) ON thing.\"Köln_en$ts\" TO (projects, 'house.admin') USING issue_id;" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["status", "name"], - scope: {"public", "projects"}, - using_path: ["issue_id"], - check_fn: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", + privilege: :UPDATE, + table: Proto.table("thing", "Köln_en$ts"), + role: Proto.role("house.admin"), + columns: %SatPerms.ColumnList{names: ["status", "name"]}, + scope: Proto.table("public", "projects"), + path: ["issue_id"], + check: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "Köln_en$ts"}], + tag: "ELECTRIC GRANT" } end @@ -402,93 +607,145 @@ defmodule Electric.DDLX.ParserTest do {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - privileges: ["update"], - 
on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["status", "name"], - scope: {"public", "projects"}, - using_path: ["issue_id"], - check_fn: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", + privilege: :UPDATE, + table: Proto.table("thing", "Köln_en$ts"), + role: Proto.role("house.admin"), + columns: %SatPerms.ColumnList{names: ["status", "name"]}, + scope: Proto.table("public", "projects"), + path: ["issue_id"], + check: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "Köln_en$ts"}], + tag: "ELECTRIC GRANT" } end test "parse grant with no columns" do - sql = "ELECTRIC GRANT UPDATE ON thing.\"Köln_en$ts\" TO 'projects:house.admin';" + sql = "ELECTRIC GRANT UPDATE ON thing.\"Köln_en$ts\" TO (projects, 'house.admin');" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - privileges: ["update"], - on_table: {"thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"}, - using_path: nil, - check_fn: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", + privilege: :UPDATE, + table: Proto.table("thing", "Köln_en$ts"), + role: Proto.role("house.admin"), + columns: nil, + scope: Proto.table("public", "projects"), + path: nil, + check: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "Köln_en$ts"}], + tag: "ELECTRIC GRANT" } end test "parse grant with check" do sql = - "ELECTRIC GRANT UPDATE ON thing.Köln_en$ts TO 'projects:house.admin' USING project_id CHECK (name = 'Paul');" + "ELECTRIC GRANT UPDATE ON thing.Köln_en$ts TO (projects, 'house.admin') WHERE (name = 'Paul');" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: "name = 'Paul'", - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["update"], - role: "house.admin", - scope: {"public", "projects"}, - using_path: 
["project_id"] + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "unz3ra6f4w3luf2wdfhjsiryuyp4bdse", + check: "name = 'Paul'", + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("house.admin"), + scope: Proto.table("public", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } end - test "parse grant with multiple privileges" do - sql = - "ELECTRIC GRANT INSERT, UPDATE, DELETE ON thing.Köln_en$ts TO 'projects:house.admin' USING project_id CHECK (name = 'Paul');" - - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Grant{ - check_fn: "name = 'Paul'", - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["insert", "update", "delete"], - role: "house.admin", - scope: {"public", "projects"}, - using_path: ["project_id"] - } - + test "parse grant with old style string scope definition" do sql = - "ELECTRIC GRANT READ, WRITE ON thing.Köln_en$ts TO 'projects:house.admin' USING project_id CHECK (name = 'Paul');" + "ELECTRIC GRANT UPDATE ON thing.Köln_en$ts TO 'projects:house.admin' WHERE (name = 'Paul');" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: "name = 'Paul'", - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "house.admin", - scope: {"public", "projects"}, - using_path: ["project_id"] + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "l5clz3xxefjb7pn2erskct2qvh3jjxzv", + check: "name = 'Paul'", + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("projects:house.admin"), + scope: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } + end + test "parse grant with multiple privileges" do sql = - "ELECTRIC GRANT READ, WRITE, UPDATE ON thing.Köln_en$ts TO 
'projects:house.admin' USING project_id CHECK (name = 'Paul');" + "ELECTRIC GRANT WRITE ON thing.Köln_en$ts TO (projects, 'house.admin') WHERE (name = 'Paul');" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: "name = 'Paul'", - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "house.admin", - scope: {"public", "projects"}, - using_path: ["project_id"] + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "2ag4ijgsjmrexpfbqzpyljuqnj4x4qry", + check: "name = 'Paul'", + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :INSERT, + role: Proto.role("house.admin"), + scope: Proto.table("public", "projects") + }, + %SatPerms.Grant{ + id: "unz3ra6f4w3luf2wdfhjsiryuyp4bdse", + check: "name = 'Paul'", + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("house.admin"), + scope: Proto.table("public", "projects") + }, + %SatPerms.Grant{ + id: "tr7tdsl5c7uv6pkcth5ybgtz6tddewnd", + check: "name = 'Paul'", + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :DELETE, + role: Proto.role("house.admin"), + scope: Proto.table("public", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } end @@ -496,27 +753,162 @@ defmodule Electric.DDLX.ParserTest do sql = "ELECTRIC GRANT ALL ON thing.Köln_en$ts TO 'house.admin';" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: nil, - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "house.admin", - scope: "__global__", - using_path: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :SELECT, + role: 
Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :INSERT, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :DELETE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } sql = "ELECTRIC GRANT ALL PRIVILEGES ON thing.Köln_en$ts TO 'house.admin';" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: nil, - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "house.admin", - scope: "__global__", - using_path: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :SELECT, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :INSERT, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", + check: nil, + columns: nil, + table: 
Proto.table("thing", "köln_en$ts"), + privilege: :DELETE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" + } + end + + test "parse grant with all and column list" do + sql = "ELECTRIC GRANT ALL (col1, col2) ON thing.Köln_en$ts TO 'house.admin';" + {:ok, result} = Parser.parse(sql) + + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", + check: nil, + columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, + table: Proto.table("thing", "köln_en$ts"), + privilege: :SELECT, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", + check: nil, + columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, + table: Proto.table("thing", "köln_en$ts"), + privilege: :INSERT, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", + check: nil, + columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", + check: nil, + columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, + table: Proto.table("thing", "köln_en$ts"), + privilege: :DELETE, + role: Proto.role("house.admin"), + scope: nil, + path: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } end @@ -524,92 +916,305 @@ defmodule Electric.DDLX.ParserTest do sql = "ELECTRIC GRANT ALL ON thing.Köln_en$ts TO ANYONE;" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: nil, - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "__electric__.__anyone__", - scope: "__global__", - using_path: 
nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "2akoxmzkfwchadl6qcf22f6syd2btygl", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :SELECT, + role: Proto.anyone(), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "jk7n6coz7jejdybyayxtwfni7jet43pv", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :INSERT, + role: Proto.anyone(), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "nv2253mnh3xo6ozaefj4kpfmbb5ervsz", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :UPDATE, + role: Proto.anyone(), + scope: nil, + path: nil + }, + %SatPerms.Grant{ + id: "t3rp5vrt5r3tzzye33pcgwmxyovzgxb7", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :DELETE, + role: Proto.anyone(), + scope: nil, + path: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } end test "parse grant to authenticated" do - sql = "ELECTRIC GRANT ALL ON thing.Köln_en$ts TO AUTHENTICATED;" + sql = "ELECTRIC GRANT READ ON thing.Köln_en$ts TO AUTHENTICATED;" {:ok, result} = Parser.parse(sql) - assert result == %Command.Grant{ - check_fn: nil, - column_names: ["*"], - on_table: {"thing", "köln_en$ts"}, - privileges: ["select", "insert", "update", "delete"], - role: "__electric__.__authenticated__", - scope: "__global__", - using_path: nil + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "dfhpttndlmoswwso2idggsadq4vwuikg", + check: nil, + columns: nil, + table: Proto.table("thing", "köln_en$ts"), + privilege: :SELECT, + role: Proto.authenticated(), + scope: nil, + path: nil + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC GRANT" } end - end - describe "ELECTRIC REVOKE" do - test "parse revoke" do - sql = "ELECTRIC REVOKE UPDATE ON \"Thing\".\"Köln_en$ts\" FROM 'projects:house.admin';" + 
test "grant with field accesses in check clause" do + sql = + "ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE (row.user_id = AUTH.user_id)" + {:ok, result} = Parser.parse(sql) - assert result == %Command.Revoke{ - privileges: ["update"], - on_table: {"Thing", "Köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"} + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "baa4uqpavntlksnbmmw7eqp24mela3ed", + check: "ROW.user_id = AUTH.user_id", + table: Proto.table("issues"), + privilege: :SELECT, + role: Proto.role("editor"), + scope: Proto.scope("projects") + } + ] + }, + stmt: sql, + tables: [{"public", "issues"}], + tag: "ELECTRIC GRANT" } end - test "parse revoke all" do - sql = "ELECTRIC REVOKE ALL ON thing.Köln_en$ts FROM 'projects:house.admin';" + test "grant with type casting in check clause" do + sql = + "ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE (row.user_id::text = AUTH.user_id)" + {:ok, result} = Parser.parse(sql) - assert result == %Command.Revoke{ - privileges: ["select", "insert", "update", "delete"], - on_table: {"thing", "köln_en$ts"}, - role: "house.admin", - column_names: ["*"], - scope: {"public", "projects"} + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "baa4uqpavntlksnbmmw7eqp24mela3ed", + check: "ROW.user_id::text = AUTH.user_id", + table: Proto.table("issues"), + privilege: :SELECT, + role: Proto.role("editor"), + scope: Proto.scope("projects") + } + ] + }, + stmt: sql, + tables: [{"public", "issues"}], + tag: "ELECTRIC GRANT" + } + end + + test "grant with multiple clauses in check clause" do + sql = + "ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE ((row.user_id = AUTH.user_id) AND (thing.reason > 2))" + + {:ok, result} = Parser.parse(sql) + + assert result == %Command{ + action: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "baa4uqpavntlksnbmmw7eqp24mela3ed", + check: 
"(ROW.user_id = AUTH.user_id) AND (THING.reason > 2)", + table: Proto.table("issues"), + privilege: :SELECT, + role: Proto.role("editor"), + scope: Proto.scope("projects") + } + ] + }, + stmt: sql, + tables: [{"public", "issues"}], + tag: "ELECTRIC GRANT" + } + end + end + + describe "ELECTRIC REVOKE" do + test "parse revoke" do + sql = + "ELECTRIC REVOKE UPDATE ON \"Thing\".\"Köln_en$ts\" FROM (\"Thing\".projects, 'house.admin');" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + revokes: [ + %SatPerms.Revoke{ + id: "dajpwzccceliaxpwwru4rxc4f4qisw6j", + privilege: :UPDATE, + table: Proto.table("Thing", "Köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("Thing", "projects") + } + ] + }, + stmt: sql, + tables: [{"Thing", "Köln_en$ts"}], + tag: "ELECTRIC REVOKE" + } + end + + test "parse revoke all" do + sql = "ELECTRIC REVOKE ALL ON thing.Köln_en$ts FROM (projects, 'house.admin');" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + revokes: [ + %SatPerms.Revoke{ + id: "7nwvujrdfzxc6i2733x2bl3z72ea7htc", + privilege: :SELECT, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("my_default", "projects") + }, + %SatPerms.Revoke{ + id: "tuanwoqchn5fvfkffu6bvjdhqkogo6nd", + privilege: :INSERT, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("my_default", "projects") + }, + %SatPerms.Revoke{ + id: "5re2yqzv7oogtv7p7pyt7cmfxtnl3bpo", + privilege: :UPDATE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("my_default", "projects") + }, + %SatPerms.Revoke{ + id: "cpqo4as7pkf4coouxze6xfec2bd65hio", + privilege: :DELETE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("my_default", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC REVOKE" } end test "parse revoke fails with string for 
table" do - sql = "ELECTRIC REVOKE UPDATE ON 'thing.Köln_en$ts' FROM 'projects:house.admin';" + sql = "ELECTRIC REVOKE UPDATE ON 'thing.Köln_en$ts' FROM (projects, 'house.admin');" {:error, _} = Parser.parse(sql) end test "parse revoke cols" do sql = - "ELECTRIC REVOKE UPDATE (status, name) ON thing.Köln_en$ts FROM 'projects:house.admin';" - - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Revoke{ - privileges: ["update"], - on_table: {"thing", "köln_en$ts"}, - role: "house.admin", - column_names: ["status", "name"], - scope: {"public", "projects"} + "ELECTRIC REVOKE UPDATE (status, name) ON thing.Köln_en$ts FROM (projects, 'house.admin');" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + revokes: [ + %SatPerms.Revoke{ + id: "5re2yqzv7oogtv7p7pyt7cmfxtnl3bpo", + privilege: :UPDATE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("my_default", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC REVOKE" } end test "parse revoke namespaced scope" do sql = - "ELECTRIC REVOKE UPDATE (status, name) ON thing.Köln_en$ts FROM 'thing.projects:house.admin';" - - {:ok, result} = Parser.parse(sql) + "ELECTRIC REVOKE UPDATE (status, name) ON thing.Köln_en$ts FROM (thing.projects, 'house.admin');" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + revokes: [ + %SatPerms.Revoke{ + id: "lwxqdr32qyfd6g7e3jfiioid4pxv7j2i", + privilege: :UPDATE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("thing", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC REVOKE" + } + end - assert result == %Command.Revoke{ - privileges: ["update"], - on_table: {"thing", "köln_en$ts"}, - role: "house.admin", - column_names: ["status", "name"], - scope: {"thing", "projects"} + test "parse revoke multiple permissions" do + sql = + "ELECTRIC REVOKE WRITE (status, name) ON 
thing.Köln_en$ts FROM (thing.projects, 'house.admin');" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + revokes: [ + %SatPerms.Revoke{ + id: "ew7qw5tu7zaqwuwv72cdppnqbbynzpoj", + privilege: :INSERT, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("thing", "projects") + }, + %SatPerms.Revoke{ + id: "lwxqdr32qyfd6g7e3jfiioid4pxv7j2i", + privilege: :UPDATE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("thing", "projects") + }, + %SatPerms.Revoke{ + id: "7ocohdpexauh56fkfgsakqeldvposuvw", + privilege: :DELETE, + table: Proto.table("thing", "köln_en$ts"), + role: Proto.role("house.admin"), + scope: Proto.table("thing", "projects") + } + ] + }, + stmt: sql, + tables: [{"thing", "köln_en$ts"}], + tag: "ELECTRIC REVOKE" } end end @@ -617,30 +1222,40 @@ defmodule Electric.DDLX.ParserTest do describe "ELECTRIC DISABLE" do test "parses" do sql = "ALTER TABLE things DISABLE ELECTRIC;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Disable{ - table_name: {"public", "things"} + assert parse(sql) == %Command{ + action: %Command.Disable{ + table_name: {"my_default", "things"} + }, + stmt: sql, + tables: [{"my_default", "things"}], + tag: "ELECTRIC DISABLE" } end test "parse disable with quoted names" do sql = ~s[ALTER TABLE "Private"."Items" DISABLE ELECTRIC;] - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Disable{ - table_name: {"Private", "Items"} + assert parse(sql) == %Command{ + action: %Command.Disable{ + table_name: {"Private", "Items"} + }, + stmt: sql, + tables: [{"Private", "Items"}], + tag: "ELECTRIC DISABLE" } end test "parse disable with unquoted uppercase names" do sql = ~s[ALTER TABLE Private.Items DISABLE ELECTRIC;] - {:ok, result} = Parser.parse(sql) - - assert result == %Command.Disable{ - table_name: {"private", "items"} + assert parse(sql) == %Command{ + action: %Command.Disable{ + table_name: 
{"private", "items"} + }, + stmt: sql, + tables: [{"private", "items"}], + tag: "ELECTRIC DISABLE" } end end @@ -648,34 +1263,74 @@ defmodule Electric.DDLX.ParserTest do describe "ELECTRIC {EN,DIS}ABLE" do test "parse electrify" do sql = "ELECTRIC ENABLE things;" - {:ok, result} = Parser.parse(sql, default_schema: "application") - assert result == %Command.Enable{ - table_name: {"application", "things"} + assert parse(sql) == %Command{ + action: %Command.Enable{ + table_name: {"my_default", "things"} + }, + stmt: sql, + tables: [{"my_default", "things"}], + tag: "ELECTRIC ENABLE" } end test "parse unelectrify" do sql = "ELECTRIC DISABLE application.things;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Disable{ - table_name: {"application", "things"} + assert parse(sql) == %Command{ + action: %Command.Disable{ + table_name: {"application", "things"} + }, + stmt: sql, + tables: [{"application", "things"}], + tag: "ELECTRIC DISABLE" } end end describe "ELECTRIC UNASSIGN" do - test "parse unassign " do + test "parse unassign" do sql = "ELECTRIC UNASSIGN 'record.reader' FROM user_permissions.user_id;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.Unassign{ - table_name: {"public", "user_permissions"}, - user_column: "user_id", - scope: nil, - role_name: "record.reader", - role_column: nil + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + unassigns: [ + %SatPerms.Unassign{ + id: "o7iyzse5guwyxjwr367hpfbmcg2irbyi", + table: Proto.table("my_default", "user_permissions"), + user_column: "user_id", + scope: nil, + role_name: "record.reader", + role_column: nil + } + ] + }, + stmt: sql, + tables: [{"my_default", "user_permissions"}], + tag: "ELECTRIC UNASSIGN" + } + end + + test "parse unassign with scope" do + sql = + "ELECTRIC UNASSIGN (other.projects, other.user_permissions.user_role) FROM other.user_permissions.user_id;" + + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + unassigns: [ + %SatPerms.Unassign{ + id: 
"nugp4djlkslpzpevh245r2kzurlf3k4p", + table: Proto.table("other", "user_permissions"), + user_column: "user_id", + scope: Proto.table("other", "projects"), + role_name: nil, + role_column: "user_role" + } + ] + }, + stmt: sql, + tables: [{"other", "user_permissions"}], + tag: "ELECTRIC UNASSIGN" } end end @@ -683,19 +1338,35 @@ defmodule Electric.DDLX.ParserTest do describe "ELECTRIC SQLITE" do test "parse sqlite " do sql = "ELECTRIC SQLITE '-- a comment;';" - {:ok, result} = Parser.parse(sql) - assert result == %Command.SQLite{ - sqlite_statement: "-- a comment;" + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + sqlite: [ + %SatPerms.Sqlite{ + stmt: "-- a comment;" + } + ] + }, + stmt: sql, + tables: [], + tag: "ELECTRIC SQLITE" } end test "parse sqlite with $ delim" do sql = "ELECTRIC SQLITE $sqlite$-- comment\nselect 'this';$sqlite$;" - {:ok, result} = Parser.parse(sql) - assert result == %Command.SQLite{ - sqlite_statement: "-- comment\nselect 'this';" + assert parse(sql) == %Command{ + action: %SatPerms.DDLX{ + sqlite: [ + %SatPerms.Sqlite{ + stmt: "-- comment\nselect 'this';" + } + ] + }, + stmt: sql, + tables: [], + tag: "ELECTRIC SQLITE" } end end diff --git a/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs b/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs new file mode 100644 index 0000000000..2e089a00fe --- /dev/null +++ b/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs @@ -0,0 +1,269 @@ +defmodule Electric.Postgres.Extension.SchemaLoader.EpgsqlTest do + use Electric.Extension.Case, async: false + + alias Electric.DDLX.Command + alias Electric.Postgres.Extension + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Satellite.Permissions.State + alias Electric.Satellite.SatPerms + alias ElectricTest.PermissionsHelpers.Proto + + def epgsql_loader(conn) do + {:ok, loader} = SchemaLoader.connect({SchemaLoader.Epgsql, []}, 
__connection__: conn) + loader + end + + def epgsql_loader_with_rules(conn) do + loader = epgsql_loader(conn) + + rules = + %SatPerms.Rules{ + id: 2, + parent_id: 1, + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + } + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + {loader, rules} + end + + test_tx "global_permissions/1", fn conn -> + loader = epgsql_loader(conn) + assert {:ok, %SatPerms.Rules{id: 1} = _rules} = SchemaLoader.global_permissions(loader) + end + + test_tx "global_permissions/2", fn conn -> + loader = epgsql_loader(conn) + assert {:ok, %SatPerms.Rules{id: 1} = _rules} = SchemaLoader.global_permissions(loader, 1) + end + + test_tx "save_global_permissions/2", fn conn -> + loader = epgsql_loader(conn) + + rules = + %SatPerms.Rules{ + id: 2, + parent_id: 1, + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + } + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + assert {:ok, %SatPerms.Rules{id: 2} = ^rules} = SchemaLoader.global_permissions(loader) + end + + test_tx "user_permissions/2", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: 
"e815dfe6-f64d-472a-a322-bfc9e7993d27", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 2, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + end + + test_tx "user_permissions/3", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, %SatPerms{id: 1}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, %SatPerms{id: 1}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27", 1) + end + + test_tx "save_user_permissions/3", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, %SatPerms{id: 1, rules: %{id: rules_id}}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, %SatPerms{id: 2, roles: [_]}} = + SchemaLoader.save_user_permissions( + loader, + "e815dfe6-f64d-472a-a322-bfc9e7993d27", + %SatPerms.Roles{ + parent_id: 1, + rules_id: rules_id, + roles: [ + %SatPerms.Role{ + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + role: "editor" + } + ] + } + ) + + assert {:ok, _loader, %SatPerms{id: 2, roles: [_]}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + end + + test_tx "save_global_permissions/2 migrates existing user roles", fn conn -> + {loader, rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 2, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + 
SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + assert {:ok, _loader, %SatPerms{id: 3, roles: [_]}} = + SchemaLoader.save_user_permissions( + loader, + "e815dfe6-f64d-472a-a322-bfc9e7993d27", + %SatPerms.Roles{ + parent_id: 1, + rules_id: 2, + roles: [ + %SatPerms.Role{ + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + role: "editor" + } + ] + } + ) + + ddlx = + Command.ddlx( + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("comments"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ] + ) + + rules = State.apply_ddlx(rules, ddlx) + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + assert {:ok, _loader, + %SatPerms{ + id: 5, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 4, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + ddlx = + Command.ddlx( + grants: [ + Proto.grant( + privilege: :DELETE, + table: Proto.table("comments"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ] + ) + + rules = State.apply_ddlx(rules, ddlx) + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + assert {:ok, _loader, + %SatPerms{ + id: 7, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 6, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + {:ok, _, rows} = + :epgsql.equery( + conn, + "select count(id) as n from #{Extension.user_perms_table()} where global_perms_id = $1 group by (user_id)", + [rules.id] + ) + + # two 
users + assert length(rows) == 2 + + # there should only be one user permissions state for each user for each global rules state + for {n} <- rows do + assert n == 1 + end + end +end diff --git a/components/electric/test/electric/postgres/proxy/injector/electric_test.exs b/components/electric/test/electric/postgres/proxy/injector/electric_test.exs index 03b7dfcebf..c2491022d8 100644 --- a/components/electric/test/electric/postgres/proxy/injector/electric_test.exs +++ b/components/electric/test/electric/postgres/proxy/injector/electric_test.exs @@ -98,7 +98,8 @@ defmodule Electric.Postgres.Proxy.Injector.ElectricTest do proxy_ddlx_grant: false, proxy_ddlx_revoke: false, proxy_ddlx_assign: false, - proxy_ddlx_unassign: false + proxy_ddlx_unassign: false, + proxy_ddlx_sqlite: false ) migrations = [ @@ -140,13 +141,14 @@ defmodule Electric.Postgres.Proxy.Injector.ElectricTest do test "#{scenario.description()} ELECTRIC GRANT", cxt do query = - "ELECTRIC GRANT UPDATE ON public.items TO 'projects:house.admin' USING project_id CHECK (name = Paul);" + "ELECTRIC GRANT UPDATE ON public.items TO (projects, 'house.admin') WHERE (name = Paul);" cxt.scenario.assert_injector_error(cxt.injector, query, code: "EX900") end test "#{scenario.description()} ELECTRIC REVOKE", cxt do - query = ~s[ELECTRIC REVOKE UPDATE (status, name) ON truths FROM 'projects:house.admin';] + query = + ~s[ELECTRIC REVOKE UPDATE (status, name) ON truths FROM (projects, 'house.admin');] cxt.scenario.assert_injector_error(cxt.injector, query, code: "EX900") end diff --git a/components/electric/test/electric/postgres/proxy/injector_test.exs b/components/electric/test/electric/postgres/proxy/injector_test.exs index bc205ab3b2..fb457ae76c 100644 --- a/components/electric/test/electric/postgres/proxy/injector_test.exs +++ b/components/electric/test/electric/postgres/proxy/injector_test.exs @@ -14,7 +14,8 @@ defmodule Electric.Postgres.Proxy.InjectorTest do proxy_ddlx_grant: true, proxy_ddlx_revoke: true, 
proxy_ddlx_assign: true, - proxy_ddlx_unassign: true + proxy_ddlx_unassign: true, + proxy_ddlx_sqlite: true ) migrations = [ @@ -148,7 +149,7 @@ defmodule Electric.Postgres.Proxy.InjectorTest do passthrough: ~s[CREATE TABLE "socks" ("id" uuid PRIMARY KEY, colour TEXT)], electric: {~s[CALL electric.electrify('socks')], - command: %Electric.DDLX.Command.Enable{table_name: ~s["public"."socks"]}}, + command: Electric.DDLX.Command.electric_enable({"public", "socks"})}, capture: {~s[ALTER TABLE "socks" ADD COLUMN size int2], shadow_add_column: [ diff --git a/components/electric/test/electric/postgres/proxy/query_analyser_test.exs b/components/electric/test/electric/postgres/proxy/query_analyser_test.exs index c619eb663c..b9233fdd99 100644 --- a/components/electric/test/electric/postgres/proxy/query_analyser_test.exs +++ b/components/electric/test/electric/postgres/proxy/query_analyser_test.exs @@ -6,6 +6,7 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do alias Electric.Postgres.MockSchemaLoader alias Electric.Postgres.Extension.SchemaLoader alias Electric.Postgres.SQLGenerator + alias Electric.DDLX.Command alias PgProtocol.Message, as: M def simple(sql), do: %M.Query{query: sql} @@ -30,7 +31,8 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do proxy_ddlx_grant: true, proxy_ddlx_revoke: true, proxy_ddlx_assign: true, - proxy_ddlx_unassign: true + proxy_ddlx_unassign: true, + proxy_ddlx_sqlite: true ) migrations = [ @@ -660,13 +662,13 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do test "ELECTRIC...", cxt do assert [ %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Enable{}}, + action: {:electric, %Command{action: %Command.Enable{}}}, table: {"public", "truths"}, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Enable{}, + ast: %Command{action: %Command.Enable{}}, sql: "ALTER TABLE truths ENABLE ELECTRIC" } ] = @@ -674,13 +676,13 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do 
assert [ %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Revoke{}}, + action: {:electric, %Command{action: %{revokes: [_]}}}, table: {"public", "truths"}, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Revoke{}, + ast: %Command{}, sql: ~s[ELECTRIC REVOKE UPDATE (status, name) ON truths FROM 'projects:house.admin'] } @@ -717,13 +719,13 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do sql: ^query1 }, %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Enable{}}, + action: {:electric, %Command{action: %Command.Enable{}}}, table: {"public", "pants"}, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Enable{}, + ast: %Command{action: %Command.Enable{}}, sql: ^query2 }, %QueryAnalysis{ @@ -748,25 +750,25 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do assert [ %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Enable{}}, + action: {:electric, %Command{action: %Command.Enable{}}}, table: {"public", "pants"}, type: nil, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Enable{}, + ast: %Command{action: %Command.Enable{}}, sql: ^query1 }, %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Enable{}}, + action: {:electric, %Command{action: %Command.Enable{}}}, table: {"public", "hats"}, type: nil, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Enable{}, + ast: %Command{action: %Command.Enable{}}, sql: ^query2 }, %QueryAnalysis{ @@ -781,13 +783,13 @@ defmodule Electric.Postgres.Proxy.QueryAnalyserTest do sql: ^query3 }, %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Grant{}}, + action: {:electric, %Command{}}, table: {"public", "truths"}, electrified?: true, tx?: true, allowed?: true, capture?: true, - ast: %Electric.DDLX.Command.Grant{}, + ast: %Command{}, sql: ^query4 } ] = analyse(query, cxt) @@ -796,7 +798,7 @@ defmodule 
Electric.Postgres.Proxy.QueryAnalyserTest do test "electric.electrify(...)", cxt do assert [ %QueryAnalysis{ - action: {:electric, %Electric.DDLX.Command.Enable{}}, + action: {:electric, %Command{action: %Command.Enable{}}}, table: {"public", "pants"}, type: :table, electrified?: true, diff --git a/components/electric/test/electric/replication/eval/parser_test.exs b/components/electric/test/electric/replication/eval/parser_test.exs index a282ecf24d..9472fc48cd 100644 --- a/components/electric/test/electric/replication/eval/parser_test.exs +++ b/components/electric/test/electric/replication/eval/parser_test.exs @@ -287,6 +287,36 @@ defmodule Electric.Replication.Eval.ParserTest do assert %Const{value: true, type: :bool} = result end + test "casting uuid to text" do + assert {:ok, _} = + Parser.parse_and_validate_expression( + ~S|(new.is_valid) AND (new.user_id::text = auth.user_id)|, + %{ + ["new", "user_id"] => :uuid, + ["new", "is_valid"] => :bool, + ["auth", "user_id"] => :text + } + ) + end + + test "float[48] / float8" do + assert {:ok, _} = + Parser.parse_and_validate_expression( + ~S|new.percent / 100.0 > 0.3|, + %{ + ["new", "percent"] => :float8 + } + ) + + assert {:ok, _} = + Parser.parse_and_validate_expression( + ~S|new.percent / 100.0 > 0.3|, + %{ + ["new", "percent"] => :float4 + } + ) + end + test "should work with IN clauses" do env = Env.new() diff --git a/components/electric/test/electric/replication/eval/runner_test.exs b/components/electric/test/electric/replication/eval/runner_test.exs index 8328ba114b..0a599e6b92 100644 --- a/components/electric/test/electric/replication/eval/runner_test.exs +++ b/components/electric/test/electric/replication/eval/runner_test.exs @@ -61,5 +61,12 @@ defmodule Electric.Replication.Eval.RunnerTest do |> Parser.parse_and_validate_expression!(%{["test"] => :int4}) |> Runner.execute(%{["test"] => "test"}) end + + test "should correctly cast uuids" do + assert {:ok, "b06d507c-4e08-4a7f-896a-5c3c6c5dc332"} = + 
~S|test::text| + |> Parser.parse_and_validate_expression!(%{["test"] => :uuid}) + |> Runner.execute(%{["test"] => "b06d507c-4e08-4a7f-896a-5c3c6c5dc332"}) + end end end diff --git a/components/electric/test/electric/replication/postgres/migration_consumer_test.exs b/components/electric/test/electric/replication/postgres/migration_consumer_test.exs index f7b09139bf..dad3de4c8a 100644 --- a/components/electric/test/electric/replication/postgres/migration_consumer_test.exs +++ b/components/electric/test/electric/replication/postgres/migration_consumer_test.exs @@ -1,8 +1,8 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader - alias Electric.Postgres.MockSchemaLoader - + alias Electric.Replication.Changes alias Electric.Replication.Changes.NewRecord alias Electric.Replication.Changes.Transaction alias Electric.Replication.Postgres.MigrationConsumer @@ -78,7 +78,17 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {"public", "first_enum_table"} => 10001, {"electric", "shadow__public__first_enum_table"} => 20001, {"public", "second_enum_table"} => 10002, - {"electric", "shadow__public__second_enum_table"} => 20002 + {"electric", "shadow__public__second_enum_table"} => 20002, + {"public", "users"} => 30001, + {"electric", "shadow__public__users"} => 30011, + {"public", "projects"} => 30002, + {"electric", "shadow__public__projects"} => 30012, + {"public", "project_memberships"} => 30003, + {"electric", "shadow__public__project_memberships"} => 30013, + {"public", "teams"} => 30004, + {"electric", "shadow__public__teams"} => 30014, + {"public", "team_memberships"} => 30005, + {"electric", "shadow__public__team_memberships"} => 30015 } } @@ -86,8 +96,30 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {"public", "mistakes"} => ["id"] } + migrations = [ + {"20220000", + [ + """ + create table projects (id uuid primary key) + """, + """ + create table 
users (id uuid primary key) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + project_role text not null + ) + """ + ]} + ] + backend = - MockSchemaLoader.start_link([oids: oids, pks: pks], name: __MODULE__.Loader) + MockSchemaLoader.start_link([oids: oids, pks: pks, migrations: migrations], + name: __MODULE__.Loader + ) pid = start_link_supervised!( @@ -105,236 +137,485 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {:ok, origin: origin, producer: producer, version: version, loader: backend} end - test "refreshes subscription after receiving a migration", cxt do - %{producer: producer, origin: origin, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] - } - ] - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - assert_receive {MockSchemaLoader, {:refresh_subscription, ^origin}}, 1500 - end + describe "migrations" do + test "refreshes subscription after receiving a migration", cxt do + %{producer: producer, origin: origin, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + } + ] + } + ] + + GenStage.call(producer, {:emit, cxt.loader, events, version}) + + assert_receive {MockSchemaLoader, {:refresh_subscription, ^origin}}, 1500 + end - test "captures migration records", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive 
{MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "100", - "txts" => "200" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "7", - "query" => "create table other_thing (id uuid primary key);", - "txid" => "100", - "txts" => "200" + test "captures migration records", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "8", - "query" => "create table yet_another_thing (id uuid primary key);", - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => "create table other_thing (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - assert_receive {FakeConsumer, :events, ^events}, @receive_timeout - assert_receive {MockSchemaLoader, :load}, @receive_timeout - # only 1 save instruction is observed - assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _]}}, @receive_timeout - refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout - - assert Enum.map(schema.tables, & &1.name.name) == [ - "something_else", - "other_thing", - "yet_another_thing", - 
"shadow__public__something_else", - "shadow__public__other_thing", - "shadow__public__yet_another_thing" - ] - end + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "8", + "query" => "create table yet_another_thing (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, events, version}) + + assert_receive {FakeConsumer, :events, ^events}, @receive_timeout + assert_receive {MockSchemaLoader, :load}, @receive_timeout + # only 1 save instruction is observed + assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _]}}, @receive_timeout + refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + + assert Enum.map(schema.tables, & &1.name.name) == [ + "projects", + "users", + "project_memberships", + "something_else", + "other_thing", + "yet_another_thing", + "shadow__public__projects", + "shadow__public__users", + "shadow__public__project_memberships", + "shadow__public__something_else", + "shadow__public__other_thing", + "shadow__public__yet_another_thing" + ] + end - test "captures unique enum types from migrations", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "1", - "query" => "create type colour as enum ('red', 'green', 'blue');", - "txid" => "100", - "txts" => "200" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "2", - "query" => """ - create table first_enum_table ( - id uuid primary key, - foo colour - ); - """, - "txid" => "100", - "txts" => "200" + test "captures unique enum types from migrations", cxt do + %{origin: origin, producer: 
producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "1", + "query" => "create type colour as enum ('red', 'green', 'blue');", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "3", - "query" => "create type colour as enum ('red', 'green', 'blue');", - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "2", + "query" => """ + create table first_enum_table ( + id uuid primary key, + foo colour + ); + """, + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "4", - "query" => """ - create table second_enum_table ( - id uuid primary key, - bar colour - ); - """, - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "3", + "query" => "create type colour as enum ('red', 'green', 'blue');", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - } - ], - commit_timestamp: ~U[2024-02-06 10:08:00.000000Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - # only 1 save instruction is observed - assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _, _]}}, @receive_timeout - refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "4", + "query" => """ + create table second_enum_table ( + id uuid primary key, + bar colour + ); + """, + "txid" => "100", + "txts" => "200" + }, + tags: [] + } + ], + commit_timestamp: ~U[2024-02-06 10:08:00.000000Z], + origin: origin, + publication: 
"mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, events, version}) + + # only 1 save instruction is observed + assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _, _]}}, @receive_timeout + refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + + assert [ + %{ + name: %{name: "colour", schema: "public"}, + values: ["red", "green", "blue"] + } + ] = schema.enums + end - assert [ - %{ - name: %{name: "colour", schema: "public"}, - values: ["red", "green", "blue"] - } - ] = schema.enums + test "filters non-migration records", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + %NewRecord{ + relation: {"electric", "schema"}, + record: %{ + "id" => "7", + "version" => version, + "schema" => "{}" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + filtered_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, ^filtered_events}, 1000 + assert_receive {MockSchemaLoader, :load}, 500 + + assert_receive {MockSchemaLoader, + {:save, ^version, _schema, + ["create table something_else (id uuid primary key);"]}} + end end - 
test "filters non-migration records", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - raw_events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "schema"}, - record: %{ - "id" => "7", - "version" => version, - "schema" => "{}" - }, - tags: [] - } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] + describe "permissions" do + alias ElectricTest.PermissionsHelpers.Proto + alias ElectricTest.PermissionsHelpers.Chgs + + test "converts ddlx events into global permission change messages", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + role_column: "project_role", + scope: Proto.table("projects") + ) + ] + ) + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{permissions_id: 2} + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + origin_type: :postgresql + } + ] = filtered_events + end - filtered_events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table 
something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] + test "converts membership changes into user permission change messages", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + role_column: "project_role", + scope: Proto.table("projects") + ) + ] + ) + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + assert_receive {MockSchemaLoader, {:save_global_permissions, _}}, 500 + + assert_receive {FakeConsumer, :events, _filtered_events}, 1000 + + insert = + Chgs.insert( + {"public", "project_memberships"}, + %{ + "id" => "pm-1", + "user_id" => "user-1", + "project_id" => "p-1", + "project_role" => "admin" } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) - - assert_receive {FakeConsumer, :events, ^filtered_events}, 1000 - assert_receive {MockSchemaLoader, :load}, 500 + ) + + raw_events = [ + %Transaction{ + changes: [insert], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + ^insert, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: "user-1", + permissions: _user_perms + } + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + 
origin_type: :postgresql + } + ] = filtered_events + + assert_receive {MockSchemaLoader, {:save_user_permissions, "user-1", _}}, 500 + end - assert_receive {MockSchemaLoader, - {:save, ^version, _schema, - ["create table something_else (id uuid primary key);"]}} + test "uses updated schema information", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + insert = + Chgs.insert( + {"public", "team_memberships"}, + %{ + "id" => "tm-1", + "user_id" => "user-1", + "team_id" => "t-1", + "team_role" => "manager" + } + ) + + raw_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table teams (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => """ + create table team_memberships ( + id uuid primary key, + team_id uuid references teams (id), + user_id uuid references users (id), + team_role text not null + ); + """, + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("team_memberships"), + user_column: "user_id", + role_column: "team_role", + scope: Proto.table("teams") + ) + ] + ), + insert + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table teams (id uuid primary key);", + "txid" => "101", + "txts" => "201" + } + }, + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => """ + create table team_memberships 
( + id uuid primary key, + team_id uuid references teams (id), + user_id uuid references users (id), + team_role text not null + ); + """, + "txid" => "101", + "txts" => "201" + } + }, + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{permissions_id: 2} + }, + ^insert, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: "user-1", + permissions: _user_perms + } + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + origin_type: :postgresql + } + ] = filtered_events + end end end diff --git a/components/electric/test/electric/satellite/permissions/helper_test.exs b/components/electric/test/electric/satellite/permissions/helper_test.exs index f3ab0b6864..6e294e3d20 100644 --- a/components/electric/test/electric/satellite/permissions/helper_test.exs +++ b/components/electric/test/electric/satellite/permissions/helper_test.exs @@ -3,6 +3,7 @@ defmodule Electric.Satellite.Permissions.HelperTest do alias ElectricTest.PermissionsHelpers.{ Chgs, + Schema, Tree } @@ -16,11 +17,12 @@ defmodule Electric.Satellite.Permissions.HelperTest do @issues {"public", "issues"} @comments {"public", "comments"} @reactions {"public", "reactions"} - @project_memberships {"public", "project_memberships"} @tags {"public", "tags"} @issue_tags {"public", "issue_tags"} setup do + {:ok, schema_version} = Schema.load() + tree = Tree.new( [ @@ -55,17 +57,7 @@ defmodule Electric.Satellite.Permissions.HelperTest do {@tags, "t1", [{@issue_tags, "it1", []}, {@issue_tags, "it2", []}]}, {@tags, "t2", []} ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@offices, @regions, ["region_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]}, - # tasty join table - {@issue_tags, @tags, ["tag_id"]}, - {@issue_tags, @issues, 
["issue_id"]} - ] + schema_version ) {:ok, tree: tree} diff --git a/components/electric/test/electric/satellite/permissions/join_table_test.exs b/components/electric/test/electric/satellite/permissions/join_table_test.exs index 47644b8ea6..a8ed2214d7 100644 --- a/components/electric/test/electric/satellite/permissions/join_table_test.exs +++ b/components/electric/test/electric/satellite/permissions/join_table_test.exs @@ -5,9 +5,12 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do Chgs, Perms, Roles, + Schema, Tree } + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Postgres.MockSchemaLoader alias Electric.Satellite.Permissions alias Electric.Satellite.Permissions.Graph @@ -16,7 +19,6 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do @addresses {"public", "addresses"} @customers {"public", "customers"} @dishes {"public", "dishes"} - @order_dishes {"public", "order_dishes"} @order_riders {"public", "order_riders"} @orders {"public", "orders"} @restaurants {"public", "restaurants"} @@ -43,24 +45,38 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do describe "simple join table" do setup do - tree = - Tree.new( - [ - {@restaurants, "rt1", []}, - {@orders, "or1", []}, - {@orders, "or2", []}, - {@riders, "rd1", []}, - {@riders, "rd2", []} - ], - [ - {@orders, @restaurants, ["restaurant_id"]}, - {@order_riders, @orders, ["order_id"]}, - {@order_riders, @riders, ["rider_id"]} - ] - ) + migrations = [ + {"01", + [ + "create table restaurants (id uuid primary key)", + "create table orders (id uuid primary key, restaurant_id uuid not null references restaurants (id))", + "create table riders (id uuid primary key)", + """ + create table order_riders ( + id uuid primary key, + order_id uuid not null references orders (id), + rider_id uuid not null references riders (id) + ) + """ + ]} + ] + + data = [ + {@restaurants, "rt1", []}, + {@orders, "or1", []}, + {@orders, "or2", []}, + {@riders, "rd1", []}, + {@riders, "rd2", []} + ] + + 
{:ok, loader} = Schema.loader(migrations) + {:ok, schema_version} = SchemaLoader.load(loader) + + tree = Tree.new(data, schema_version) tree = add_order(tree, "rt1", "or1") - {:ok, tree: tree} + + {:ok, tree: tree, data: data, loader: loader, schema_version: schema_version} end test "scope_id resolves across join tables", cxt do @@ -96,6 +112,50 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do describe "more complex schema" do setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table restaurants (id uuid primary key)", + "create table customers (id uuid primary key)", + "create table riders (id uuid primary key)", + "create table addresses (id uuid primary key, customer_id uuid references customers (id))", + """ + create table orders ( + id uuid primary key, + restaurant_id uuid not null references restaurants (id), + customer_id uuid not null references customers (id), + address_id uuid not null references addresses (id) + ) + """, + """ + create table dishes ( + id uuid primary key, + restaurant_id uuid not null references restaurants (id) + ) + """, + """ + create table order_riders ( + id uuid primary key, + order_id uuid not null references orders (id), + rider_id uuid not null references riders (id) + ) + """, + """ + create table order_dishes ( + id uuid primary key, + order_id uuid not null references orders (id), + dish_id uuid not null references dishes (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -146,22 +206,12 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do {@riders, "d2", []}, {@riders, "d3", []} ], - [ - {@addresses, @customers, ["customer_id"]}, - {@dishes, @restaurants, ["restaurant_id"]}, - {@order_dishes, @dishes, ["dish_id"]}, - {@order_dishes, @orders, ["order_id"]}, - {@order_riders, @orders, ["order_id"]}, - {@order_riders, @riders, ["rider_id"]}, - {@orders, 
@addresses, ["address_id"]}, - {@orders, @customers, ["customer_id"]}, - {@orders, @restaurants, ["restaurant_id"]} - ] + schema_version ) {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end test "scope_id/3", cxt do @@ -258,12 +308,14 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do perms = perms_build( + cxt, [ ~s[GRANT READ ON #{table(@orders)} TO (#{table(@orders)}, 'rider')], - ~s[GRANT READ ON #{table(@addresses)} TO (#{table(@orders)}, 'rider')] + ~s[GRANT READ ON #{table(@addresses)} TO (#{table(@orders)}, 'rider')], + ~s[ASSIGN (#{table(@orders)}, 'rider') TO #{table(@order_riders)}.user_id] ], [ - Roles.role("rider", @orders, "c2-r2-o2") + Roles.role("rider", @orders, "c2-r2-o2", "assign-1") ] ) diff --git a/components/electric/test/electric/satellite/permissions/state_test.exs b/components/electric/test/electric/satellite/permissions/state_test.exs new file mode 100644 index 0000000000..ddabd4f084 --- /dev/null +++ b/components/electric/test/electric/satellite/permissions/state_test.exs @@ -0,0 +1,1476 @@ +defmodule Electric.Satellite.Permissions.StateTest do + use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader + + alias Electric.DDLX + alias Electric.DDLX.Command + alias Electric.Replication.Changes + alias Electric.Satellite.Permissions.State + alias Electric.Satellite.SatPerms + alias ElectricTest.PermissionsHelpers.{Chgs, Proto} + + def apply_ddlx(rules \\ %SatPerms.Rules{}, cmds) do + State.apply_ddlx(rules, Command.ddlx(cmds)) + end + + def new(cmds) do + apply_ddlx(cmds) + end + + def parse_ddlx(ddlx) do + ddlx + |> Enum.map(&DDLX.Parser.parse/1) + |> Enum.map(&elem(&1, 1)) + |> Enum.map(fn %{action: %SatPerms.DDLX{} = action} -> action end) + end + + @scoped_assign_relation {"public", "project_memberships"} + @unscoped_assign_relation {"public", "site_admins"} + + describe "apply_ddlx/2" do + test "ASSIGN" do + assign = + 
Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + assert %SatPerms.Rules{id: 1, parent_id: 0} = rules = apply_ddlx(assigns: [assign]) + + assert [^assign] = rules.assigns + end + + test "ASSIGN, UNASSIGN" do + rules = + new( + assigns: [ + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + updated = + apply_ddlx( + rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + assert updated.id == 2 + assert updated.assigns == [] + end + + test "ASSIGN ... IF, UNASSIGN" do + rules = + new( + assigns: [ + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + if: "something()" + ) + ] + ) + + updated = + apply_ddlx( + rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + assert updated.id == 2 + assert updated.assigns == [] + end + + test "ASSIGN, ASSIGN, UNASSIGN" do + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + + assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_column: "role_name" + ) + + rules = new(assigns: [assign1, assign2]) + + rules = + apply_ddlx(rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + ] + ) + + assert rules.id == 2 + assert [^assign2] = rules.assigns + end + + test "ASSIGN, re-ASSIGN" do + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + + 
assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects"), + if: "some_test()" + ) + + rules = new(assigns: [assign1]) + + rules = apply_ddlx(rules, assigns: [assign2]) + + assert rules.id == 2 + assert [^assign2] = rules.assigns + end + + test "GRANT" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + rules = apply_ddlx(grants: [grant]) + + assert rules.id == 1 + assert [^grant] = rules.grants + end + + test "GRANT, REVOKE" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + rules = new(grants: [grant]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [] + end + + test "GRANT ... 
CHECK, REVOKE" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects"), + check: "something()" + ) + + rules = new(grants: [grant]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [] + end + + test "GRANT, GRANT, REVOKE" do + grant1 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + grant2 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :UPDATE, + scope: Proto.scope("projects") + ) + + rules = new(grants: [grant1, grant2]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [grant2] + end + + test "GRANT, re-GRANT" do + grant1 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + grant2 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects"), + check: "some_check()" + ) + + rules = new(grants: [grant1]) + + updated = apply_ddlx(rules, grants: [grant2]) + + assert updated.grants == [grant2] + end + + test "update with DDLX" do + ddlx = [ + ~S[ELECTRIC ASSIGN (projects, members.role_name) TO members.user_id], + ~S[ELECTRIC ASSIGN (projects, members.role_name) TO members.user_id IF (some_check_passes())], + ~S[ELECTRIC GRANT ALL ON issues TO (projects, 'editor')], + ~S[ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE ((ROW.user_id = AUTH.user_id) AND (ROW.value > 3))], + ~S[ELECTRIC REVOKE DELETE ON issues FROM (projects, 'editor')] 
+ ] + + rules = + ddlx + |> parse_ddlx() + |> Enum.reduce(%SatPerms.Rules{}, &State.apply_ddlx(&2, &1)) + + assert rules == %SatPerms.Rules{ + id: 5, + parent_id: 4, + assigns: [ + Proto.assign( + scope: Proto.scope("projects"), + table: Proto.table("members"), + user_column: "user_id", + role_column: "role_name", + if: "some_check_passes()" + ) + ], + grants: [ + Proto.grant( + privilege: :UPDATE, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor") + ), + Proto.grant( + privilege: :SELECT, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor"), + check: "(ROW.user_id = AUTH.user_id) AND (ROW.value > 3)" + ), + Proto.grant( + privilege: :INSERT, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor") + ) + ] + } + + ddlx = [ + ~S[ELECTRIC UNASSIGN (projects, members.role_name) FROM members.user_id], + ~S[ELECTRIC REVOKE UPDATE ON issues FROM (projects, 'editor')], + ~S[ELECTRIC REVOKE READ ON issues FROM (projects, 'editor')], + ~S[ELECTRIC REVOKE INSERT ON issues FROM (projects, 'editor')] + ] + + rules = + ddlx + |> parse_ddlx() + |> Enum.reduce(rules, &State.apply_ddlx(&2, &1)) + + assert rules == %SatPerms.Rules{ + id: 9, + parent_id: 8, + assigns: [], + grants: [] + } + end + end + + def loader_with_global_perms(cxt, ddlx \\ default_ddlx()) do + loader = loader(cxt) + + ddlx = Command.ddlx(ddlx) + + assert {:ok, _, loader, rules} = State.update_global(ddlx, loader) + + {loader, rules} + end + + defp default_ddlx do + [ + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + 
) + ] + ] + end + + def loader(_cxt) do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + """ + create table projects (id uuid primary key) + """, + """ + create table users (id uuid primary key) + """, + """ + create table teams (id uuid primary key) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + project_role text not null, + is_enabled bool + ) + """, + """ + create table team_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + team_id uuid not null references teams (id), + team_role text not null + ) + """, + """ + create table site_admins ( + id uuid primary key, + user_id uuid not null references users (id), + site_role text not null, + is_superuser bool default false + ) + """, + """ + create table my_default.admin_users ( + id uuid primary key, + user_id uuid not null references users (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + loader + end + + describe "global rules serialisation" do + test "is initialised with empty state", cxt do + loader = loader(cxt) + + assert {:ok, %SatPerms.Rules{id: 1, assigns: [], grants: []}} = + SchemaLoader.global_permissions(loader) + end + + test "can update its state", cxt do + loader = loader(cxt) + assert {:ok, consumer} = State.new(loader) + + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + ddlx = Command.ddlx(assigns: [assign1]) + + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.ddlx(ddlx) + ]) + + assert {:ok, tx, consumer, loader} = State.update(tx, consumer, loader) + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + 
permissions_id: 2 + } + } + ] + + assert {:ok, rules} = SchemaLoader.global_permissions(loader) + assert %SatPerms.Rules{id: 2, parent_id: 1, assigns: [^assign1]} = rules + + assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin2" + ) + + ddlx = Command.ddlx(assigns: [assign2]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx), + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert tx.changes == [ + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 3 + } + }, + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ] + + assert {:ok, rules} = SchemaLoader.global_permissions(loader) + assert %SatPerms.Rules{id: 3, parent_id: 2, assigns: [^assign1, ^assign2]} = rules + end + + test "sequential updates are coalesced", cxt do + # we want to minimize permissions churn when possible + loader = loader(cxt) + assert {:ok, consumer} = State.new(loader) + + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + ddlx1 = Command.ddlx(assigns: [assign1]) + + assign2 = + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + scope: Proto.scope("projects"), + role_column: "role" + ) + + ddlx2 = Command.ddlx(assigns: [assign2]) + + assign3 = + Proto.assign( + table: Proto.table("team_memberships"), + user_column: "user_id", + scope: Proto.scope("teams"), + role_column: "role" + ) + + ddlx3 = Command.ddlx(assigns: [assign3]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx1), + Chgs.ddlx(ddlx2), + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert({"public", "kittens"}, %{"fur" => "furry"}), + Chgs.ddlx(ddlx3) + ]) + + assert {:ok, tx, _consumer, _loader} = State.update(tx, consumer, loader) + + assert tx.changes == [ + 
%Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 2 + } + }, + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert({"public", "kittens"}, %{"fur" => "furry"}), + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 3 + } + } + ] + end + end + + @user_id "7a81b0d0-97bf-466d-9053-4612146c2b67" + + describe "user roles state" do + test "starts with empty state", cxt do + {loader, rules} = loader_with_global_perms(cxt) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: @user_id, + rules: ^rules, + roles: [] + } = perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, _loader, ^perms} = + SchemaLoader.user_permissions(loader, @user_id) + end + + test "can load a specific version", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, ^perms} = + SchemaLoader.user_permissions(loader, @user_id, perms.id) + + assert {:ok, _loader, other_perms} = + SchemaLoader.user_permissions(loader, "7c9fe38c-895b-48f5-9b31-bb6ca992bf2b") + + refute other_perms.id == perms.id + + # attempting to load another user's perms by id + assert {:error, _} = + SchemaLoader.user_permissions(loader, @user_id, other_perms.id) + end + + test "scoped user roles are added via an insert to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + {:ok, consumer} = State.new(loader) + + %{assigns: [%{id: assign_id1}, %{id: assign_id2}]} = rules + + # table: Proto.table("project_memberships"), + # scope: Proto.scope("projects"), + # user_column: "user_id", + # role_name: "editor" + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" 
=> @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, tx, consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 2, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["5c0fd272-3fc2-4ae8-8574-92823c814096"], + assign_id: assign_id1, + role: "site_admin", + user_id: @user_id, + scope: nil + }, + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: 
"editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "new assign rules are used on changes in tx", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + assign = + Proto.assign( + table: Proto.table("team_memberships"), + scope: Proto.scope("teams"), + user_column: "user_id", + role_column: "team_role" + ) + + ddlx = Command.ddlx(assigns: [assign]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx), + Chgs.insert( + {"public", "team_memberships"}, + %{ + "id" => "b72c24b5-20b5-4eea-ab12-ec38d6adcab7", + "team_role" => "team_owner", + "user_id" => @user_id, + "team_id" => "7dde618b-0cb2-44b5-8b12-b98c59338116" + } + ) + ]) + + assert {:ok, _tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert Enum.filter(perms.roles, &(&1.assign_id == assign.id)) == [ + %SatPerms.Role{ + row_id: ["b72c24b5-20b5-4eea-ab12-ec38d6adcab7"], + assign_id: assign.id, + role: "team_owner", + user_id: @user_id, + scope: %SatPerms.Scope{ + table: Proto.table("teams"), + id: ["7dde618b-0cb2-44b5-8b12-b98c59338116"] + } + } + ] + end + + test "user roles are updated via an update to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_role" => "manager" + } + ) + ]) + + assert 
{:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_role" => "manager" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "manager", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "changes in role ownership are managed", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + user_id2 = "0c7afad3-213a-4158-9e89-312fc5e682e1" + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "user_id" => user_id2 + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, _loader, perms2} = + SchemaLoader.user_permissions(loader, user_id2) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + assert %{id: 5, user_id: 
user_id2, rules: %{id: 2}} = perms2 + + assert tx.changes == [ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "user_id" => user_id2 + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + }, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: user_id2, + permissions: perms2 + } + } + ] + + assert perms.roles == [] + + assert perms2.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "editor", + user_id: user_id2, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "changes in role scope are managed", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + update = + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_id" => "234" + } + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: 
@user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["234"]} + } + ] + end + + test "user roles are deleted with deletes to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "5e41153f-eb42-4b97-8f42-85ca8f40fa1d", + "project_role" => "viewer", + "user_id" => @user_id, + "project_id" => "234" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.delete( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 4, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.delete( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["5e41153f-eb42-4b97-8f42-85ca8f40fa1d"], + assign_id: assign_id, + role: "viewer", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), 
id: ["234"]} + } + ] + end + + test "scoped roles are deleted when columns are nulled", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + for column <- ~w(user_id project_id project_role) do + update = + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{column => nil} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [] + end + end + + test "unscoped roles are deleted when columns are nulled", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + for column <- ~w(user_id site_role) do + update = + Chgs.update( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + }, + %{column => nil} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, 
consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [] + end + end + + test "updates with no changes do nothing", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + update = + Chgs.update( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + }, + %{} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 2, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [update] + end + + test "roles belonging to removed assigns are GC'd", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = State.new(loader) + %{assigns: [%{id: _assign_id1}, %{id: assign_id2}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = State.update(tx, consumer, loader) + + ddlx = + Command.ddlx( + unassigns: [ + Proto.unassign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + ) + + tx = Chgs.tx([Chgs.ddlx(ddlx)]) + + assert {:ok, _tx, 
consumer, loader} = State.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 4, user_id: @user_id, rules: %{id: 3}} = perms + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "assign if clauses are honoured", cxt do + ddlx = [ + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role", + if: "is_enabled" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role", + if: "NOT is_superuser" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_name: "superuser", + if: "is_superuser = true" + ) + ] + ] + + {loader, rules} = loader_with_global_perms(cxt, ddlx) + {:ok, consumer} = State.new(loader) + + %{assigns: [%{id: assign_id2}, %{id: _assign_id1}, %{id: assign_id3}]} = rules + + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123", + "is_enabled" => false + } + ) + ]) + + assert {:ok, tx, consumer, loader} = State.update(tx, consumer, loader) + + assert tx.changes == [ 
+ Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123", + "is_enabled" => false + } + ) + ] + + [insert1, insert2] = + changes = [ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id, + "is_superuser" => true + } + ), + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id, + "is_superuser" => false + } + ) + ] + + tx = Chgs.tx(changes) + + assert {:ok, tx, _consumer, loader} = State.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert [ + ^insert1, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: _perms + } + }, + ^insert2, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] = tx.changes + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["5c0fd272-3fc2-4ae8-8574-92823c814096"], + assign_id: assign_id2, + role: "site_admin", + user_id: @user_id, + scope: nil + }, + %SatPerms.Role{ + row_id: ["5c0fd272-3fc2-4ae8-8574-92823c814096"], + assign_id: assign_id3, + role: "superuser", + user_id: @user_id, + scope: nil + } + ] + end + end + + test "sqlite ddlx messages are a no-op", cxt do + loader = loader(cxt) + assert {:ok, consumer} = State.new(loader) + + ddlx = Command.ddlx(sqlite: [Proto.sqlite("create table local (id primary key)")]) + + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.ddlx(ddlx) + ]) + + assert {:ok, tx, 
_consumer, _loader} = State.update(tx, consumer, loader) + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ] + end +end diff --git a/components/electric/test/electric/satellite/permissions/transient_test.exs b/components/electric/test/electric/satellite/permissions/transient_test.exs index 11379d160d..3bf1240472 100644 --- a/components/electric/test/electric/satellite/permissions/transient_test.exs +++ b/components/electric/test/electric/satellite/permissions/transient_test.exs @@ -116,12 +116,12 @@ defmodule Electric.Satellite.Permissions.TransientTest do role_grants = [ - Roles.role("editor", @projects, "p1", assign_id: "assign-01"), - Roles.role("editor", @projects, "p2", assign_id: "assign-01"), - Roles.role("editor", @projects, "p3", assign_id: "assign-01"), - Roles.role("reader", @projects, "p1", assign_id: "assign-02"), - Roles.role("reader", @projects, "p2", assign_id: "assign-02"), - Roles.role("reader", @projects, "p3", assign_id: "assign-02") + Roles.role("editor", @projects, "p1", "assign-01"), + Roles.role("editor", @projects, "p2", "assign-01"), + Roles.role("editor", @projects, "p3", "assign-01"), + Roles.role("reader", @projects, "p1", "assign-02"), + Roles.role("reader", @projects, "p2", "assign-02"), + Roles.role("reader", @projects, "p3", "assign-02") ] |> Enum.map(&%RoleGrant{role: Role.new(&1)}) diff --git a/components/electric/test/electric/satellite/permissions/trigger_test.exs b/components/electric/test/electric/satellite/permissions/trigger_test.exs index 61c08dbe3f..29a5d351b8 100644 --- a/components/electric/test/electric/satellite/permissions/trigger_test.exs +++ b/components/electric/test/electric/satellite/permissions/trigger_test.exs @@ -1,8 +1,9 @@ defmodule Electric.Satellite.Permissions.TriggerTest do use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader - alias Electric.Satellite.Permissions alias Electric.Satellite.Permissions.Trigger + alias Electric.Satellite.SatPerms alias 
ElectricTest.PermissionsHelpers.{ Auth, @@ -15,12 +16,36 @@ defmodule Electric.Satellite.Permissions.TriggerTest do @workspaces {"public", "workspaces"} @projects {"public", "projects"} - @issues {"public", "issues"} - @comments {"public", "comments"} - @reactions {"public", "reactions"} @project_memberships {"public", "project_memberships"} setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table users (id uuid primary key)", + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id))", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id))", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + role text not null, + is_enabled bool + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -31,18 +56,12 @@ defmodule Electric.Satellite.Permissions.TriggerTest do {@projects, "p3", []} ]} ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]} - ] + schema_version ) {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end def assign(ddlx) do @@ -51,6 +70,10 @@ defmodule Electric.Satellite.Permissions.TriggerTest do assign end + def callback(event, change, :loader) do + {event, change} + end + describe "for_assign/1" do test "generates a 
function that turns inserts into transient roles", cxt do assign = @@ -58,10 +81,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "assign (projects, #{table(@project_memberships)}.role) to #{table(@project_memberships)}.user_id" ) - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) - %{user_id: user_id} = auth = Auth.user() + assert is_function(fun, 2) + + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -71,27 +97,31 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "role" => "admin" }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "admin", + assign_id: ^assign_id, user_id: ^user_id, - scope: {@projects, ["p1"]} + scope: %SatPerms.Scope{ + table: %SatPerms.Table{schema: "public", name: "projects"}, + id: ["p1"] + } } = role - - assert [] = fun.(change, cxt.tree, Auth.user("1191723b-37a5-46c8-818e-326cfbc2c0a7")) - assert [] = fun.(change, cxt.tree, Auth.nobody()) end test "supports static role names", cxt do assign = assign("assign (projects, 'something') to #{table(@project_memberships)}.user_id") - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + assert is_function(fun, 2) - %{user_id: user_id} = auth = Auth.user() + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -100,13 +130,17 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "user_id" => user_id }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) 
+ assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "something", + assign_id: ^assign_id, user_id: ^user_id, - scope: {@projects, ["p1"]} + scope: %SatPerms.Scope{ + table: %SatPerms.Table{schema: "public", name: "projects"}, + id: ["p1"] + } } = role end @@ -116,10 +150,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "assign #{table(@project_memberships)}.role to #{table(@project_memberships)}.user_id" ) - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) - %{user_id: user_id} = auth = Auth.user() + assert is_function(fun, 2) + + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -129,11 +166,12 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "role" => "admin" }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "admin", + assign_id: ^assign_id, user_id: ^user_id, scope: nil } = role @@ -143,10 +181,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do assign = assign("assign 'something' to #{table(@project_memberships)}.user_id") - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + assert is_function(fun, 2) - %{user_id: user_id} = auth = Auth.user() + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -155,14 +196,159 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "user_id" => user_id }) - assert [{:insert, {@project_memberships, ["pm1"]}, 
role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "something", + assign_id: ^assign_id, user_id: ^user_id, scope: nil } = role end + + test "insert matching where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.insert(@project_memberships, %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => true + }) + + assert {{:insert, _role}, ^change} = fun.(change, :loader) + end + + test "insert not matching where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.insert(@project_memberships, %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => false + }) + + assert {:passthrough, ^change} = fun.(change, :loader) + end + + test "update that means row fails where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.update( + @project_memberships, + %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => true + }, + %{"is_enabled" => false} + ) + + assert {{:delete, _role}, ^change} = fun.(change, :loader) + end + + test "update that means row now passes where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, 
cxt.schema_version, &callback/3) + + change = + Chgs.update( + @project_memberships, + %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => false + }, + %{"is_enabled" => true} + ) + + assert {{:insert, _role}, ^change} = fun.(change, :loader) + end + + test "update where means row still passes where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.update( + @project_memberships, + %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => true, + "role" => "something" + }, + %{"role" => "changed"} + ) + + assert {{:update, _old_role, _new_role}, ^change} = fun.(change, :loader) + end + + test "update where means row still fails where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.update( + @project_memberships, + %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => false, + "role" => "something" + }, + %{"role" => "changed"} + ) + + assert {:passthrough, ^change} = fun.(change, :loader) + end + + test "delete row failing where clause", cxt do + assign = + assign("assign 'something' to #{table(@project_memberships)}.user_id if (row.is_enabled)") + + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + change = + Chgs.delete(@project_memberships, %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "is_enabled" => false + }) + + assert {{:delete, _role}, ^change} = fun.(change, :loader) + end end end diff --git a/components/electric/test/electric/satellite/permissions/where_clause_test.exs 
b/components/electric/test/electric/satellite/permissions/where_clause_test.exs new file mode 100644 index 0000000000..5dd482fe80 --- /dev/null +++ b/components/electric/test/electric/satellite/permissions/where_clause_test.exs @@ -0,0 +1,275 @@ +defmodule Electric.Satellite.Permissions.WhereClauseTest do + use ExUnit.Case, async: true + + alias ElectricTest.PermissionsHelpers + alias ElectricTest.PermissionsHelpers.Chgs + + alias Electric.Satellite.Auth + alias Electric.Satellite.Permissions + + @user_id "b2ce289a-3d2d-4ff7-9892-d446d5866f74" + @not_user_id "ec61ba28-7195-47a2-8d93-e71068dc7160" + @table {"public", "lotsoftypes"} + + setup do + {:ok, schema_version} = PermissionsHelpers.Schema.load() + auth = %Auth{user_id: @user_id} + + evaluator = Permissions.Eval.new(schema_version, auth) + + {:ok, auth: auth, schema_version: schema_version, evaluator: evaluator} + end + + def expression(cxt, stmt) do + assert {:ok, _expr_cxt} = Permissions.Eval.expression_context(cxt.evaluator, stmt, @table) + end + + # execute the statement. if we pass `stmt` as a single-arity function + # then it will be tested against all the valid prefixes for a generic + # row, that is `this.`, `row.` and ``, this allows us to refer to + # row columns as either `this.column`, `row.column` or just `column` + # in where/if expressions. 
+ def execute(cxt, stmt, change) do + if is_function(stmt) do + results = + for prefix <- ["ROW.", "THIS.", ""] do + assert {:ok, expr_cxt} = expression(cxt, stmt.(prefix)) + assert {:ok, result} = Permissions.Eval.execute(expr_cxt, change) + result + end + + # make sure that all results are the same + assert length(Enum.uniq(results)) == 1 + + {:ok, hd(results)} + else + assert {:ok, expr_cxt} = expression(cxt, stmt) + Permissions.Eval.execute(expr_cxt, change) + end + end + + def update(base, changes \\ %{}) do + Chgs.update(@table, base, changes) + end + + def insert(record) do + Chgs.insert(@table, record) + end + + def delete(record) do + Chgs.delete(@table, record) + end + + def change(f, r) do + apply(__MODULE__, f, [r]) + end + + describe "UPDATE" do + test "automatic casting when comparing auth", cxt do + stmt = &"#{&1}user_id = AUTH.user_id" + + assert {:ok, true} = execute(cxt, stmt, update(%{"user_id" => @user_id})) + end + + test "with NEW reference", cxt do + stmt = "NEW.user_id::text = AUTH.user_id" + + assert {:ok, true} = + execute(cxt, stmt, update(%{"user_id" => @not_user_id}, %{"user_id" => @user_id})) + end + + test "with OLD reference", cxt do + stmt = "OLD.user_id::text = auth.user_id" + + assert {:ok, true} = + execute(cxt, stmt, update(%{"user_id" => @user_id})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"user_id" => @not_user_id}, %{"user_id" => @user_id})) + end + + test "with ROW reference", cxt do + stmt = &"#{&1}user_id::text = auth.user_id" + + assert {:ok, true} = + execute(cxt, stmt, update(%{"user_id" => @user_id})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"user_id" => @not_user_id}, %{"user_id" => @user_id})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"user_id" => @user_id}, %{"user_id" => @not_user_id})) + end + + test "multi-clause ROW/THIS reference", cxt do + stmt = &"(#{&1}user_id::text = auth.user_id) AND #{&1}valid" + + assert {:ok, true} = + execute( + cxt, + stmt, + 
update(%{"user_id" => @user_id, "valid" => true}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + update(%{"user_id" => @user_id, "valid" => false}, %{"valid" => true}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + update(%{"user_id" => @not_user_id, "valid" => true}, %{"user_id" => @user_id}) + ) + end + + test "mixed row and NEW references", cxt do + stmt = &"(#{&1}user_id::text = auth.user_id) AND NOT new.valid" + + assert {:ok, true} = + execute( + cxt, + stmt, + update(%{"user_id" => @user_id, "valid" => true}, %{"valid" => false}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + update(%{"user_id" => @user_id, "valid" => true}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + update(%{"user_id" => @not_user_id, "valid" => true}, %{ + "user_id" => @user_id, + "valid" => false + }) + ) + end + + test "with NEW reference to bool column", cxt do + stmt = "new.valid" + + assert {:ok, true} = + execute(cxt, stmt, update(%{"valid" => true})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"valid" => false})) + end + + test "with NOT(ROW) reference to bool column", cxt do + stmt = &"NOT #{&1}valid" + + assert {:ok, false} = + execute(cxt, stmt, update(%{"valid" => true})) + + assert {:ok, true} = + execute(cxt, stmt, update(%{"valid" => false})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"valid" => false}, %{"valid" => true})) + end + + test "with THIS reference to bool column", cxt do + stmt = &"#{&1}valid" + + assert {:ok, true} = + execute(cxt, stmt, update(%{"valid" => true})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"valid" => true}, %{"valid" => false})) + + assert {:ok, false} = + execute(cxt, stmt, update(%{"valid" => false}, %{"valid" => true})) + end + end + + for {change_fun, name, ref} <- [ + {:insert, "INSERT", "NEW"}, + {:delete, "DELETE", "OLD"} + ] do + describe name do + test "with #{ref} reference", cxt do + stmt = "#{unquote(ref)}.user_id::text = AUTH.user_id" + + assert 
{:ok, true} = + execute(cxt, stmt, change(unquote(change_fun), %{"user_id" => @user_id})) + + assert {:ok, false} = + execute(cxt, stmt, change(unquote(change_fun), %{"user_id" => @not_user_id})) + end + + test "with ROW reference", cxt do + stmt = &"#{&1}user_id::text = auth.user_id" + + assert {:ok, true} = + execute(cxt, stmt, change(unquote(change_fun), %{"user_id" => @user_id})) + + assert {:ok, false} = + execute(cxt, stmt, change(unquote(change_fun), %{"user_id" => @not_user_id})) + end + + test "multi-clause ROW/THIS reference", cxt do + stmt = &"(#{&1}user_id::text = auth.user_id) AND #{&1}valid" + + assert {:ok, true} = + execute( + cxt, + stmt, + change(unquote(change_fun), %{"user_id" => @user_id, "valid" => true}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + change(unquote(change_fun), %{"user_id" => @user_id, "valid" => false}) + ) + + assert {:ok, false} = + execute( + cxt, + stmt, + change(unquote(change_fun), %{"user_id" => @not_user_id, "valid" => true}) + ) + end + + test "with #{ref} reference to bool column", cxt do + stmt = "#{unquote(ref)}.valid" + + assert {:ok, true} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => true})) + + assert {:ok, false} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => false})) + end + + test "with NOT(ROW) reference to bool column", cxt do + stmt = &"NOT #{&1}valid" + + assert {:ok, false} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => true})) + + assert {:ok, true} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => false})) + end + + test "with THIS reference to bool column", cxt do + stmt = &"#{&1}valid" + + assert {:ok, true} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => true})) + + assert {:ok, false} = + execute(cxt, stmt, change(unquote(change_fun), %{"valid" => false})) + end + end + end +end diff --git a/components/electric/test/electric/satellite/permissions/write_buffer_test.exs 
b/components/electric/test/electric/satellite/permissions/write_buffer_test.exs index f11f28f363..512b34ea2d 100644 --- a/components/electric/test/electric/satellite/permissions/write_buffer_test.exs +++ b/components/electric/test/electric/satellite/permissions/write_buffer_test.exs @@ -3,9 +3,10 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do alias Electric.Satellite.Permissions.WriteBuffer alias Electric.Satellite.Permissions.Graph + alias ElectricTest.PermissionsHelpers.Auth alias ElectricTest.PermissionsHelpers.Chgs + alias ElectricTest.PermissionsHelpers.Schema alias ElectricTest.PermissionsHelpers.Tree - alias ElectricTest.PermissionsHelpers.Auth @workspaces {"public", "workspaces"} @projects {"public", "projects"} @@ -14,7 +15,7 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do @tags {"public", "tags"} @issue_tags {"public", "issue_tags"} - def upstream(fks) do + def upstream(schema_version) do Tree.new( [ {@workspaces, "w1", @@ -34,17 +35,27 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do ]}, {@workspaces, "w2", []} ], - fks + schema_version ) end setup do - upstream = - upstream([ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]} - ]) + migrations = [ + {"01", + [ + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id), comment text, owner text)", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table tags (id uuid primary key, tag text not null)", + "create table issue_tags (id uuid primary key, issue_id uuid not null references issues (id), tag_id uuid not null references tags 
(id))" + ]} + ] + + {:ok, schema_version} = Schema.load(migrations) + + upstream = upstream(schema_version) write_buffer = WriteBuffer.with_upstream(WriteBuffer.new(Auth.user()), upstream) @@ -266,6 +277,21 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do describe "join table" do setup do + migrations = [ + {"01", + [ + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id), comment text, owner text)", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table tags (id uuid primary key, tag text not null)", + "create table issue_tags (id uuid primary key, issue_id uuid not null references issues (id), tag_id uuid not null references tags (id))" + ]} + ] + + {:ok, schema_version} = Schema.load(migrations) + upstream = Tree.new( [ @@ -295,13 +321,7 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do {@tags, "t1", [{@issue_tags, "it1", []}, {@issue_tags, "it9", []}]}, {@tags, "t2", []} ], - [ - {@issue_tags, @tags, ["tag_id"]}, - {@issue_tags, @issues, ["issue_id"]}, - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]} - ] + schema_version ) write_buffer = WriteBuffer.with_upstream(WriteBuffer.new(Auth.user()), upstream) diff --git a/components/electric/test/electric/satellite/permissions_test.exs b/components/electric/test/electric/satellite/permissions_test.exs index 97d89dd06e..0ce01103e9 100644 --- a/components/electric/test/electric/satellite/permissions_test.exs +++ b/components/electric/test/electric/satellite/permissions_test.exs @@ -1,6 +1,8 @@ defmodule Electric.Satellite.PermissionsTest do use ExUnit.Case, 
async: true + alias ElectricTest.PermissionsHelpers + alias ElectricTest.PermissionsHelpers.{ Auth, Chgs, @@ -10,11 +12,15 @@ defmodule Electric.Satellite.PermissionsTest do Tree } + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Satellite.{Permissions, Permissions.MoveOut} alias Electric.Replication.Changes import ElectricTest.PermissionsHelpers + @comments {"public", "comments"} + @issues {"public", "issues"} + @users {"public", "users"} @regions {"public", "regions"} @offices {"public", "offices"} @workspaces {"public", "workspaces"} @@ -22,63 +28,91 @@ defmodule Electric.Satellite.PermissionsTest do @issues {"public", "issues"} @comments {"public", "comments"} @reactions {"public", "reactions"} + @site_admins {"public", "site_admins"} @project_memberships {"public", "project_memberships"} + @compound_root {"public", "compound_root"} + @compound_level1 {"public", "compound_level1"} + @compound_level2 {"public", "compound_level2"} + + @projects_assign ~s[ELECTRIC ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + @global_assign ~s[ELECTRIC ASSIGN #{table(@users)}.role TO #{table(@users)}.id] + + defmacrop assert_write_rejected(test) do + # permissions failure messages are prefixed with `"permissions:"` so we're double checking + # that the error is caused by the permissions checks themselves, not by some other data error + # this is particularly important for the sqlite backed tests + quote do + assert {:error, "permissions:" <> _} = unquote(test) + end + end + setup do - tree = - Tree.new( - [ - {@regions, "r1", [{@offices, "o1"}, {@offices, "o2"}]}, - {@regions, "r2", [{@offices, "o3"}, {@offices, "o4"}]}, - {@workspaces, "w1", - [ - {@projects, "p1", - [ - {@issues, "i1", - [ - {@comments, "c1", - [{@reactions, "r1"}, {@reactions, "r2"}, {@reactions, "r3"}]}, - {@comments, "c2", [{@reactions, "r4"}]} - ]}, - {@issues, "i2", [{@comments, "c5"}]}, - {@project_memberships, "pm1", []} - ]}, 
- {@projects, "p2", - [ - {@issues, "i3", - [ - {@comments, "c3", - [{@reactions, "r5"}, {@reactions, "r6"}, {@reactions, "r7"}]}, - {@comments, "c4", [{@reactions, "r8"}]} - ]}, - {@issues, "i4"} - ]}, - {@projects, "p3", [{@issues, "i5", []}]}, - {@projects, "p4", [{@issues, "i6", []}]} - ]} - ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@offices, @regions, ["region_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]} - ] - ) + {:ok, loader} = PermissionsHelpers.Schema.loader() + {:ok, schema_version} = SchemaLoader.load(loader) + + data = [ + {@regions, "rg1", [{@offices, "o1"}, {@offices, "o2"}]}, + {@regions, "rg2", [{@offices, "o3"}, {@offices, "o4"}]}, + {@workspaces, "w1", + [ + {@projects, "p1", + [ + {@issues, "i1", + [ + {@comments, "c1", [{@reactions, "r1"}, {@reactions, "r2"}, {@reactions, "r3"}]}, + {@comments, "c2", [{@reactions, "r4"}]} + ]}, + {@issues, "i2", [{@comments, "c5"}]}, + {@project_memberships, "pm1", %{"user_id" => Auth.user_id(), "role" => "member"}, []} + ]}, + {@projects, "p2", + [ + {@issues, "i3", + [ + {@comments, "c3", [{@reactions, "r5"}, {@reactions, "r6"}, {@reactions, "r7"}]}, + {@comments, "c4", [{@reactions, "r8"}]} + ]}, + {@issues, "i4"} + ]}, + {@projects, "p3", [{@issues, "i5", [{@comments, "c6"}]}]}, + {@projects, "p4", [{@issues, "i6", []}]} + ]}, + {@compound_root, ["cmr1_1", "cmr2_1"], + [ + { + @compound_level1, + ["cml1_1", "cml2_1"], + [{@compound_level2, ["cmll1_1", "cmll2_1"], []}] + } + ]}, + {@users, [Auth.user_id()]}, + {@site_admins, ["sa1"], %{"role" => "site.admin", "user_id" => Auth.user_id()}, []} + ] + + tree = Tree.new(data, schema_version) {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, + tree: tree, + loader: loader, + schema_version: schema_version, + data: data, + migrations: PermissionsHelpers.Schema.migrations()} end describe 
"validate_write/3" do test "scoped role, scoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @projects_assign + ], [ - Roles.role("editor", @projects, "p2") + Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -118,9 +152,13 @@ defmodule Electric.Satellite.PermissionsTest do test "unscoped role, scoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + cxt, [ - Roles.role("editor") + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @global_assign + ], + [ + Roles.role("editor", "assign-1") ] ) @@ -138,10 +176,14 @@ defmodule Electric.Satellite.PermissionsTest do test "scoped role, unscoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], + @projects_assign + ], [ # we have an editor role within project p2 - Roles.role("editor", @projects, "p2") + Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -169,12 +211,14 @@ defmodule Electric.Satellite.PermissionsTest do test "grant for different table", cxt do perms = perms_build( + cxt, [ ~s[GRANT SELECT ON #{table(@comments)} TO 'editor'], - ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'] + ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -200,9 +244,13 @@ defmodule Electric.Satellite.PermissionsTest do test "unscoped role, unscoped grant", cxt do perms = perms_build( - ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], + cxt, + [ + ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], + @global_assign + ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -234,13 +282,16 @@ defmodule Electric.Satellite.PermissionsTest do test "scoped role, change outside of scope", cxt do perms = perms_build( + cxt, [ 
~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], - ~s[GRANT ALL ON #{table(@regions)} TO 'admin'] + ~s[GRANT ALL ON #{table(@regions)} TO 'admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p2"), - Roles.role("admin") + Roles.role("editor", @projects, "p2", "assign-1"), + Roles.role("admin", "assign-2") ] ) @@ -256,6 +307,30 @@ defmodule Electric.Satellite.PermissionsTest do ) end + test "role with no matching assign", cxt do + perms = + perms_build( + cxt, + [ + ~s[GRANT UPDATE ON #{table(@comments)} TO (#{table(@projects)}, 'editor')] + ], + [ + Roles.role("editor", @projects, "p1", "non-existant") + ] + ) + + assert {:error, _} = + Permissions.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c1", "comment" => "old comment"}, %{ + "comment" => "new comment" + }) + ]) + ) + end + test "overlapping global and scoped perms", cxt do # Test that even though the global perm doesn't grant # the required permissions, the scoped perms are checked @@ -264,13 +339,16 @@ defmodule Electric.Satellite.PermissionsTest do # until we run out of get permission. 
perms = perms_build( + cxt, [ ~s[GRANT UPDATE (description) ON #{table(@issues)} TO (projects, 'editor')], - ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'] + ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", "assign-2") ] ) @@ -289,6 +367,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], [] ) @@ -306,6 +385,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/o permission", cxt do perms = perms_build( + cxt, ~s[GRANT SELECT ON #{table(@comments)} TO AUTHENTICATED], [] ) @@ -323,6 +403,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/o user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], [], auth: Auth.nobody() @@ -341,6 +422,7 @@ defmodule Electric.Satellite.PermissionsTest do test "ANYONE w/o user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO ANYONE], [], auth: Auth.nobody() @@ -359,12 +441,14 @@ defmodule Electric.Satellite.PermissionsTest do test "protected columns", cxt do perms = perms_build( + cxt, [ ~s[GRANT INSERT (id, text) ON #{table(@comments)} TO 'editor'], - ~s[GRANT UPDATE (text) ON #{table(@comments)} TO 'editor'] + ~s[GRANT UPDATE (text) ON #{table(@comments)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -415,16 +499,18 @@ defmodule Electric.Satellite.PermissionsTest do test "moves between auth scopes", cxt do perms = perms_build( + cxt, [ ~s[GRANT UPDATE ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT SELECT ON #{table(@issues)} TO 'reader'] + ~s[GRANT SELECT ON #{table(@issues)} TO 'reader'], + @projects_assign ], [ # update 
rights on p1 & p3 - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p3"), + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p3", "assign-1"), # read-only role on project p2 - Roles.role("reader", @projects, "p2") + Roles.role("reader", @projects, "p2", "assign-1") ] ) @@ -455,13 +541,15 @@ defmodule Electric.Satellite.PermissionsTest do test "write in scope tree", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1") + Roles.role("editor", @projects, "p1", "assign-1") ] ) @@ -495,9 +583,10 @@ defmodule Electric.Satellite.PermissionsTest do describe "intermediate roles" do # roles that are created on the client and then used within the same tx before triggers have # run on pg - setup(_cxt) do + setup(cxt) do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], @@ -508,12 +597,13 @@ defmodule Electric.Satellite.PermissionsTest do ~s[GRANT ALL ON #{table(@projects)} TO 'project_admin'], ~s[GRANT ALL ON #{table(@project_memberships)} TO 'project_admin'], # the assign rule for the 'manager' role - ~s[ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + @projects_assign, + @global_assign ], [ # start with the ability to create projects and memberships - Roles.role("project_admin"), - Roles.role("manager", @projects, "p1", assign_id: "assign-1") + Roles.role("manager", @projects, "p1", "assign-1", row_id: ["pm1"]), + Roles.role("project_admin", "assign-2") ] ) @@ -659,7 +749,9 @@ defmodule 
Electric.Satellite.PermissionsTest do Chgs.tx([ Chgs.delete(@project_memberships, %{ "id" => "pm100", - "project_id" => "p100" + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" }), Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) ]) @@ -758,19 +850,240 @@ defmodule Electric.Satellite.PermissionsTest do end end + # TODO: implement where clauses on client side + for module <- [PermissionsHelpers.Server] do + describe "#{module.name()}: where clauses" do + setup(cxt) do + {:ok, cxt} = unquote(module).setup(cxt) + {:ok, Map.put(Map.new(cxt), :module, unquote(module))} + end + + test "simple user_id", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED WHERE (row.author_id::text = auth.user_id)] + ], + [] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c100", + "issue_id" => "i3", + "author_id" => "78c4d92e-a0a7-4c6a-b25a-44e26eb33e4c" + }) + ]) + ) + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c100", + "issue_id" => "i3", + "author_id" => Auth.user_id() + }) + ]) + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + # issue i3 belongs to project p2 + Chgs.tx([ + Chgs.update( + @comments, + %{"id" => "c4", "issue_id" => "i3", "author_id" => Auth.user_id()}, + %{ + "comment" => "changed" + } + ) + ]) + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + # issue i3 belongs to project p2 + Chgs.tx([ + Chgs.update( + @comments, + %{"id" => "c4", "issue_id" => "i3", "author_id" => Auth.user_id()}, + %{ + "author_id" => "a5158d97-8e45-408d-81c9-f28e2fe4f54c" + } + ) + ]) + ) + ) + end + + test "local role granting", cxt do + # if an assign has a where clause then local roles should honour that + # and only grant the role if the where clause passes + # reset 
the db because we're repeating the permissions setup + cxt = cxt.module.reset(cxt) + + perms = + cxt.module.perms( + cxt, + [ + # project level perms + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], + # read only to viewer + ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'viewer')], + ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'viewer')], + # global roles allowing create project and assign members + ~s[GRANT ALL ON #{table(@projects)} TO 'admin'], + ~s[GRANT ALL ON #{table(@project_memberships)} TO 'admin'], + ~s[GRANT ALL ON site_admins TO 'admin'], + + # global roles with a join table + ~s[GRANT ALL ON #{table(@regions)} TO 'site.admin'], + ~s[GRANT ALL ON #{table(@offices)} TO 'site.admin'], + ~s[ELECTRIC ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id IF (ROW.valid)], + ~s[ELECTRIC ASSIGN 'site.admin' TO #{table(@project_memberships)}.user_id IF (ROW.role = 'site.admin')], + @global_assign, + ~s[ASSIGN site_admins.role TO site_admins.user_id] + ], + [ + Roles.role("admin", "assign-2") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{ + "id" => "i100", + "project_id" => "p1" + }) + ]) + ) + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "manager", + "valid" => false + }), + Chgs.insert(@issues, %{ + "id" => "i100", + "project_id" => "p1" + }) + ]) + ) + ) + + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "manager", + "valid" => true + }), + Chgs.insert(@issues, %{ + "id" => 
"i100", + "project_id" => "p1" + }) + ]) + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "manager", + "valid" => true + }), + Chgs.insert(@issues, %{ + "id" => "i101", + "project_id" => "p1" + }) + ]) + ) + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@regions, %{ + "id" => "rg200" + }) + ]) + ) + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + # insert a special 'site.admin' role + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "site.admin", + "valid" => false + }), + Chgs.insert(@regions, %{ + "id" => "rg200" + }) + ]) + ) + end + end + end + describe "transient permissions" do setup(cxt) do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT SELECT ON #{table(@issues)} TO (#{table(@projects)}, 'reader')] + ~s[GRANT SELECT ON #{table(@issues)} TO (#{table(@projects)}, 'reader')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1", assign_id: "assign-01"), + Roles.role("editor", @projects, "p1", "assign-1"), # read-only role on project p2 - Roles.role("reader", @projects, "p2", assign_id: "assign-01"), - Roles.role("editor", @projects, "p3", assign_id: "assign-01") + Roles.role("reader", @projects, "p2", "assign-1"), + Roles.role("editor", @projects, "p3", "assign-1") ] ) @@ -792,7 +1105,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:ok, _perms} = cxt.perms |> Perms.add_transient( - assign_id: "assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i3"], scope_id: ["p1"], @@ -814,7 +1127,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:error, _} = cxt.perms |> Perms.add_transient( - assign_id: 
"assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i4"], scope_id: ["p1"], @@ -836,7 +1149,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:error, _} = cxt.perms |> Perms.add_transient( - assign_id: "assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i3"], scope_id: ["p1"], @@ -857,17 +1170,20 @@ defmodule Electric.Satellite.PermissionsTest do test "removes changes we don't have permissions to see", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'reader')], ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'reader')], - ~s[GRANT ALL ON #{table(@workspaces)} TO 'global_admin'] + ~s[GRANT ALL ON #{table(@workspaces)} TO 'global_admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("reader", @projects, "p2"), - Roles.role("global_admin") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("reader", @projects, "p2", "assign-1"), + Roles.role("global_admin", "assign-2") ] ) @@ -899,11 +1215,13 @@ defmodule Electric.Satellite.PermissionsTest do test "ignores column limits in grants", cxt do perms = perms_build( + cxt, [ - ~s[GRANT READ (id, title) ON #{table(@issues)} TO 'editor'] + ~s[GRANT READ (id, title) ON #{table(@issues)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -922,13 +1240,15 @@ defmodule Electric.Satellite.PermissionsTest do test "incorporates in-tx additions to scope", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@reactions)} TO 
(#{table(@projects)}, 'editor')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1") + Roles.role("editor", @projects, "p1", "assign-1") ] ) @@ -950,13 +1270,15 @@ defmodule Electric.Satellite.PermissionsTest do test "incorporates in-tx removals from scope", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p2") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -1037,16 +1359,19 @@ defmodule Electric.Satellite.PermissionsTest do test "removal from a scope but with global permissions", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@issues)} TO 'admin'], - ~s[GRANT ALL ON #{table(@comments)} TO 'admin'] + ~s[GRANT ALL ON #{table(@comments)} TO 'admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p2"), - Roles.role("admin") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p2", "assign-1"), + Roles.role("admin", "assign-2") ] ) @@ -1076,5 +1401,77 @@ defmodule Electric.Satellite.PermissionsTest do assert filtered_tx.changes == expected_changes end + + test "where clauses on grant", cxt do + perms = + perms_build( + cxt, + [ + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor') ], + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor') WHERE (ROW.author_id = auth.user_id)], + ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor') WHERE (ROW.is_public)], + @projects_assign + ], 
+ [ + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p2", "assign-1") + ] + ) + + changes = [ + Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{"text" => "updated"}), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}), + Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p2"}), + # author_id is us + Chgs.update( + @comments, + %{"id" => "c1", "issue_id" => "i1", "author_id" => Auth.user_id()}, + %{"text" => "updated"} + ), + # author is not us, so should be filtered + Chgs.update( + @comments, + %{"id" => "c2", "issue_id" => "i1", "author_id" => Auth.not_user_id()}, + %{"text" => "updated"} + ), + # matches the is_public clause + Chgs.update(@reactions, %{"id" => "r1", "comment_id" => "c1", "is_public" => true}, %{ + "text" => "updated" + }), + # change of is_public fails ROW.is_public test which tests old and new values + Chgs.update(@reactions, %{"id" => "r2", "comment_id" => "c1", "is_public" => true}, %{ + "text" => "updated", + "is_public" => false + }), + Chgs.insert(@reactions, %{"id" => "r200", "comment_id" => "c1", "is_public" => true}) + ] + + {filtered_tx, []} = Permissions.filter_read(perms, cxt.tree, Chgs.tx(changes)) + + assert filtered_tx.changes == [ + Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{"text" => "updated"}), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}), + Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p2"}), + # author_id is us + Chgs.update( + @comments, + %{"id" => "c1", "issue_id" => "i1", "author_id" => Auth.user_id()}, + %{"text" => "updated"} + ), + # matches the is_public clause + Chgs.update( + @reactions, + %{"id" => "r1", "comment_id" => "c1", "is_public" => true}, + %{ + "text" => "updated" + } + ), + Chgs.insert(@reactions, %{ + "id" => "r200", + "comment_id" => "c1", + "is_public" => true + }) + ] + end end end diff --git a/components/electric/test/support/ddlx_helpers.ex 
b/components/electric/test/support/ddlx_helpers.ex index ca84cd51e8..ebebcb9b52 100644 --- a/components/electric/test/support/ddlx_helpers.ex +++ b/components/electric/test/support/ddlx_helpers.ex @@ -46,4 +46,8 @@ defmodule ElectricTest.DDLXHelpers do def quote_table({schema, table}) do ~s["#{schema}"."#{table}"] end + + def quote_table(%{schema: schema, name: table}) do + ~s["#{schema}"."#{table}"] + end end diff --git a/components/electric/test/support/mock_schema_loader.ex b/components/electric/test/support/mock_schema_loader.ex index 3905c1b887..0f8478ac5f 100644 --- a/components/electric/test/support/mock_schema_loader.ex +++ b/components/electric/test/support/mock_schema_loader.ex @@ -5,6 +5,17 @@ defmodule Electric.Postgres.MockSchemaLoader do Schema } + alias Electric.Satellite.SatPerms + + defmacro __using__(_opts) do + quote do + alias Electric.Postgres.MockSchemaLoader + alias Electric.Postgres.Extension.SchemaLoader + end + end + + defstruct versions: [], opts: [], global_perms: [], user_perms: [] + def oid_loader(type, schema, name) do {:ok, Enum.join(["#{type}", schema, name], ".") |> :erlang.phash2(50_000)} end @@ -44,7 +55,7 @@ defmodule Electric.Postgres.MockSchemaLoader do def start_link(opts, args \\ []) do {module, spec} = agent_spec(opts, args) - {:ok, state} = connect([], spec) + {:ok, state} = connect(spec, []) {module, state} end @@ -139,14 +150,14 @@ defmodule Electric.Postgres.MockSchemaLoader do {:agent, pid} end - def receive_tx({versions, opts}, %{"txid" => _txid, "txts" => _txts} = row, version) do + def receive_tx(%{opts: opts} = state, %{"txid" => _txid, "txts" => _txts} = row, version) do key = tx_key(row) - {versions, Map.update(opts, :txids, %{key => version}, &Map.put(&1, key, version))} + %{state | opts: Map.update(opts, :txids, %{key => version}, &Map.put(&1, key, version))} end # ignore rows that don't define a txid, txts key - def receive_tx({versions, opts}, _row, _version) do - {versions, opts} + def receive_tx(state, 
_row, _version) do + state end def electrify_table({__MODULE__, state}, {schema, table}) do @@ -158,9 +169,17 @@ defmodule Electric.Postgres.MockSchemaLoader do {:agent, pid} end - def electrify_table({versions, opts}, {schema, table}) do - {versions, - Map.update(opts, :tables, %{{schema, table} => true}, &Map.put(&1, {schema, table}, true))} + def electrify_table(%{opts: opts} = state, {schema, table}) do + %{ + state + | opts: + Map.update( + opts, + :tables, + %{{schema, table} => true}, + &Map.put(&1, {schema, table}, true) + ) + } end defp tx_key(%{"txid" => txid, "txts" => txts}) do @@ -169,12 +188,12 @@ defmodule Electric.Postgres.MockSchemaLoader do @behaviour SchemaLoader - @impl true - def connect(_conn_config, {:agent, pid}) do + @impl SchemaLoader + def connect({:agent, pid}, _conn_config) do {:ok, {:agent, pid}} end - def connect(conn_config, {:agent, opts, args}) do + def connect({:agent, opts, args}, conn_config) do name = Keyword.get(args, :name) pid = name && GenServer.whereis(name) @@ -182,44 +201,45 @@ defmodule Electric.Postgres.MockSchemaLoader do # use existing agent {:ok, {:agent, name}} else - with {:ok, conn} <- connect(conn_config, opts), + with {:ok, conn} <- connect(opts, conn_config), {:ok, pid} <- Agent.start_link(fn -> conn end, args) do {:ok, {:agent, name || pid}} end end end - def connect(conn_config, opts) do + def connect(opts, conn_config) do {versions, opts} = opts |> Map.new() |> Map.pop(:versions, []) notify(opts, {:connect, conn_config}) - {:ok, {versions, opts}} + {:ok, %__MODULE__{versions: versions, opts: opts}} end - @impl true + @impl SchemaLoader def load({:agent, pid}) do Agent.get(pid, &load/1) end - def load({[], opts}) do + def load(%{versions: [], opts: opts}) do notify(opts, :load) {:ok, SchemaLoader.Version.new(nil, Schema.new())} end - def load({[%{version: version, schema: schema} | _versions], opts}) do + def load(%{versions: [%{version: version, schema: schema} | _versions], opts: opts}) do + notify(opts, 
:load) notify(opts, {:load, version, schema}) {:ok, SchemaLoader.Version.new(version, schema)} end - @impl true + @impl SchemaLoader def load({:agent, pid}, version) do Agent.get(pid, &load(&1, version)) end - def load({versions, opts}, version) do + def load(%{versions: versions, opts: opts}, version) do case Enum.find(versions, &(&1.version == version)) do %Migration{schema: schema} -> notify(opts, {:load, version, schema}) @@ -231,7 +251,7 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def save({:agent, pid}, version, schema, stmts) do with :ok <- Agent.update(pid, fn state -> @@ -242,24 +262,24 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - def save({versions, opts}, version, schema, stmts) do + def save(%{versions: versions, opts: opts} = state, version, schema, stmts) do notify(opts, {:save, version, schema, stmts}) - {:ok, {[mock_version(version, schema, stmts) | versions], opts}, + {:ok, %{state | versions: [mock_version(version, schema, stmts) | versions]}, SchemaLoader.Version.new(version, schema)} end - @impl true + @impl SchemaLoader def relation_oid({:agent, pid}, type, schema, name) do Agent.get(pid, &relation_oid(&1, type, schema, name)) end - def relation_oid({_versions, %{oid_loader: oid_loader}}, type, schema, name) + def relation_oid(%{opts: %{oid_loader: oid_loader}}, type, schema, name) when is_function(oid_loader, 3) do oid_loader.(type, schema, name) end - def relation_oid({_versions, opts}, type, schema, name) do + def relation_oid(%{opts: opts}, type, schema, name) do notify(opts, {:relation_oid, type, schema, name}) with %{} = oids <- get_in(opts, [:oids, type]), @@ -270,22 +290,22 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def refresh_subscription({:agent, pid}, name) do Agent.get(pid, &refresh_subscription(&1, name)) end - def refresh_subscription({_versions, opts}, name) do + def refresh_subscription(%{opts: opts}, name) do 
notify(opts, {:refresh_subscription, name}) :ok end - @impl true + @impl SchemaLoader def migration_history({:agent, pid}, after_version) do Agent.get(pid, &migration_history(&1, after_version)) end - def migration_history({versions, opts}, after_version) do + def migration_history(%{versions: versions, opts: opts}, after_version) do notify(opts, {:migration_history, after_version}) migrations = @@ -300,18 +320,18 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, migrations} end - @impl true + @impl SchemaLoader def known_migration_version?({:agent, pid}, version) do Agent.get(pid, &known_migration_version?(&1, version)) end - def known_migration_version?({versions, opts}, version) do + def known_migration_version?(%{versions: versions, opts: opts}, version) do notify(opts, {:known_migration_version?, version}) Enum.any?(versions, &(&1.version == version)) end - @impl true + @impl SchemaLoader def internal_schema(_state) do Schema.new() end @@ -320,7 +340,7 @@ defmodule Electric.Postgres.MockSchemaLoader do Agent.get(pid, &electrified_tables/1) end - def electrified_tables({[version | _versions], _opts}) do + def electrified_tables(%{versions: [version | _versions]}) do {:ok, Schema.table_info(version.schema)} end @@ -328,12 +348,12 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, []} end - @impl true + @impl SchemaLoader def table_electrified?({:agent, pid}, {schema, name}) do Agent.get(pid, &table_electrified?(&1, {schema, name})) end - def table_electrified?({_versions, opts} = state, {schema, name}) do + def table_electrified?(%{opts: opts} = state, {schema, name}) do if Map.get(opts.tables, {schema, name}) do {:ok, true} else @@ -343,12 +363,12 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def index_electrified?({:agent, pid}, {schema, name}) do Agent.get(pid, &index_electrified?(&1, {schema, name})) end - def index_electrified?({[version | _versions], _opts}, {schema, name}) do + def 
index_electrified?(%{versions: [version | _versions]}, {schema, name}) do {:ok, Enum.any?( Schema.indexes(version.schema, include_constraints: false), @@ -360,12 +380,16 @@ defmodule Electric.Postgres.MockSchemaLoader do send(parent, {__MODULE__, msg}) end - @impl true + defp notify(_, _msg) do + :ok + end + + @impl SchemaLoader def tx_version({:agent, pid}, row) do Agent.get(pid, &tx_version(&1, row)) end - def tx_version({versions, opts}, %{"txid" => txid, "txts" => txts} = row) do + def tx_version(%{versions: versions, opts: opts}, %{"txid" => txid, "txts" => txts} = row) do notify(opts, {:tx_version, txid, txts}) key = tx_key(row) @@ -392,4 +416,167 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, version} end end + + @impl SchemaLoader + def global_permissions({:agent, pid}) do + Agent.get(pid, &global_permissions(&1)) + end + + def global_permissions(%{global_perms: []}) do + {:ok, initial_global_perms()} + end + + def global_permissions(%{global_perms: [perms | _]}) do + {:ok, perms} + end + + @impl SchemaLoader + def global_permissions({:agent, pid}, id) do + Agent.get(pid, &global_permissions(&1, id)) + end + + def global_permissions(%{global_perms: [], opts: opts}, 1) do + notify(opts, :global_permissions) + {:ok, initial_global_perms()} + end + + def global_permissions(%{global_perms: []}, id) do + {:error, "global perms with id #{id} not found"} + end + + def global_permissions(%{global_perms: perms, opts: opts}, id) do + notify(opts, {:global_permissions, id}) + + case Enum.find(perms, &(&1.id == id)) do + nil -> {:error, "global perms with id #{id} not found"} + perms -> {:ok, perms} + end + end + + @impl SchemaLoader + def user_permissions({:agent, pid}, user_id) do + Agent.get_and_update(pid, fn state -> + case user_permissions(state, user_id) do + {:ok, state, perms} -> + {{:ok, {:agent, pid}, perms}, state} + + error -> + {error, state} + end + end) + end + + def user_permissions(%{user_perms: user_perms, opts: opts} = state, user_id) do + 
notify(opts, {:user_permissions, user_id}) + + case(Enum.find(user_perms, &(&1.user_id == user_id))) do + nil -> + id = next_user_perms_id(state) + + {:ok, global} = global_permissions(state) + perms = %SatPerms{id: id, user_id: user_id, rules: global} + {:ok, %{state | user_perms: [perms | user_perms]}, perms} + + perms -> + {:ok, state, perms} + end + end + + @impl SchemaLoader + def user_permissions({:agent, pid}, user_id, perms_id) do + Agent.get(pid, &user_permissions(&1, user_id, perms_id)) + end + + def user_permissions(%{user_perms: user_perms, opts: opts}, user_id, perms_id) do + notify(opts, {:user_permissions, user_id, perms_id}) + + case(Enum.find(user_perms, &(&1.user_id == user_id && &1.id == perms_id))) do + nil -> + {:error, "perms id #{perms_id} not found for user #{user_id}"} + + perms -> + {:ok, perms} + end + end + + @impl SchemaLoader + def save_global_permissions({:agent, pid}, rules) do + Agent.get_and_update(pid, fn state -> + case save_global_permissions(state, rules) do + {:ok, state} -> + {{:ok, {:agent, pid}}, state} + + error -> + {error, state} + end + end) + end + + def save_global_permissions( + %{global_perms: global_perms, opts: opts} = state, + %SatPerms.Rules{} = rules + ) do + notify(opts, {:save_global_permissions, rules}) + + # duplicate all the current user perms with the updated rules, as per the pg version + {user_perms, _id} = + state.user_perms + |> Enum.filter(&(&1.rules.id == rules.parent_id)) + |> Enum.uniq_by(& &1.user_id) + |> Enum.map_reduce(next_user_perms_id(state), fn user_perms, id -> + {%{user_perms | id: id, rules: rules}, id + 1} + end) + + {:ok, + %{state | user_perms: user_perms ++ state.user_perms, global_perms: [rules | global_perms]}} + end + + @impl SchemaLoader + def save_user_permissions({:agent, pid}, user_id, roles) do + Agent.get_and_update(pid, fn state -> + case save_user_permissions(state, user_id, roles) do + {:ok, state, perms} -> + {{:ok, {:agent, pid}, perms}, state} + + error -> + {error, 
state} + end + end) + end + + def save_user_permissions( + %{user_perms: user_perms, opts: opts} = state, + user_id, + %SatPerms.Roles{} = perms + ) do + notify(opts, {:save_user_permissions, user_id, perms}) + %{rules_id: rules_id, parent_id: parent_id, roles: roles} = perms + + global = + cond do + rules_id == 1 -> initial_global_perms() + global = Enum.find(state.global_perms, &(&1.id == rules_id)) -> global + true -> nil + end + + if global do + if parent_id && !Enum.find(user_perms, &(&1.id == parent_id)) do + {:error, "invalid parent permissions id #{parent_id}"} + else + id = next_user_perms_id(state) + + perms = %SatPerms{id: id, user_id: user_id, rules: global, roles: roles} + {:ok, %{state | user_perms: [perms | user_perms]}, perms} + end + else + {:error, "invalid global permissions id #{rules_id}"} + end + end + + defp next_user_perms_id(%{user_perms: []}), do: 1 + defp next_user_perms_id(%{user_perms: [%{id: id} | _]}), do: id + 1 + + defp initial_global_perms do + %SatPerms.Rules{id: 1} + end end diff --git a/components/electric/test/support/permissions_helpers.ex b/components/electric/test/support/permissions_helpers.ex index 839b80a21a..36444770b6 100644 --- a/components/electric/test/support/permissions_helpers.ex +++ b/components/electric/test/support/permissions_helpers.ex @@ -1,9 +1,143 @@ defmodule ElectricTest.PermissionsHelpers do + defmodule Schema do + alias Electric.Postgres.MockSchemaLoader + alias Electric.Postgres.Extension.SchemaLoader + + def migrations do + [ + {"01", + [ + "create table regions (id uuid primary key, name text)", + "create table offices (id uuid primary key, region_id uuid not null references regions (id))", + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary 
key, issue_id uuid not null references issues (id), comment text, owner text, author_id uuid references users (id))", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id), is_public bool)", + "create table users (id uuid primary key, role text not null default 'normie')", + "create table teams (id uuid primary key)", + "create table tags (id uuid primary key, tag text not null)", + "create table addresses (id uuid primary key, user_id uuid not null references users (id), address text)", + """ + create table issue_tags ( + id uuid primary key, + issue_id uuid not null references issues (id), + tag_id uuid not null references tags (id) + ) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + role text not null, + valid bool + ) + """, + """ + create table team_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + team_id uuid not null references teams (id), + team_role text not null + ) + """, + """ + create table site_admins ( + id uuid primary key, + user_id uuid not null references users (id), + role text not null + ) + """, + """ + create table admin_users ( + id uuid primary key, + user_id uuid not null references users (id) + ) + """, + """ + create table compound_root ( + id1 uuid, + id2 uuid, + primary key (id1, id2) + ) + """, + """ + create table compound_level1 ( + id1 uuid, + id2 uuid, + root_id1 uuid not null, + root_id2 uuid not null, + value1 text, + value2 text, + primary key (id1, id2), + foreign key (root_id1, root_id2) references compound_root (id1, id2) + ) + """, + """ + create table compound_level2 ( + id1 uuid, + id2 uuid, + level1_id1 uuid not null, + level1_id2 uuid not null, + value1 text, + value2 text, + primary key (id1, id2), + foreign key (level1_id1, level1_id2) references compound_level1 (id1, id2) + ) + """, + """ + create table 
compound_memberships ( + id uuid primary key, + root_id1 uuid not null, + root_id2 uuid not null, + user_id uuid not null references users (id), + role text not null, + foreign key (root_id1, root_id2) references compound_root (id1, id2) + ) + """, + """ + create table lotsoftypes ( + id uuid primary key, + user_id uuid not null, + parent_id uuid not null, + name text, + value text, + amount integer, + valid bool, + percent float, + ilist integer[], + slist text[], + inserted_at timestamp with time zone + ) + """ + ]} + ] + end + + def loader(migrations \\ migrations()) do + loader_spec = + MockSchemaLoader.backend_spec(migrations: migrations) + + {:ok, _loader} = SchemaLoader.connect(loader_spec, []) + end + + def load(migrations \\ migrations()) do + {:ok, loader} = loader(migrations) + + {:ok, _schema_version} = SchemaLoader.load(loader) + end + end + defmodule Auth do def user_id do "92bafe18-a818-4a3f-874f-590324140478" end + def not_user_id do + "e0a09d39-d620-4a28-aa18-8d3eacc5da4e" + end + def user(id \\ user_id()) do %Electric.Satellite.Auth{user_id: id} end @@ -49,9 +183,10 @@ defmodule ElectricTest.PermissionsHelpers do Permissions.new(auth, Transient.name()) end - def update(perms, ddlx, roles) do + def update(perms, schema_version, ddlx, roles) do Permissions.update( perms, + schema_version, to_rules(ddlx), roles ) @@ -74,21 +209,29 @@ defmodule ElectricTest.PermissionsHelpers do ddl -> "ELECTRIC " <> ddl end) |> Enum.map(&Electric.DDLX.parse!/1) - |> Enum.flat_map(&Electric.DDLX.Command.to_protobuf/1) - |> Enum.map_reduce(%{assign: 1, grant: 1}, fn - # give each ddlx statement an id - %P.Assign{} = assign, %{assign: id} = s -> - {%{assign | id: "assign-#{id}"}, %{s | assign: id + 1}} - - %P.Grant{} = grant, %{grant: id} = s -> - {%{grant | id: "grant-#{id}"}, %{s | grant: id + 1}} - end) + |> Enum.reduce( + {%P.Rules{}, {1, 1}}, + fn %{action: %{assigns: assigns, grants: grants}}, {rules, {assign_id, grant_id}} -> + # give all the rules deterministic 
ids based on order + # which makes it easier to assign roles to rules in tests + {assigns, assign_id} = + Enum.map_reduce(assigns, assign_id, fn assign, id -> + {%{assign | id: "assign-#{id}"}, id + 1} + end) + + {grants, grant_id} = + Enum.map_reduce(grants, grant_id, fn grant, id -> + {%{grant | id: "grant-#{id}"}, id + 1} + end) + + {%{ + rules + | assigns: rules.assigns ++ assigns, + grants: rules.grants ++ grants + }, {assign_id, grant_id}} + end + ) |> then(&elem(&1, 0)) - |> Enum.group_by(fn - %P.Assign{} -> :assigns - %P.Grant{} -> :grants - end) - |> then(&struct(%P.Rules{}, &1)) end end @@ -103,7 +246,9 @@ defmodule ElectricTest.PermissionsHelpers do end defmodule Chgs do + alias Electric.DDLX.Command alias Electric.Replication.Changes + alias Electric.Postgres.Extension def tx(changes, attrs \\ []) do %Changes.Transaction{changes: changes} @@ -129,6 +274,23 @@ defmodule ElectricTest.PermissionsHelpers do |> put_change_attrs(attrs) end + def ddlx(attrs) when is_list(attrs) do + attrs + |> Command.ddlx() + |> ddlx() + end + + def ddlx(ddlx) do + bytes = Protox.encode!(ddlx) |> IO.iodata_to_binary() + + %Changes.NewRecord{ + relation: Extension.ddlx_relation(), + record: %{ + "ddlx" => bytes + } + } + end + defp put_tx_attrs(tx, attrs) do Map.put(tx, :lsn, LSN.new(attrs[:lsn])) end @@ -143,17 +305,20 @@ defmodule ElectricTest.PermissionsHelpers do defmodule Roles do alias Electric.Satellite.SatPerms, as: P - def role(role_name) do - %P.Role{role: role_name} + def role(role_name, assign_id) do + %P.Role{role: role_name, assign_id: assign_id} end - def role(role_name, table, id, attrs \\ []) do - %P.Role{ - assign_id: attrs[:assign_id], - role: role_name, - user_id: Keyword.get(attrs, :user_id, Auth.user_id()), - scope: %P.Scope{table: relation(table), id: List.wrap(id)} - } + def role(role_name, table, id, assign_id, attrs \\ []) do + struct( + %P.Role{ + assign_id: assign_id, + role: role_name, + user_id: Keyword.get(attrs, :user_id, Auth.user_id()), + 
scope: %P.Scope{table: relation(table), id: List.wrap(id)} + }, + attrs + ) end defp relation({schema, name}) do @@ -168,6 +333,7 @@ defmodule ElectricTest.PermissionsHelpers do @behaviour Electric.Satellite.Permissions.Graph + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes alias Electric.Satellite.Permissions alias Electric.Postgres.Schema.FkGraph @@ -176,31 +342,31 @@ defmodule ElectricTest.PermissionsHelpers do @root :__root__ - def new(vs, fk_edges) do - {__MODULE__, {data_tree(vs), fk_graph(fk_edges)}} + def new(vs, schema) do + {__MODULE__, {data_tree(vs), fk_graph(schema), schema}} end - defp fk_graph(fk_edges) do - FkGraph.new(fk_edges) + defp fk_graph(%SchemaLoader.Version{schema: schema}) do + FkGraph.for_schema(schema) end defp graph(attrs \\ []) do Permissions.Graph.graph(attrs) end - def add_vertex({__MODULE__, {graph, fks}}, v) do + def add_vertex({__MODULE__, {graph, fks, schema}}, v) do graph = Graph.add_vertex(graph, v) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end - def delete_vertex({__MODULE__, {graph, fks}}, v) do + def delete_vertex({__MODULE__, {graph, fks, schema}}, v) do graph = Graph.delete_vertex(graph, v) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end - def add_edge({__MODULE__, {graph, fks}}, a, b) do + def add_edge({__MODULE__, {graph, fks, schema}}, a, b) do graph = Graph.add_edge(graph, a, b) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end defp data_tree(vs) do @@ -209,11 +375,15 @@ defmodule ElectricTest.PermissionsHelpers do graph end + defp build_data_tree({table, id, children}, {parent, graph}) when is_list(children) do + build_data_tree({table, id, %{}, children}, {parent, graph}) + end + defp build_data_tree({table, id}, {parent, graph}) do - build_data_tree({table, id, []}, {parent, graph}) + build_data_tree({table, id, %{}, []}, {parent, graph}) end - defp build_data_tree({_table, _id, children} = v, {parent, graph}) do + 
defp build_data_tree({_table, _id, _attrs, children} = v, {parent, graph}) do graph = Graph.add_edge(graph, v(v), v(parent)) {_v, graph} = Enum.reduce(children, {v, graph}, &build_data_tree/2) @@ -222,15 +392,15 @@ defmodule ElectricTest.PermissionsHelpers do defp v(@root), do: @root - defp v({table, id, _children}) do - {table, [id]} + defp v({table, id, _attrs, _children}) do + {table, List.wrap(id)} end def scope_id(_state, {_, _} = root, {_, _} = root, id) when is_list(id) do [{id, [{root, id}]}] end - def scope_id({graph, fks}, {_, _} = root, {_, _} = relation, id) when is_list(id) do + def scope_id({graph, fks, _schema}, {_, _} = root, {_, _} = relation, id) when is_list(id) do graph |> Permissions.Graph.traverse_fks(fk_path(fks, root, relation), relation, id) |> Enum.flat_map(fn @@ -240,7 +410,8 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def scope_path({graph, fks}, {_, _} = root, {_, _} = relation, id) when is_list(id) do + def scope_path({graph, fks, _schema}, {_, _} = root, {_, _} = relation, id) + when is_list(id) do graph |> Permissions.Graph.traverse_fks(fk_path(fks, root, relation), relation, id) |> Enum.flat_map(fn @@ -250,7 +421,11 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def modified_fks({_graph, fks} = state, {_, _} = root, %Changes.UpdatedRecord{} = update) do + def modified_fks( + {_graph, fks, _schema} = state, + {_, _} = root, + %Changes.UpdatedRecord{} = update + ) do %Changes.UpdatedRecord{ changed_columns: changed_columns, old_record: old, @@ -259,7 +434,7 @@ defmodule ElectricTest.PermissionsHelpers do } = update case FkGraph.foreign_keys(fks, root, relation) do - [] -> + nil -> [] foreign_keys -> @@ -291,7 +466,7 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def parent({_graph, fks}, {_, _} = root, relation, record) when is_map(record) do + def parent({_graph, fks, _schema}, {_, 
_} = root, relation, record) when is_map(record) do with [^relation, parent_rel | _] <- FkGraph.path(fks, root, relation), [_ | _] = relations <- FkGraph.foreign_keys(fks, root, relation), {^parent_rel, fk_cols} <- Enum.find(relations, &match?({^parent_rel, _}, &1)) do @@ -302,34 +477,42 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def apply_change({graph, fks} = state, roots, change) do + def apply_change({graph, fks, schema} = state, roots, change) do updated = Enum.reduce(roots, graph, fn root, graph -> case change do - %Changes.DeletedRecord{relation: relation, old_record: %{"id" => id}} -> - Graph.delete_vertex(graph, {relation, [id]}) + %Changes.DeletedRecord{relation: relation, old_record: old} -> + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(old, &1)) + + Graph.delete_vertex(graph, {relation, pks}) + + %Changes.NewRecord{relation: relation, record: record} -> + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(record, &1)) - %Changes.NewRecord{relation: relation, record: %{"id" => id} = record} -> case parent(state, root, relation, record) do nil -> - Graph.add_vertex(graph, {relation, [id]}) + Graph.add_vertex(graph, {relation, pks}) parent -> validate_fk!(graph, parent) - Graph.add_edge(graph, {relation, [id]}, parent) + Graph.add_edge(graph, {relation, pks}, parent) end # we copy the satellite and treat all updates as upserts %Changes.UpdatedRecord{} = change -> - %{relation: relation, old_record: old, record: %{"id" => id} = new} = change + %{relation: relation, old_record: old, record: new} = change case modified_fks(state, root, change) do [] -> graph modified_keys -> - child = {relation, [id]} + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(old, &1)) + child = {relation, pks} Enum.reduce(modified_keys, graph, fn 
{^relation, _old_id, _new_id}, graph -> @@ -355,7 +538,7 @@ defmodule ElectricTest.PermissionsHelpers do end end) - {updated, fks} + {updated, fks, schema} end defp validate_fk!(graph, parent) do @@ -365,11 +548,6 @@ defmodule ElectricTest.PermissionsHelpers do end end - @impl Electric.Satellite.Permissions.Graph - def relation_path({_graph, fks}, root, relation) do - fk_path(fks, root, relation) - end - defp fk_path(_fks, root, root) do [root] end @@ -383,9 +561,104 @@ defmodule ElectricTest.PermissionsHelpers do Electric.Utils.inspect_relation(relation) end - def perms_build(grants, roles, attrs \\ []) do + def perms_build(cxt, grants, roles, attrs \\ []) do + %{schema_version: schema_version} = cxt + attrs |> Perms.new() - |> Perms.update(grants, roles) + |> Perms.update(schema_version, grants, roles) + end + + defmodule Proto do + alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms + + def table(schema \\ "public", name) do + %SatPerms.Table{schema: schema, name: name} + end + + def scope(schema \\ "public", name) do + table(schema, name) + end + + def role(name) do + %SatPerms.RoleName{role: {:application, name}} + end + + def authenticated() do + %SatPerms.RoleName{role: {:predefined, :AUTHENTICATED}} + end + + def anyone() do + %SatPerms.RoleName{role: {:predefined, :ANYONE}} + end + + def assign(attrs) do + SatPerms.Assign |> struct(attrs) |> Command.put_id() + end + + def unassign(attrs) do + SatPerms.Unassign |> struct(attrs) |> Command.put_id() + end + + def grant(attrs) do + SatPerms.Grant |> struct(attrs) |> Command.put_id() + end + + def revoke(attrs) do + SatPerms.Revoke |> struct(attrs) |> Command.put_id() + end + + def sqlite(stmt) do + %SatPerms.Sqlite{stmt: stmt} |> Command.put_id() + end + + def encode(struct) do + Protox.encode!(struct) |> IO.iodata_to_binary() + end + end + + defmodule Server do + use Electric.Postgres.MockSchemaLoader + + alias ElectricTest.PermissionsHelpers.{ + Tree + } + + alias Electric.Satellite.Permissions + 
+ def setup(cxt) do + %{migrations: migrations, data: data} = cxt + + loader_spec = MockSchemaLoader.backend_spec(migrations: migrations) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + + {:ok, tree: Tree.new(data, schema_version), loader: loader, schema_version: schema_version} + end + + def reset(cxt) do + cxt + end + + def name, do: "Server" + + def perms(cxt, grants, roles, attrs \\ []) do + ElectricTest.PermissionsHelpers.perms_build(cxt, grants, roles, attrs) + end + + def table(relation) do + Electric.Utils.inspect_relation(relation) + end + + def apply_change({Tree, tree}, roots, tx) do + tree = Tree.apply_change(tree, roots, tx) + {Tree, tree} + end + + def validate_write(perms, tree, tx) do + Permissions.validate_write(perms, tree, tx) + end end end diff --git a/docs/api/ddlx.md b/docs/api/ddlx.md index 3bdc566339..d1341692db 100644 --- a/docs/api/ddlx.md +++ b/docs/api/ddlx.md @@ -10,7 +10,8 @@ ElectricSQL extends the PostgreSQL language with the following DDLX statements. Use these in your [migrations](../usage/data-modelling/migrations.md) to [electrify tables](../usage/data-modelling/electrification.md) and expose data by granting [permissions](../usage/data-modelling/permissions.md) to roles and assigning roles to [authenticated](../usage/auth/index.md) users. :::caution Work in progress -The syntax and features described in this page are not fully implemented. Currently, DDLX rules are limited to [enabling electrification](#enable). See [Roadmap -> DDLX rules](../reference/roadmap.md#ddlx-rules) for more context. +The syntax and features described in this page are not fully implemented. There are some notes below on which parts are not implemented yet. +See [Roadmap -> DDLX rules](../reference/roadmap.md#ddlx-rules) for more context. ::: ## Electrification @@ -78,16 +79,25 @@ See for more information Grants permissions to roles. 
+:::caution Work in progress +The column_name statement is not implemented yet, but is coming soon. +We have included it here as it's helpful in explaining how the permissions will work overall. +::: + ```sql -ELECTRIC GRANT { { SELECT | INSERT | UPDATE | DELETE } - ( column_name [, ...] ) - [, ...] | ALL [ PRIVILEGES ] ( column_name [, ...] ) - | READ ( column_name [, ...] ) - | WRITE ( column_name [, ...] )} - ON [ TABLE ] table_name [, ...] - TO role_name [, ...] - [ USING scope_path ] - [ CHECK ( check_expression )]; +ELECTRIC GRANT + { SELECT | INSERT | UPDATE | DELETE | READ | WRITE | ALL [ PRIVILEGES ] } [ ( column_name [, ...] ) ] + ON [ TABLE ] table_name + TO role + [ WHERE ( check_expression )]; +``` + +Where `role` is: + +```sql +{ ( scope_table_name, 'role_name' ) + | { 'role_name' | AUTHENTICATED | ANYONE } +} ``` Grants ones of these four permissions on the table `table_name` to the role `role_name`: @@ -109,26 +119,31 @@ You can grant a permission on a whole table or only on specific columns by givin This is very similar to the standard PostgreSQL `GRANT` for tables, but extends the syntax to give more fine grained control over who can do what. These are the main differences: 1. you can, optionally, define a scope in which the grant applies -2. you can add a `CHECK` constraint to make permissions dependent on the content of rows +2. you can add a `WHERE` constraint to make permissions dependent on the content of rows 3. the roles referred to are ElectricSQL specific role names rather than usual Postgres roles 4. these roles are assigned to users with the [`ASSIGN`](#assign) statement below 5. only four permissions (directly, or via their aliases) can be granted with this statement The optional scope table `scope_table_name` may be the same table as `table_name` or another table. Using a scope lets you limit where this grant applies e.g. you can grant permissions on the content of a project to only admins of that specific project. 
-As well as role names you create, there are a couple of built in roles automatically provided by ElectricSQL: +As well as role names you create, there are a couple of built-in roles automatically provided by ElectricSQL: - `AUTHENTICATED` - `ANYONE` Users have a set of roles. Every user will have the `ANYONE` role and authenticated users will have the `AUTHENTICATED` role. -If you add a `CHECK` clause, then when the permission is used the `check_expression` will be evaluated against any existing row and any data being written. If it evaluates as false then the operation will fail. +If you add a `WHERE` clause, then when the permission is used the `check_expression` will be evaluated against any existing row and any data being written. If it evaluates as false then the operation will fail. -The `check_expression` also has the value `auth.user_id` in scope which will hold the user id of the current authenticated user. This can be used to validate that a column contains the current user id, which is very useful for having a reliable reference to the user who inserted a row. +The `check_expression` is a sql boolean expression. This expression will have various pre-defined variables available to validate the action. + +- AUTH the auth state for the connection is available for every operation. Has the user_id (which may be null) defined by the authentication token and also the claims field which provides all the claims from the JWT. +- NEW available for INSERT and UPDATE operations, NULL in DELETE operations +- OLD available for DELETE and UPDATE operations, NULL in INSERT operations +- ROW available for SELECT operations. :::note -If you have added additional claims to the authentication JWT then they can also be referenced in the `check_expression`. By convention, they should be added as a `data` claim, which is available at `auth.data`. +If you have added additional claims to the authentication JWT then they can also be referenced in the `check_expression`.
They are available at `auth.claims`. This can be used to extend authorisation and provide additional discrimination between users that is not modelled in the data and hence not available to the `ASSIGN` mechanism. However, if possible, it is generally best to use `ASSIGN` rather than add complex extra claims to the JWT. This is because the state of the permissions via `ASSIGN` will be consistent with the contents of the database (i.e.: part of the same snapshot) whereas those in the JWT are tied to the lifecycle of the client's connection and are thus less responsive and not guaranteed to be consistent with the contents of the database. ::: @@ -136,11 +151,9 @@ This can be used to extend authorisation and provide additional discrimination b #### Parameters - **`column_name`** - you can provide one or more column names that this permission will apply to. -If you don't give any column names then the grant applies to the whole table +If you don't give any column names then the grant applies to the whole table. - **`table_name`** - the name of an existing electrified table table on which to grant the permission. -- **`role_name`** - the name of a role that has been assigned to users with an `ELECTRIC ASSIGN` statement. Role names are strings enclosed in single quotes. For scoped roles this must be in the form `'scope_table:role'` -- **`scope_path`** - if there is any ambiguity about from where to read the id for the scope that a row belongs to then this can be used to specify it. If there is a single foreign key constraint in `table_name` pointing to the `scope_table_name` table then this can be inferred and is not necessary. If there is more than one foreign key pointing to the `scope_table_name` table then `scope_path` should be the name of the column in `table_name` to use. 
If there is no foreign key in the table `table_name` pointing directly to the `scope_table_name` table you can specify a path -indicating where to find the foreign key by walking through intermediate foreign keys, there is an example below +- **`role_name`** - the name of a role that has been assigned to users with an `ELECTRIC ASSIGN` statement. - **`check_expression`** - a sql expression that will be evaluated when the permission is used #### Examples @@ -173,7 +186,7 @@ This grant with a scope gives all permissions on the `projects` table to users w ```sql ELECTRIC GRANT ALL ON projects - TO 'projects:admin'; + TO (projects, 'admin'); ``` This grant lets users who have the role `member` in a project read the project's issues. Here `project_id` refers to a column on the `issues` table that is a foreign key pointing to a project. @@ -181,8 +194,7 @@ This grant lets users who have the role `member` in a project read the project's ```sql ELECTRIC GRANT READ ON issues - TO 'projects:member' - USING project_id; + TO (projects, 'member'); ``` This is similar to the grant above. It lets a project member read comments on issues in a project, but the comments table doesn't itself have a foreign key pointing to the project so the `USING` parameter provides a path to where to find it. @@ -190,8 +202,7 @@ This is similar to the grant above. It lets a project member read comments on is ```sql ELECTRIC GRANT READ ON comments - TO 'projects:member' - USING issue_id/project_id; + TO (projects, 'member'); ``` Here an `admin` can add project members with any role to the `project_members` join table, but a member can only add people as `member` or `guest`. The `CHECK` statement limits what they can do.
@@ -199,29 +210,29 @@ Here an `admin` can add project members with any role to the `project_members` j ```sql ELECTRIC GRANT INSERT ON project_members - TO 'projects:admin'; + TO (projects, 'admin'); ELECTRIC GRANT READ ON project_members - TO 'projects:member'; + TO (projects, 'member'); ELECTRIC GRANT INSERT ON project_members - TO 'projects:member' - CHECK ( - new.role_name = 'member' - OR new.role_name = 'guest' + TO (projects, 'member') + WHERE ( + NEW.role_name = 'member' + OR NEW.role_name = 'guest' ); ``` -Here any authenticated user can create a new project if they correctly set the owner_id of the new project to their user_id: +Here any authenticated user can create a new project if they correctly set the `owner_id` of the new project to their `user_id`: ```sql ELECTRIC GRANT INSERT ON projects - TO 'AUTHENTICATED' - CHECK ( - new.owner_id = auth.user_id + TO AUTHENTICATED + WHERE ( + NEW.owner_id = AUTH.user_id ); ``` @@ -233,22 +244,45 @@ ELECTRIC GRANT READ ( description ) ON issues - TO 'projects:member' - USING project_id; + TO (projects, 'member'); ``` +#### Updating grants + +The current grant state for a given permission, table and role is determined by the last `GRANT` command issued. Grants are not merged together. + +For example, after this sequence of grant commands: + +```sql +ELECTRIC GRANT READ ON issues TO (projects, 'member'); +ELECTRIC GRANT READ ON comments TO (projects, 'member'); +ELECTRIC GRANT READ (title, description, date) ON issues TO (projects, 'member'); +ELECTRIC GRANT READ (title, status) ON issues TO (projects, 'member'); +``` + +The `(projects, 'member')` role will have `READ` permissions to the entire `comments` table but only the `title` and `status` columns of `issues` (within the `projects` scope). + +If you then ran `ELECTRIC GRANT READ ON issues TO (projects, 'member')` the access would again widen to all columns of `issues`. 
+ +The `WHERE` expression uses the same logic as the column specification, the last write for a specific permission, table and role tuple wins. + ### `REVOKE` Revokes previously granted permissions. ```sql -ELECTRIC REVOKE { { SELECT | INSERT | UPDATE | DELETE } - ( column_name [, ...] ) - [, ...] | ALL [ PRIVILEGES ] ( column_name [, ...] ) - | READ ( column_name [, ...] ) - | WRITE ( column_name [, ...] )} - ON [ TABLE ] table_name [, ...] - FROM role_name [, ...]; +ELECTRIC REVOKE + { SELECT | INSERT | UPDATE | DELETE | READ | WRITE | ALL [ PRIVILEGES ] } + ON [ TABLE ] table_name + FROM role; +``` + +Where `role` is: + +```sql +{ ( scope_table_name, 'role_name' ) + | { 'role_name' | AUTHENTICATED | ANYONE } +} ``` You can specify one of these permissions: @@ -260,14 +294,6 @@ You can specify one of these permissions: Or you can use `ALL`, `READ` or `WRITE` as for [`GRANT`](#grant) above. -:::note -Like the native PostgreSQL version of `GRANT` there is a subtlety to revoking grants for columns that is worth noting. A whole row permission, granted by not specifying any column names is a separate permission from a collection of column specific permissions. - -This means that if you have previously granted a whole row permission you cannot then revoke a single column, it would be a no-op. You would have to instead revoke the permission for the whole table and add back the permissions for the rows you do want. - -Likewise, you can't revoke a column level permission by revoking access to the whole table. You need to issue revoke statement that matches the column spec of the previous grant. -::: - #### Parameters - **`column_name`** The names of the columns you want to remove this permission for. 
@@ -281,11 +307,11 @@ This shows the granting and revoking of permissions using matching `GRANT` and ` ```sql ELECTRIC GRANT ALL ON projects - TO 'projects:admin'; + TO (projects, 'admin'); ELECTRIC REVOKE ALL ON projects - FROM 'projects:admin'; + FROM (projects, 'admin'); ``` As `ALL` acts as an alias for all the other permissions this will result in global `'admin'`s having the `INSERT`, `SELECT` and `UPDATE` permissions on the table `records`. @@ -300,18 +326,14 @@ ELECTRIC REVOKE DELETE FROM 'admin'; ``` -Whole row grants are different from individual column grants so removing a column will not affect the whole table grant. Here users with the global `'admin'` role will still be able to update the name column. This is consistent with the standard behaviour for column specific grants in Postgres. +Revocation works at the permissions level for a given table and role. Revoking e.g. `READ` from a table for a given role will remove that role's read access to the table, no matter what the column specification for the read was. The code below will completely remove `UPDATE` rights for the `admin` role. ```sql -ELECTRIC GRANT UPDATE - ON records - TO 'admin'; +-- grant partial update rights to `admin`s +ELECTRIC GRANT UPDATE (name) ON records TO 'admin'; -ELECTRIC REVOKE UPDATE ( - name - ) - ON records - FROM 'admin'; +-- remove update rights from admin +ELECTRIC REVOKE UPDATE ON records FROM 'admin'; ``` ### `ASSIGN` @@ -321,7 +343,6 @@ Assigns a role to an [authenticated user](../usage/auth/index.md). ```sql ELECTRIC ASSIGN role_definition TO table_name.user_fk - [ USING scope_path ] [ IF if_statement ]; ``` @@ -343,7 +364,7 @@ dynamically read from a column in the `table_name`. These are string literals either with or without a scope table. 
- `'admin'` - global admin role -- `'projects:admin'` - project admin role, scoped to the `projects` table +- `(projects, 'admin')` - project admin role, scoped to the `projects` table ##### Dynamic role definitions @@ -352,13 +373,12 @@ These specify a database column to read the role value from, using tuple syntax - `users.role_name` - read a global (unscoped) role name from the `users.role_name` column - `(projects, memberships.role_name)` - read the role name from the `memberships.role_name` column and then concatenate with the `projects` scope -In the first example above the global role assigned will be read from the column `role_name` in the table `users`. In the second example the scoped role is read from the column `role_name` in the table `memberships` and then concatenated with the `projects` scope. So, for example, if the `memberships.role_name` column contained the string `'admin'` then the scoped role assigned would be equivalent to the literal `'projects:admin'`. +In the first example above the global role assigned will be read from the column `role_name` in the table `users`. In the second example the scoped role is read from the column `role_name` in the table `memberships` and then concatenated with the `projects` scope. So, for example, if the `memberships.role_name` column contained the string `'admin'` then the scoped role assigned would be equivalent to the literal `(projects, 'admin')`. :::note -You can always user the longer syntax for role definitions if you prefer or are writing them programmatically. +You can always use the longer syntax for role definitions if you prefer or are writing them programmatically. 
- `'admin'` can be written as `(NULL, 'admin')` -- `'projects:admin'` can be written as `(projects, 'admin')` - `users.role_name` can be written as `(NULL, users.role_name)` ::: @@ -367,7 +387,6 @@ You can always user the longer syntax for role definitions if you prefer or are - **`role_definition`** - the definition of a role as described above - **`table_name`** - the name of an electrified table that holds the users foreign keys to assign roles to - **`user_fk`** - the name of the column holding the foreign key of the users to be assigned the role -- **scope_path** - if there is any ambiguity about how to link from `table_name` to the `scope_table_name`, this can be used to specify the foreign key path. If there is a single foreign key constraint in `table_name` pointing to the scope_table_name table then this is not necessary, ElectricSQL will work out which column to use. If there is more than one foreign key pointing to the scope_table_name table then scope_path should be the name of the column in table_name to use. If there is no foreign key in the table table_name pointing directly to the scope_table_name table you can specify a path indicating where to find the foreign key by walking through intermediate foreign keys - **`if_statement`** - optionally add a statement that will be evaluated against the row in `table_name`. The assignment rule will only assign the role if it evaluates as true. This is useful to assign roles dependent on things like booleans or specific string values #### Examples @@ -415,10 +434,10 @@ Then the user with ID `21ba776e-cced-46de-9bb7-631dc9043287` would be granted ad In the next example, explicitly named roles are assigned to users using different fields on the same table. 
```sql -ELECTRIC ASSIGN 'deliveries:driver' +ELECTRIC ASSIGN (deliveries, 'driver') TO deliveries.driver_id; -ELECTRIC ASSIGN 'deliveries:customer' +ELECTRIC ASSIGN (deliveries, 'customer') TO deliveries.customer_id; ``` @@ -464,12 +483,14 @@ Apart from the `IF` clause, an `UNASSIGN` statement must match its the correspon ```sql ELECTRIC ASSIGN project_members.role TO project_members.user_id; + ELECTRIC UNASSIGN project_members.role FROM project_members.user_id; ELECTRIC ASSIGN 'record.reader' TO user_permissions.user_id IF ( can_read_records ); + ELECTRIC UNASSIGN 'record.reader' FROM user_permissions.user_id; ``` @@ -496,11 +517,13 @@ This allows you to propagate migrations to local devices and work around any mis #### Parameters -- **`sqlite_statement`** - a string holding a valid SQLite statements; seperate multiple statements with `;` delimiters +- **`sqlite_statement`** - a string holding valid SQLite statements; separate multiple statements with `;` delimiters #### Examples ```sql -ELECTRIC SQLITE - 'CREATE TABLE local_only (id TEXT primary key);' +-- use PG's dollar quoted strings to avoid having to escape single quotes in the SQLite statements +ELECTRIC SQLITE $sqlite$ + CREATE TABLE local_only (id TEXT PRIMARY KEY); + $sqlite$; ``` diff --git a/docs/intro/sync-controls.md b/docs/intro/sync-controls.md index 0cb2cb035d..e5ef0bf283 100644 --- a/docs/intro/sync-controls.md +++ b/docs/intro/sync-controls.md @@ -21,7 +21,7 @@ ALTER TABLE projects Then *users* are assigned *roles* based on their authentication state (usually by matching the `user_id` in their [authentication token](../usage/auth/index.md) with foreign keys to your users table): ```sql -ELECTRIC ASSIGN 'projects:owner' +ELECTRIC ASSIGN (projects, 'owner') TO projects.owner_id; ``` @@ -30,7 +30,7 @@ ELECTRIC ASSIGN 'projects:owner' ```sql ELECTRIC GRANT ALL ON projects - TO 'projects:owner'; + TO (projects, 'owner'); ``` Data only replicates onto a user's device if that user has permission to
read it and only replicates off from their device if they have permission to write it. diff --git a/docs/usage/data-modelling/migrations.md b/docs/usage/data-modelling/migrations.md index 65e1ddaf03..88632314b5 100644 --- a/docs/usage/data-modelling/migrations.md +++ b/docs/usage/data-modelling/migrations.md @@ -34,12 +34,12 @@ For example, assuming you have an existing table called `projects`, you can elec ALTER TABLE projects ENABLE ELECTRIC; -ELECTRIC ASSIGN 'projects:owner' +ELECTRIC ASSIGN (projects, 'owner') TO projects.owner_id; ELECTRIC GRANT ALL ON projects - TO 'projects:owner'; + TO (projects, 'owner'); ELECTRIC GRANT SELECT ON projects diff --git a/docs/usage/data-modelling/permissions.md b/docs/usage/data-modelling/permissions.md index fd53b50dc6..f72fd8e694 100644 --- a/docs/usage/data-modelling/permissions.md +++ b/docs/usage/data-modelling/permissions.md @@ -22,11 +22,11 @@ In the example below, we grant `ALL` permissions on projects to the project owne ```sql ELECTRIC GRANT ALL ON projects - TO 'projects:owner'; + TO (projects, 'owner'); ELECTRIC GRANT SELECT ON projects - TO 'projects:member'; + TO (projects, 'member'); ``` See for more details on how to grant and revoke permissions. @@ -38,10 +38,10 @@ Assign **roles** to **users** using the [`ASSIGN`](../../api/ddlx.md#assign) sta In the example below, we assign the role of project owner to the user whose [authenticated `user_id`](../auth/index.md) matches the project's `owner_id` column. And we use a join table of `project_memberships` to assign the role of project member. ```sql -ELECTRIC ASSIGN 'projects:owner' +ELECTRIC ASSIGN (projects, 'owner') TO projects.owner_id; -ELECTRIC ASSIGN 'projects:member' +ELECTRIC ASSIGN (projects, 'member') TO project_memberships.user_id; ``` @@ -83,20 +83,15 @@ That's what **permission scopes** are for. 
They simplify your access rules in a ```sql ELECTRIC GRANT ALL ON issues - TO 'projects:owner'; + TO (projects, 'owner'); ELECTRIC GRANT ALL ON comments - TO 'projects:owner' - USING issue_id/project_id; + TO (projects, 'owner'); -ELECTRIC GRANT READ - ON users - TO 'projects:owner' - USING comment_author_fkey/issue_id/project_id; ``` -Here the first statement assumes an unambiguous foreign key path between the `issues` and `projects` tables. The second statement demonstrates explicitly specifying the foreign key traversal path. The third demonstrates specifying part of the scope path using a named fkey (the `comment_author_fkey`, which belongs to the `comments` table not the `users` table). +In both cases, there must exist an unambiguous foreign key path between both the `issues` and `comments` tables and `projects`. Electric will use this foreign key path to place updates to rows in both the `issues` and `comments` table into the scope of a particular project and then determine the current user's role within that project in order to determine the validity of a read or write. 
## Directionality diff --git a/e2e/tests/01.05_electric_can_recreate_publication.lux b/e2e/tests/01.05_electric_can_recreate_publication.lux index 11a37df793..e42fe3290b 100644 --- a/e2e/tests/01.05_electric_can_recreate_publication.lux +++ b/e2e/tests/01.05_electric_can_recreate_publication.lux @@ -21,19 +21,17 @@ !SELECT schemaname, tablename FROM pg_publication_tables \ WHERE pubname = 'electric_publication' ORDER BY tablename; ??electric | acknowledged_client_lsns - ??electric | assignments ??public | baz ??electric | ddl_commands + ??electric | ddlx_commands ??electric | electrified ??public | foo - ??electric | grants ??public | items - ??electric | roles ??electric | shadow__public__baz ??electric | shadow__public__foo ??electric | shadow__public__items ??electric | transaction_marker - ??(13 rows) + ??(11 rows) # Make sure Electric consumes all migrations from the replication stream before stopping it. [shell electric] @@ -63,19 +61,17 @@ !SELECT schemaname, tablename FROM pg_publication_tables \ WHERE pubname = 'electric_publication' ORDER BY tablename; ??electric | acknowledged_client_lsns - ??electric | assignments ??public | baz ??electric | ddl_commands + ??electric | ddlx_commands ??electric | electrified ??public | foo - ??electric | grants ??public | items - ??electric | roles ??electric | shadow__public__baz ??electric | shadow__public__foo ??electric | shadow__public__items ??electric | transaction_marker - ??(13 rows) + ??(11 rows) [cleanup] [invoke teardown] diff --git a/e2e/tests/06.02_permissions_change_propagation.lux b/e2e/tests/06.02_permissions_change_propagation.lux new file mode 100644 index 0000000000..172078921f --- /dev/null +++ b/e2e/tests/06.02_permissions_change_propagation.lux @@ -0,0 +1,96 @@ +[doc Permissions changes are propagated to client connection] +[include _shared.luxinc] + +[invoke setup] + +[global migration_version_1=20231109154018] +[global migration_version_2=20240226114300] +[global 
user_id1=95f21e62-4b90-49c3-874a-174eb17e58cf] +[global user_id2=31377df9-c659-493e-b26f-1ce5fbb0b2df] +[global session_id=004d3e42-d072-4a60-9513-93ddd843d478] +[global project_id=99adf0a5-b3c6-45d7-9986-582e76db4556] + + +[shell proxy_1] + [invoke log "run migration $migration_version_1 on postgres"] + """! + BEGIN; + CALL electric.migration_version('$migration_version_1'); + CREATE TABLE "projects" ( + id uuid NOT NULL PRIMARY KEY + ); + CREATE TABLE "issues" ( + id uuid NOT NULL PRIMARY KEY, + project_id uuid NOT NULL REFERENCES projects (id) + ); + CREATE TABLE "users" ( + id uuid NOT NULL PRIMARY KEY + ); + CREATE TABLE "project_memberships" ( + id uuid NOT NULL PRIMARY KEY, + project_id uuid NOT NULL REFERENCES projects (id), + user_id uuid NOT NULL REFERENCES users (id), + role text NOT NULL + ); + + ALTER TABLE "projects" ENABLE ELECTRIC; + ALTER TABLE "issues" ENABLE ELECTRIC; + ALTER TABLE "users" ENABLE ELECTRIC; + ALTER TABLE "project_memberships" ENABLE ELECTRIC; + + INSERT INTO users (id) VALUES ('$user_id1'); + INSERT INTO users (id) VALUES ('$user_id2'); + INSERT INTO projects (id) VALUES ('$project_id'); + COMMIT; + """ + ?$psql + + +[shell electric] + ?? [info] Applying migration $migration_version_1 + +[newshell user_1_ws1] + -$fail_pattern + [invoke start_elixir_test 1] + [invoke client_session $user_id1 $session_id] + + !alias Electric.Satellite.{SatRelation, SatRelationColumn, SatOpInsert, SatOpUpdate, SatOpRow} + ?$eprompt + +[shell proxy_1] + [invoke log "run migration $migration_version_2 on postgres"] + """!
+ BEGIN; + CALL electric.migration_version('$migration_version_2'); + ELECTRIC ASSIGN (projects, project_memberships.role) TO project_memberships.user_id; + ELECTRIC GRANT ALL ON projects TO (projects, 'member'); + ELECTRIC GRANT ALL ON issues TO (projects, 'member'); + COMMIT; + """ + ?$psql + +[shell electric] + ?user_id=$user_id1 .+ Global permissions updated for connection + +[shell pg_1] + !INSERT INTO project_memberships (id, project_id, user_id, role) VALUES ('c197a4ef-0f22-4af1-acb1-bf7200e64900', '$project_id', '$user_id1', 'member'); + ?$psql + +[shell electric] + ?user_id=$user_id1 .+ User permissions updated for connection + +## role for non-connected user isn't being accepted by connection +[shell pg_1] + !INSERT INTO project_memberships (id, project_id, user_id, role) VALUES ('386fff23-181e-4386-85a2-9a430795a23c', '$project_id', '$user_id2', 'member'); + ?$psql + +[shell electric] + -user_id=$user_id2 .+ User permissions updated for connection + ?%Electric.Replication.Changes.NewRecord{relation: {"public", "project_memberships"}, \ + record: %{"id" => "386fff23-181e-4386-85a2-9a430795a23c", \ + "project_id" => "99adf0a5-b3c6-45d7-9986-582e76db4556", \ + "role" => "member", \ + "user_id" => "$user_id2"} + +[cleanup] + [invoke teardown] diff --git a/e2e/tests/compose.yaml b/e2e/tests/compose.yaml index 90872355c9..5c5a50a60c 100644 --- a/e2e/tests/compose.yaml +++ b/e2e/tests/compose.yaml @@ -19,6 +19,7 @@ services: ELECTRIC_WRITE_TO_PG_MODE: "${ELECTRIC_WRITE_TO_PG_MODE:-logical_replication}" LOGICAL_PUBLISHER_HOST: electric_1 PG_PROXY_LOG_LEVEL: info + ELECTRIC_FEATURES: "proxy_ddlx_grant=true:proxy_ddlx_assign=true" ports: - "5133:5133" # proxy access diff --git a/protocol/satellite.proto b/protocol/satellite.proto index e4e7b71d7c..f31e066898 100644 --- a/protocol/satellite.proto +++ b/protocol/satellite.proto @@ -641,7 +641,7 @@ message SatPerms { } message Scope { Table table = 1; - string id = 2; + repeated string id = 2; } enum Privilege { 
DELETE = 0; @@ -659,16 +659,27 @@ message SatPerms { string application = 2; } } + message ColumnList { + repeated string names = 5; + } message Grant { string id = 1; Table table = 2; RoleName role = 3; - repeated Privilege privileges = 4; - repeated string columns = 5; + Privilege privilege = 4; + optional ColumnList columns = 5; optional Table scope = 6; optional Path path = 7; optional string check = 8; } + message Revoke { + string id = 1; + Table table = 2; + RoleName role = 3; + Privilege privilege = 4; + optional Table scope = 6; + optional Path path = 7; + } message Assign { string id = 1; Table table = 2; @@ -678,24 +689,54 @@ message SatPerms { optional Table scope = 6; optional string if = 7; } - message Role { + message Unassign { string id = 1; + Table table = 2; + optional string user_column = 3; + optional string role_column = 4; + optional string role_name = 5; + optional Table scope = 6; + } + + message Sqlite { + string stmt = 1; + } + + message Role { + repeated string row_id = 1; string role = 2; string user_id = 3; string assign_id = 4; optional Scope scope = 5; } + message DDLX { + repeated Grant grants = 1; + repeated Revoke revokes = 2; + repeated Assign assigns = 3; + repeated Unassign unassigns = 4; + repeated Sqlite sqlite = 5; + } + // split the rules and roles info into distinct messages so they can be // serialized separately message Rules { - repeated Grant grants = 1; - repeated Assign assigns = 2; + uint64 id = 1; + optional uint64 parent_id = 2; + repeated Grant grants = 3; + repeated Assign assigns = 4; } + message Roles { - repeated Role roles = 2; + uint64 id = 1; + optional uint64 parent_id = 2; + uint64 rules_id = 3; + repeated Role roles = 4; } + // this id is the id of the user permissions, this struct is the user + // permissions fused with the global permissions at that point in time int64 id = 1; + string user_id = 2; Rules rules = 3; - Roles roles = 4; + repeated Role roles = 4; }