From ae8b532334d980aec4e04e5cbc0f67628f70a9c6 Mon Sep 17 00:00:00 2001 From: Garry Hill Date: Tue, 5 Mar 2024 10:51:22 +0000 Subject: [PATCH] Generate triggers to apply permissions in sqlite --- .../src/_generated/protocol/satellite.ts | 21 +- .../extension/schema_loader/version.ex | 23 + .../lib/electric/postgres/schema/fk_graph.ex | 136 +- .../lib/electric/satellite/permissions.ex | 11 +- .../electric/satellite/permissions/client.ex | 884 +++++++ .../satellite/permissions/client/format.ex | 138 ++ .../satellite/permissions/client/sqlite.ex | 49 + .../electric/satellite/permissions/grant.ex | 12 + .../electric/satellite/protobuf_messages.ex | 71 +- .../postgres/schema/fk_graph_test.exs | 27 +- .../satellite/permissions/client_test.exs | 312 +++ .../satellite/permissions/helper_test.exs | 18 +- .../satellite/permissions/join_table_test.exs | 87 +- .../satellite/permissions/trigger_test.exs | 37 +- .../permissions/write_buffer_test.exs | 52 +- .../electric/satellite/permissions_test.exs | 2095 ++++++++++------- .../test/support/permissions_helpers.ex | 470 +++- protocol/satellite.proto | 6 + 18 files changed, 3431 insertions(+), 1018 deletions(-) create mode 100644 components/electric/lib/electric/satellite/permissions/client.ex create mode 100644 components/electric/lib/electric/satellite/permissions/client/format.ex create mode 100644 components/electric/lib/electric/satellite/permissions/client/sqlite.ex create mode 100644 components/electric/test/electric/satellite/permissions/client_test.exs diff --git a/clients/typescript/src/_generated/protocol/satellite.ts b/clients/typescript/src/_generated/protocol/satellite.ts index 1f4611eef6..f2ff116705 100644 --- a/clients/typescript/src/_generated/protocol/satellite.ts +++ b/clients/typescript/src/_generated/protocol/satellite.ts @@ -687,6 +687,14 @@ export interface SatPerms { userId: string; rules: SatPerms_Rules | undefined; roles: SatPerms_Role[]; + /** + * `triggers` is the sql code to install these permissions as triggers in + * the local db. + * The assumption is that the entire message is compressed before sending + * over the wire so just include the trigger sql directly rather than + * compress it separately. + */ + triggers: string; } export enum SatPerms_Privilege { @@ -4312,7 +4320,7 @@ export const SatShapeDataEnd = { messageTypeRegistry.set(SatShapeDataEnd.$type, SatShapeDataEnd); function createBaseSatPerms(): SatPerms { - return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, userId: "", rules: undefined, roles: [] }; + return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, userId: "", rules: undefined, roles: [], triggers: "" }; } export const SatPerms = { @@ -4331,6 +4339,9 @@ export const SatPerms = { for (const v of message.roles) { SatPerms_Role.encode(v!, writer.uint32(34).fork()).ldelim(); } + if (message.triggers !== "") { + writer.uint32(42).string(message.triggers); + } return writer; }, @@ -4369,6 +4380,13 @@ export const SatPerms = { message.roles.push(SatPerms_Role.decode(reader, reader.uint32())); continue; + case 5: + if (tag !== 42) { + break; + } + + message.triggers = reader.string(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -4390,6 +4408,7 @@ export const SatPerms = { ? SatPerms_Rules.fromPartial(object.rules) : undefined; message.roles = object.roles?.map((e) => SatPerms_Role.fromPartial(e)) || []; + message.triggers = object.triggers ?? 
""; return message; }, }; diff --git a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex index 914f7a208b..3ef88af345 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex @@ -121,6 +121,29 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Version do end end + def direct_fks( + %__MODULE__{} = version, + {_, _} = relation, + {target_schema, target_table} = target + ) do + with {:ok, table_schema} = table(version, relation) do + table_schema.constraints + |> Stream.filter(&match?({:foreign, _}, &1.constraint)) + # |> Enum.find(&(&1.pk_table.schema == target_schema && &1.pk_table.name == target_table)) + |> Enum.find(fn %{constraint: {:foreign, %{pk_table: %{schema: sname, name: tname}}}} -> + sname == target_schema && tname == target_table + end) + |> case do + nil -> + {:error, + "no foreign key found from #{Electric.Utils.inspect_relation(relation)} to #{Electric.Utils.inspect_relation(target)}"} + + %{constraint: {:foreign, %{fk_cols: fk_cols, pk_cols: pk_cols}}} -> + {:ok, fk_cols, pk_cols} + end + end + end + @spec fk_graph(t()) :: Graph.t() def fk_graph(%__MODULE__{fk_graph: fk_graph}) do fk_graph diff --git a/components/electric/lib/electric/postgres/schema/fk_graph.ex b/components/electric/lib/electric/postgres/schema/fk_graph.ex index feda8423be..211238d989 100644 --- a/components/electric/lib/electric/postgres/schema/fk_graph.ex +++ b/components/electric/lib/electric/postgres/schema/fk_graph.ex @@ -10,26 +10,61 @@ defmodule Electric.Postgres.Schema.FkGraph do key relations in a separate map of `%{relation() => %{relation() => [column_name()]}}`. """ alias Electric.Postgres.Schema.Proto + alias Electric.Postgres.Extension.SchemaLoader + + defstruct [:graph, fks: %{}, pks: %{}] + + @type relation() :: Electric.Postgres.relation() + @type name() :: Electric.Postgres.name() + @type fks() :: [name(), ...] + @type pks() :: [name(), ...] 
+ @type join() :: + {:many_to_one, {relation(), fks()}, {relation(), pks()}} + | {:one_to_many, {relation(), pks()}, {relation(), fks()}} + + @type t() :: %__MODULE__{ + graph: Graph.t(), + fks: %{relation() => %{relation() => fks()}}, + pks: %{relation() => pks()} + } + @type edge() :: {relation(), relation(), label: fks()} | {relation(), relation(), fks()} - defstruct [:graph, fks: %{}] + @spec for_schema(SchemaLoader.Version.t()) :: t() + def for_schema(%SchemaLoader.Version{schema: schema}) do + for_schema(schema) + end def for_schema(%Proto.Schema{tables: tables}) do - tables - |> Stream.flat_map(fn %Proto.Table{constraints: constraints, name: name} -> - constraints - |> Stream.filter(&match?(%{constraint: {:foreign, _}}, &1)) - |> Enum.map(fn %{constraint: {:foreign, fk}} -> - {{name.schema, name.name}, {fk.pk_table.schema, fk.pk_table.name}, fk.fk_cols} + fks = + tables + |> Enum.flat_map(fn %Proto.Table{constraints: constraints, name: name} -> + constraints + |> Stream.filter(&match?(%{constraint: {:foreign, _}}, &1)) + |> Enum.map(fn %{constraint: {:foreign, fk}} -> + {{name.schema, name.name}, {fk.pk_table.schema, fk.pk_table.name}, fk.fk_cols} + end) + end) + + pks = + tables + |> Stream.flat_map(fn %Proto.Table{constraints: constraints, name: name} -> + constraints + |> Stream.filter(&match?(%{constraint: {:primary, _}}, &1)) + |> Enum.map(fn %{constraint: {:primary, pk}} -> + {{name.schema, name.name}, pk.keys} + end) end) - end) - |> new() + |> Map.new() + + new(fks, pks) end defp new_graph do Graph.new(type: :undirected, vertex_identifier: & &1) end - def new(edges) do + @spec new([edge()], %{relation() => pks()}) :: t() + def new(edges, pks) do {graph, fks} = edges |> Enum.reduce({new_graph(), %{}}, fn edge, {graph, fks} -> @@ -43,7 +78,7 @@ defmodule Electric.Postgres.Schema.FkGraph do } end) - %__MODULE__{graph: graph, fks: fks} + %__MODULE__{graph: graph, fks: fks, pks: pks} end defp normalise_edge({{_, _} = v1, {_, _} = v2, label: fk_columns}) when is_list(fk_columns) do @@ -54,35 +89,35 @@ defmodule Electric.Postgres.Schema.FkGraph do {v1, v2, fk_columns} end + @doc """ + Give the foreign keys on table `relation` that place it in the scope defined by `root`. 
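+
+  Returns `nil` if there is no foreign key route between `root` and `relation`. For example, with
+  an `issues` table carrying a `project_id` foreign key into a `projects` scope (an illustrative
+  schema, not one defined in this module):
+
+      FkGraph.foreign_keys(fk_graph, {"public", "projects"}, {"public", "issues"})
+      #=> [{{"public", "projects"}, ["project_id"]}]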
+  """
+  @spec foreign_keys(t(), relation(), relation()) :: fks() | nil
   # [VAX-1626] we don't support recursive relations
   def foreign_keys(%__MODULE__{}, {_, _} = root, root) do
-    []
+    nil
   end

   def foreign_keys(%__MODULE__{fks: fks} = fk_graph, {_, _} = root, {_, _} = relation) do
     # we guard against looking for a fk ref to the same table above so relation_path/3 is always
-    # going to return a list of at least 2 items or nil if there is no route between the two
+    # going to return a list of at least 2 items or `nil` if there is no route between the two
     # tables
-    case path(fk_graph, root, relation) do
-      [r1 | _] ->
-        case Map.get(fks, r1, nil) do
-          table_fks when is_map(table_fks) ->
-            # this gives us a list of the fks pointing out of this table
-            # we now need to find which of those live within the `root` scope
-            Enum.filter(table_fks, fn {fk_relation, _fk_cols} ->
-              is_list(path(fk_graph, root, fk_relation))
-            end)
-
-          _ ->
-            []
-        end
-
-      nil ->
-        []
+    with [r1 | _] <- path(fk_graph, root, relation),
+         table_fks when is_map(table_fks) <- Map.get(fks, r1, nil) do
+      # this gives us a list of the fks pointing out of this table
+      # we now need to find which of those live within the `root` scope
+      Enum.filter(table_fks, fn {fk_relation, _fk_cols} ->
+        is_list(path(fk_graph, root, fk_relation))
+      end)
     end
   end

+  @doc """
+  Get a relation path from the `target` table to the `root` table, defined by the foreign key
+  constraints between all tables in the path.
+  """
+  @spec path(t(), root :: relation(), target :: relation()) :: [relation(), ...] | nil
   def path(%__MODULE__{}, {_, _} = root, root) do
     [root]
   end
@@ -90,4 +125,47 @@ defmodule Electric.Postgres.Schema.FkGraph do
   def path(%__MODULE__{graph: graph}, {_, _} = root, {_, _} = relation) do
     Graph.get_shortest_path(graph, relation, root)
   end
+
+  @doc """
+  Get the foreign key path information between the `root` table and the given `relation`.
+
+  Each entry in the path is either
+  - `{:many_to_one, {from_table, foreign_key_columns}, {to_table, primary_key_columns}}`, or
+  - `{:one_to_many, {from_table, primary_key_columns}, {to_table, foreign_key_columns}}`
+
+  depending on the relation between the two tables.
+  """
+  @spec fk_path(t(), relation(), relation()) :: [join()] | nil
+  def fk_path(%__MODULE__{} = fk_graph, {_, _} = root, {_, _} = relation) do
+    with [_ | _] = path <- path(fk_graph, root, relation) do
+      path
+      |> Enum.chunk_every(2, 1, :discard)
+      |> Enum.map(fn [a, b] -> join(fk_graph, a, b) end)
+    end
+  end
+
+  @doc """
+  Given the two tables `source` and `target`, describe the fk relation between them, either
+  `:many_to_one` or `:one_to_many`.
+
+  See `fk_path/3` above.
+  """
+  @spec join(t(), relation(), relation()) :: join()
+  def join(%__MODULE__{fks: fks, pks: pks}, {_, _} = source, {_, _} = target) do
+    cond do
+      fks = get_in(fks, [source, target]) ->
+        {:many_to_one, {source, fks}, {target, Map.fetch!(pks, target)}}
+
+      fks = get_in(fks, [target, source]) ->
+        {:one_to_many, {source, Map.fetch!(pks, source)}, {target, fks}}
+    end
+  end
+
+  @doc """
+  Return the primary key columns for the given relation.
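+
+  For example, if `{"public", "issues"}` has the single primary key column `id` (an illustrative
+  schema, not one defined in this module):
+
+      FkGraph.primary_keys(fk_graph, {"public", "issues"})
+      #=> {:ok, ["id"]}
+
+  Unknown relations return `:error`.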
+  """
+  @spec primary_keys(t(), relation()) :: {:ok, pks()} | :error
+  def primary_keys(%__MODULE__{pks: pks}, {_, _} = table) do
+    Map.fetch(pks, table)
+  end
 end
diff --git a/components/electric/lib/electric/satellite/permissions.ex b/components/electric/lib/electric/satellite/permissions.ex
index 1ef9ca7084..8689bd9433 100644
--- a/components/electric/lib/electric/satellite/permissions.ex
+++ b/components/electric/lib/electric/satellite/permissions.ex
@@ -347,6 +347,15 @@ defmodule Electric.Satellite.Permissions do
     }
   end

+  @doc """
+  Generate a list of `#{Permissions.Role}` structs for all our currently assigned roles plus the
+  `Anyone` and `Authenticated` roles (if applicable).
+  """
+  @spec assigned_roles(t()) :: [Role.t()]
+  def assigned_roles(perms) do
+    build_roles(perms.source.roles, perms.auth, perms.source.rules.assigns)
+  end
+
   @doc """
   Pass the transaction to the write buffer so it can reset itself when its pending writes have
   completed the loop back from pg and are now in the underlying shape graph.
@@ -684,7 +693,7 @@ defmodule Electric.Satellite.Permissions do
     end

     {:error,
-     "user does not have permission to " <>
+     "permissions: user does not have permission to " <>
       action <> Electric.Utils.inspect_relation(relation)}
   end
diff --git a/components/electric/lib/electric/satellite/permissions/client.ex b/components/electric/lib/electric/satellite/permissions/client.ex
new file mode 100644
index 0000000000..eef90ca268
--- /dev/null
+++ b/components/electric/lib/electric/satellite/permissions/client.ex
@@ -0,0 +1,884 @@
+defmodule Electric.Satellite.Permissions.Client do
+  @moduledoc ~S"""
+  Generates triggers that will enforce the given set of permissions locally at the database
+  level.
+
+  ## Applying to database
+
+  Before applying this permissions SQL, the client *MUST* first run something like:
+
+      transaction(fn tx ->
+        if table_exists("__electric_permissions_triggers") do
+          for {name, type} <- query(tx, "SELECT name, type FROM __electric_permissions_triggers") do
+            case type do
+              "trigger" -> execute(tx, "DROP TRIGGER IF EXISTS #{name};")
+              "function" -> execute(tx, "DROP FUNCTION IF EXISTS #{name} CASCADE;")
+            end
+          end
+        end
+        execute(tx, trigger_sql)
+      end)
+
+  to remove any existing permissions triggers.
+
+  It makes sense to use a transaction to wrap the perms trigger creation; the txn type should
+  probably be `IMMEDIATE` (for SQLite) to prevent any other writes to the db while we're swapping
+  perms triggers.
+
+  ## Notes for PG version
+
+  - we only need to record the trigger function, not the trigger itself, as the `DROP FUNCTION ...
+    CASCADE` statement will automatically drop the associated trigger.
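+
+  ## Example
+
+  A sketch of typical usage (`transaction/1` and `execute/2` are the same pseudo-code helpers as
+  in the cleanup example above, standing in for whatever SQLite driver the client uses):
+
+      triggers_sql =
+        Permissions.Client.permissions_triggers(old_perms, new_perms, schema_version)
+
+      transaction(fn tx ->
+        # drop any previously installed permissions triggers first, as described above
+        execute(tx, triggers_sql)
+      end)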
+ """ + + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Postgres.Schema.FkGraph + alias Electric.Satellite.Permissions + + import Electric.Satellite.Permissions.Client.Format + + @dialect Electric.Satellite.Permissions.Client.SQLite + + @doc false + # public only to enable direct testing of the scope query code + def scope_query(schema, root, table, where_clause) when is_function(where_clause, 1) do + schema + |> compile_schema() + |> build_scope_query(root, table, where_clause) + |> format() + end + + defp build_scope_query(schema, root, table, where_clause, select_clause \\ nil) + + defp build_scope_query(schema, root, root, where_clause, select_clause) do + {:ok, pks} = pks(schema, root) + + where = + Enum.map(pks, fn pk -> + [ref(root, pk, @dialect), " = ", where_clause.(pk)] + end) + |> and_() + + lines([ + [ + "SELECT ", + select_clause || select_pk(root, schema) + ], + indent([ + ["FROM ", @dialect.table(root)], + "WHERE", + indent(where), + "LIMIT 1" + ]) + ]) + end + + defp build_scope_query(schema, root, table, where_clause, select_clause) do + fk_path = FkGraph.fk_path(schema.fks, root, table) + # to handle join tables, we need to know where to start the lookup. if the first element of + # the fk path is a reverse lookup, from the pk to an fk (which belongs to a join table), in + # which case we need to construct the final where clause differently to exclude the table with + # the pk + {origin, fks, pks} = + case fk_path do + [{:one_to_many, {_, _fks}, {t, pks}} | _] -> + {t, pks, pks} + + [{:many_to_one, {_t, fks}, {t, pks}} | _] -> + {t, fks, pks} + end + + joins = + fk_path + |> Stream.drop(1) + |> Enum.reverse() + |> Enum.map(fn {_, {a, fks}, {b, pks}} -> + clauses = + fks + |> Stream.zip(pks) + |> Stream.map(fn {fk, pk} -> [ref(a, fk, @dialect), " = ", ref(b, pk, @dialect)] end) + |> and_() + + lines([["LEFT JOIN ", @dialect.table(a), " ON "], indent(clauses)]) + end) + + where = + pks + |> Stream.zip(fks) + |> Stream.map(fn {pk, fk} -> [ref(origin, pk, @dialect), " = ", where_clause.(fk)] end) + |> and_() + + lines([ + [ + "SELECT ", + select_clause || select_pk(root, schema) + ], + indent([ + ["FROM ", @dialect.table(root)] + ]), + indent(joins), + indent([ + ["WHERE"], + indent(where), + "LIMIT 1" + ]) + ]) + end + + defp compile_schema(%{fks: _, tables: _, columns: _} = schema) do + schema + end + + defp compile_schema(%SchemaLoader.Version{} = schema_version) do + %{ + fks: FkGraph.for_schema(schema_version), + tables: Map.keys(schema_version.tables), + columns: + Map.new(schema_version.tables, fn {name, table_schema} -> + {name, Enum.map(table_schema.columns, & &1.name)} + end) + } + end + + @doc """ + Generate SQL to install permissions-enforcing triggers in a SQLite database. + + When updating a client from some set of old permissions to some updated set, pass the old + permissions as the first argument. + + This will generate some cleanup queries that will garbage collect locally defined roles. 
+ """ + @spec permissions_triggers(Permissions.t() | nil, Permissions.t(), SchemaLoader.Version.t()) :: + String.t() + def permissions_triggers(previous_perms \\ nil, perms, schema_version) do + schema = compile_schema(schema_version) + + Enum.concat([ + create_local_tables(), + clear_triggers_table(), + permissions_cleanup(previous_perms, perms), + table_triggers(perms, schema), + assign_triggers(perms, schema) + ]) + |> format() + end + + defp table_triggers(perms, schema) do + Stream.map(schema.tables, &table_triggers(&1, perms, schema)) + end + + defp table_triggers(table, perms, schema) do + table_grants = + perms.source.rules.grants + |> Stream.map(&Permissions.Grant.new/1) + # can remove these because if they exist they'll be hard-coded into the tests + # and this list of grants is only used to test for local roles + |> Stream.reject(&(&1.role in [:ANYONE, :AUTHENTICATED])) + |> Enum.filter(&(&1.table == table)) + + Stream.map([:INSERT, :UPDATE, :DELETE], fn action -> + %{scoped: scoped, unscoped: unscoped} = + Map.get(perms.roles, {table, action}, %{scoped: [], unscoped: []}) + + scoped_grants = + table_grants + |> Enum.filter(&(&1.privilege == action)) + |> Enum.group_by(& &1.scope) + + {unscoped_grants, scoped_grants} = Map.pop(scoped_grants, nil, []) + + # if we have an unscoped role in our role grant list for this action (on this table) + # then we have permission (if the column list and the where clause match) + tests = + Enum.concat([ + unscoped_trigger_tests(unscoped, unscoped_grants, perms, schema, table, action), + scoped_trigger_tests(scoped, scoped_grants, perms, schema, table, action) + ]) + + trigger_conditional = + case tests do + [] -> + nil + + [_ | _] -> + lines([ + "NOT (", + indent([ + "SELECT CASE", + indent([ + lines(Enum.map(tests, &when_/1)), + "ELSE FALSE" + ]), + "END" + ]), + ")" + ]) + end + + additional_triggers = + Enum.concat([ + scope_move_triggers(scoped, scoped_grants, perms, schema, table, action) + ]) + + lines([ + @dialect.create_trigger( + table: table, + event: action, + condition: trigger_conditional, + body: + @dialect.rollback( + "permissions: does not have matching #{action} permissions on #{@dialect.table(table)}" + ) + ) + | additional_triggers + ]) + end) + |> Enum.concat(global_triggers(table, schema)) + end + + defp global_triggers(table, schema) do + {:ok, pks} = pks(schema, table) + + trigger_name = @dialect.trigger_name(table, :UPDATE, ["protect_pk"]) + + [ + @dialect.create_trigger( + name: trigger_name, + table: table, + event: :UPDATE, + of: pks, + body: + @dialect.rollback( + "permissions: invalid update of primary key on #{@dialect.table(table)}" + ) + ) + ] + end + + defp scope_move_triggers([_ | _] = role_grants, grants, perms, schema, table, :UPDATE) do + scope_groups = Enum.group_by(role_grants, & &1.grant.scope) + + perms_scopes = + scope_groups + |> MapSet.new(fn {k, _} -> k end) + |> MapSet.union(MapSet.new(grants, fn {k, _} -> k end)) + + {:ok, pks} = pks(schema, table) + + grants = + perms.source.rules.grants + |> Stream.map(&Permissions.Grant.new/1) + |> Permissions.Grant.for_table(table) + |> Permissions.Grant.for_privilege(:UPDATE) + + Enum.flat_map(perms_scopes, fn scope -> + case FkGraph.fk_path(schema.fks, scope, table) do + nil -> + [] + + [{_, {^table, fks}, _} | _] -> + grants = + grants + |> Permissions.Grant.for_scope(scope) + |> Enum.filter(fn + %{columns: :all} -> + true + + %{columns: columns} -> + # unless the grant allows the update to all the fks columns then it can't allow the scope move + 
Enum.all?(fks, &MapSet.member?(columns, &1)) + end) + + scope_cols = + Enum.map(Enum.with_index(pks), fn {pk, i} -> + "#{ref(scope, pk, @dialect)} AS #{pk_alias(i)}" + end) + + scope_query = + build_scope_query( + schema, + scope, + table, + fn col -> ["NEW.", quot(col)] end, + lst(scope_cols, & &1) + ) + + local_roles = local_role_query_scoped(grants, scope, schema, :UPDATE) + role_grants = Map.get(scope_groups, scope, []) + + tomb_scope_name = fn role -> + "__tomb__#{role.assign_id}" + end + + assigned_roles = + Enum.map(role_grants, fn %{role: %{scope: {^scope, scope_id}} = role} -> + lines([ + "((#{lst(scope_id, &val/1)}) = (SELECT #{scope_id |> Enum.with_index() |> lst(&pk_alias/1)} FROM __scope__))", + "AND (#{json(role.id)} NOT IN (SELECT row_id FROM #{quot(tomb_scope_name.(role))}))" + ]) + end) + + tombs = + role_grants + |> Enum.uniq_by(& &1.role.assign_id) + |> Enum.map(fn %{role: role} -> + lines([ + "#{quot(tomb_scope_name.(role))} AS (", + indent([ + "SELECT row_id FROM #{@dialect.table(local_roles_tombstone_table())} WHERE assign_id IS #{val(role.assign_id)}" + ]), + " )" + ]) + end) + + guard = + if Enum.empty?(local_roles) && Enum.empty?(assigned_roles) do + [] + else + lte = + lines([ + "WITH", + indent([ + " __scope__ AS (", + indent([scope_query]), + ")," + ]), + indent(tombs), + "SELECT CASE", + indent( + Enum.concat([ + Enum.map(assigned_roles, &when_/1), + Enum.map(local_roles, &when_/1), + ["ELSE FALSE"] + ]) + ), + "END" + ]) + + lines([ + "NOT (", + indent([lte]), + ")" + ]) + end + + trigger_name = trigger_name(table, :UPDATE, @dialect, ["scope_move"]) + + [ + @dialect.create_trigger( + name: trigger_name, + table: table, + event: :UPDATE, + of: fks, + condition: guard, + body: + @dialect.rollback( + "permissions: does not have matching UPDATE permissions in new scope on #{@dialect.table(table)}" + ) + ) + ] + end + end) + end + + defp scope_move_triggers(_scoped, _grants, _perms, _schema, _table, _action) do + [] + end + + defp column_protection(base_test, grant, schema, table, action) do + case grant.columns do + :all -> + [base_test] + + allowed_columns -> + {:ok, columns} = cols(schema, table) + + disallowed_columns = Enum.reject(columns, &MapSet.member?(allowed_columns, &1)) + + [ + lines([ + "(", + indent([column_test(disallowed_columns, action)]), + ") AND (", + indent([base_test]), + ")" + ]) + ] + end + end + + defp column_test(disallowed_columns, :INSERT) do + [ + disallowed_columns + |> Enum.map(&"NEW.#{&1} IS NULL") + |> and_() + ] + end + + defp column_test(disallowed_columns, :UPDATE) do + lines( + disallowed_columns + |> Enum.map(&~s[NEW."#{&1}" IS OLD."#{&1}"]) + |> and_() + ) + end + + defp unscoped_trigger_tests(role_grants, grants, perms, schema, table, action) do + Stream.concat([ + Stream.map(role_grants, &unscoped_trigger_test(&1, perms, schema, table, action)), + local_role_query_unscoped(grants, schema, table, action) + ]) + end + + # TODO: where clause (for all) + defp unscoped_trigger_test(%{role: %type{}} = role_grant, _perms, schema, table, action) + when type in [Permissions.Role.Authenticated, Permissions.Role.Anyone] do + lines([ + column_protection( + ["TRUE"], + role_grant.grant, + schema, + table, + action + ) + ]) + end + + defp unscoped_trigger_test(%{role: role} = role_grant, _perms, schema, table, action) do + lines([ + column_protection( + lines([ + "#{json(role.id)} NOT IN (", + indent([ + "SELECT row_id FROM #{@dialect.table(local_roles_tombstone_table())} WHERE assign_id IS #{val(role.assign_id)}" + ]), + ")" + ]), + 
        role_grant.grant,
+        schema,
+        table,
+        action
+      )
+    ])
+  end
+
+  defp scoped_trigger_tests(role_grants, grants, perms, schema, table, action) do
+    scope_groups = Enum.group_by(role_grants, & &1.grant.scope)
+
+    scopes =
+      scope_groups
+      |> MapSet.new(fn {k, _} -> k end)
+      |> MapSet.union(MapSet.new(grants, fn {k, _} -> k end))
+
+    Enum.flat_map(scopes, fn scope ->
+      prefix = assign_trigger_prefix(action)
+
+      scope_query =
+        build_scope_query(schema, scope, table, fn col -> [prefix, ".", quot(col)] end)
+
+      cases =
+        Stream.concat([
+          scope_groups
+          |> Map.get(scope, [])
+          |> Enum.map(&scoped_trigger_test(&1, perms, schema, table, action)),
+          grants
+          |> Map.get(scope, [])
+          |> local_role_query_scoped(scope, schema, action)
+        ])
+
+      [
+        lines([
+          "WITH __scope__ AS (",
+          indent([scope_query]),
+          ") SELECT CASE",
+          indent(Enum.map(cases, &when_/1) ++ ["ELSE FALSE"]),
+          "END"
+        ])
+      ]
+    end)
+  end
+
+  # TODO: where clause (for all)
+  defp scoped_trigger_test(role_grant, _perms, schema, table, action) do
+    %{role: %{scope: {root, scope_id}} = role} = role_grant
+
+    scope_cols = scope_cols(root, schema)
+
+    [
+      lines([
+        "WITH __tomb__ AS (",
+        indent([
+          "SELECT row_id FROM #{@dialect.table(local_roles_tombstone_table())}",
+          indent(["WHERE assign_id IS #{val(role.assign_id)}"])
+        ]),
+        ")",
+        "SELECT (",
+        indent([
+          column_protection(
+            lines([
+              "(",
+              indent([
+                "(#{lst(scope_id, &val/1)}) = (SELECT #{lst(scope_cols, &quot/1)} FROM __scope__)",
+                "AND (#{json(role.id)} NOT IN (SELECT row_id FROM __tomb__))"
+              ]),
+              ")"
+            ]),
+            role_grant.grant,
+            schema,
+            table,
+            action
+          )
+        ]),
+        ")"
+      ])
+    ]
+  end
+
+  defp pk_alias({_, i}) do
+    pk_alias(i)
+  end
+
+  defp pk_alias(i) do
+    "pk#{i}"
+  end
+
+  defp scope_cols(table, schema) do
+    {:ok, pks} = pks(schema, table)
+
+    pks
+    |> Enum.with_index()
+    |> Enum.map(&pk_alias/1)
+  end
+
+  defp local_role_query_scoped(grants, scope_table, schema, action) do
+    cols = scope_cols(scope_table, schema)
+
+    Enum.map(grants, fn grant ->
+      column_protection(
+        lines([
+          "SELECT 1 FROM #{@dialect.table(local_roles_table())}",
+          indent([
+            "WHERE (scope = #{val(@dialect.table(scope_table, false))})",
+            "AND (scope_id = (SELECT json_array(#{lst(cols, &quot/1)}) FROM __scope__))",
+            "AND (role = #{val(grant.role)})"
+          ])
+        ]),
+        grant,
+        schema,
+        grant.table,
+        action
+      )
+    end)
+  end
+
+  defp local_role_query_unscoped(grants, schema, table, action) do
+    Enum.map(grants, fn grant ->
+      column_protection(
+        lines([
+          ["SELECT 1 FROM ", @dialect.table(local_roles_table())],
+          indent([
+            "WHERE (scope IS NULL) AND (role IS #{val(grant.role)})"
+          ])
+        ]),
+        grant,
+        schema,
+        table,
+        action
+      )
+    end)
+  end
+
+  defp create_local_tables do
+    # replace with [proper migration](https://linear.app/electric-sql/issue/VAX-1385/internal-schema-migration-for-client-side-db-schema)
+    # Not adding indexes for the roles table:
+    # CREATE INDEX IF NOT EXISTS "#{@local_roles_table}_role_idx" ON "#{@local_roles_table}" (role);
+    # CREATE INDEX IF NOT EXISTS "#{@local_roles_table}_scope_idx" ON "#{@local_roles_table}" (scope);
+    # even though those columns are used in the local role lookups, because in all likelihood the
+    # number of local roles will be very small (they only exist until the role addition comes back
+    # again from pg)
+    [
+      """
+      CREATE TABLE IF NOT EXISTS #{@dialect.table(local_roles_table())} (
+        assign_id TEXT NOT NULL,
+        row_id TEXT NOT NULL,
+        scope TEXT,
+        scope_id TEXT,
+        role TEXT NOT NULL,
+        PRIMARY KEY (assign_id, row_id)
+      );
+
+      CREATE TABLE IF NOT EXISTS 
#{@dialect.table(local_roles_tombstone_table())} ( + assign_id TEXT NOT NULL, + row_id TEXT NOT NULL, + PRIMARY KEY (assign_id, row_id) + ); + + CREATE TABLE IF NOT EXISTS #{@dialect.table(triggers_and_functions_table())} ( + name TEXT NOT NULL, + type TEXT NOT NULL, + PRIMARY KEY (name, type) + ); + """ + ] + end + + defp clear_triggers_table do + ["DELETE FROM #{@dialect.table(triggers_and_functions_table())};\n"] + end + + defp permissions_cleanup(nil, _perms) do + [] + end + + defp permissions_cleanup(old_perms, new_perms) do + old_roles = MapSet.new(old_perms.source.roles, &role_id/1) + new_roles = MapSet.new(new_perms.source.roles, &role_id/1) + removed_roles = MapSet.difference(old_roles, new_roles) + added_roles = MapSet.difference(new_roles, old_roles) + + Enum.concat([ + [ + "-- @permissions_cleanup BEGIN" + ], + # once a role has been removed, the new generated triggers will no longer include it + # so we can remove the tombstone entries that were blocking it locally + Enum.map(removed_roles, &cleanup_local_role(&1, local_roles_tombstone_table())), + # once a role that was added locally makes the loop back and arrives in the defined + # permissions then we can remove the local role entries that were granting access because + # they're now encoded in the triggers + Enum.map(added_roles, &cleanup_local_role(&1, local_roles_table())), + [ + "-- @permissions_cleanup END" + ] + ]) + end + + defp role_id(role) do + %{"assign_id" => role.assign_id, "row_id" => Jason.encode!(role.row_id)} + end + + defp cleanup_local_role(role, table) do + filter = + Enum.map_join(role, " AND ", fn {col, val} -> + "(#{col} = #{val(val)})" + end) + + "DELETE FROM #{@dialect.table(table)} WHERE #{filter};" + end + + defp assign_triggers(perms, schema) do + Stream.flat_map(perms.source.rules.assigns, &assign_triggers(&1, perms, schema)) + end + + defp assign_triggers(assign, perms, schema) when not is_nil(perms.auth.user_id) do + # FIXME: should only run when user id of membership table = ME + user_id = perms.auth.user_id + + Enum.map([:INSERT, :UPDATE, :DELETE], fn action -> + role = + case assign.role_column do + nil -> + assign.role_name + + column -> + ~s[#{assign_trigger_prefix(action)}."#{column}"] + end + + body = + case action do + :INSERT -> + [ + { + [], + lines([ + "INSERT INTO #{@dialect.table(local_roles_table())}", + indent([ + "(assign_id, row_id, scope, scope_id, role)" + ]), + "VALUES (", + indent([ + "#{val(assign.id)},", + "#{assign_row_id(assign, schema, action)},", + "#{assign_scope(assign, schema, action)},", + "#{assign_scope_id(assign, schema, action)},", + "#{role}" + ]), + ");" + ]) + } + ] + + :DELETE -> + existing_roles = Enum.filter(perms.source.roles, &(&1.assign_id == assign.id)) + + guards = + case existing_roles do + [] -> + [] + + existing_roles -> + {:ok, pks} = pks(schema, {assign.table.schema, assign.table.name}) + + Enum.map_join(existing_roles, " OR ", fn role -> + paren([ + "json_array", + lst(pks, &"OLD.#{quot(&1)}") |> paren(), + " = ", + json(role.row_id) + ]) + end) + |> paren() + |> IO.iodata_to_binary() + |> List.wrap() + end + + [ + { + [], + lines([ + "DELETE FROM #{@dialect.table(local_roles_table())}", + indent([ + "WHERE assign_id IS #{val(assign.id)}", + indent([ + "AND row_id IS #{assign_row_id(assign, schema, action)};" + ]) + ]) + ]) + }, + { + guards, + lines([ + "INSERT INTO #{@dialect.table(local_roles_tombstone_table())}", + indent(["(assign_id, row_id)"]), + "VALUES (", + indent([ + "#{val(assign.id)},", + "#{assign_row_id(assign, schema, 
action)}" + ]), + ");" + ]) + } + ] + + :UPDATE -> + case assign.role_column do + nil -> + [] + + column -> + [ + { + [], + lines([ + "UPDATE #{@dialect.table(local_roles_table())}", + indent([ + "SET role = NEW.#{quot(column)}", + "WHERE assign_id IS #{val(assign.id)}", + indent([ + "AND row_id IS #{assign_row_id(assign, schema, action)};" + ]) + ]) + ]) + } + ] + end + end + + body + |> Enum.with_index() + |> Enum.map(fn {{guards, stmt}, n} -> + trigger_name = @dialect.trigger_name(assign.table, action, ["assign", assign.id, "#{n}"]) + + when_guard = + [ + "(#{assign_trigger_prefix(action)}.#{quot(assign.user_column)} IS #{val(user_id)})" + | guards + ] + |> and_() + + @dialect.create_trigger( + name: trigger_name, + table: assign.table, + event: action, + condition: lines(["(", indent(when_guard), ")"]), + body: stmt + ) + end) + end) + end + + defp assign_triggers(_assign, _perms, _schema) do + [] + end + + defp assign_row_id(assign, schema, action) do + {:ok, pks} = pks(schema, assign.table.schema, assign.table.name) + prefix = assign_trigger_prefix(action) + pk_cols = lst(pks, &~s[#{prefix}."#{&1}"]) + + ["json_array", paren(pk_cols)] + end + + defp assign_scope(%{scope: nil} = _assign, _schema, _action) do + "NULL" + end + + defp assign_scope(assign, _schema, _action) do + assign.scope + |> @dialect.table(false) + |> val() + end + + defp assign_scope_id(%{scope: nil} = _assign, _schema, _action) do + "NULL" + end + + defp assign_scope_id(assign, schema, action) do + %{ + table: %{schema: sname, name: tname}, + scope: %{schema: scope_schema, name: scope_table} + } = assign + + scope = {scope_schema, scope_table} + table = {sname, tname} + + prefix = assign_trigger_prefix(action) + + [{:many_to_one, {^table, fks}, _} | _] = FkGraph.fk_path(schema.fks, scope, table) + + fk_cols = lst(fks, &~s[#{prefix}."#{&1}"]) + + ["json_array", paren(fk_cols)] + end + + defp assign_trigger_prefix(action) do + case action do + :INSERT -> "NEW" + :UPDATE -> "OLD" + :DELETE -> "OLD" + end + end + + defp select_pk({_, _} = table, schema) do + {:ok, pks} = pks(schema, table) + + pks + |> Enum.with_index() + |> lst(fn {pk, i} -> + [ref(table, pk, @dialect), " AS ", pk_alias(i)] + end) + end + + defp pks(schema, sname, tname) do + pks(schema, {sname, tname}) + end + + defp pks(schema, table) do + FkGraph.primary_keys(schema.fks, table) + end + + defp cols(schema, table) do + Map.fetch(schema.columns, table) + end +end diff --git a/components/electric/lib/electric/satellite/permissions/client/format.ex b/components/electric/lib/electric/satellite/permissions/client/format.ex new file mode 100644 index 0000000000..42a297b122 --- /dev/null +++ b/components/electric/lib/electric/satellite/permissions/client/format.ex @@ -0,0 +1,138 @@ +defmodule Electric.Satellite.Permissions.Client.Format do + @electric "electric" + @local_roles_table "local_roles" + @local_roles_tombstone_table "local_roles_tombstone" + @triggers_and_functions_table "permissions_triggers" + + @trigger_prefix "__electric_perms" + + def local_roles_table, do: {@electric, @local_roles_table} + def local_roles_tombstone_table, do: {@electric, @local_roles_tombstone_table} + def triggers_and_functions_table, do: {@electric, @triggers_and_functions_table} + + # pg triggers cannot be namespaced, they inherit their namespace from the table they're attached + # to so we need this prefix even in pg + def trigger_prefix, do: @trigger_prefix + + def trigger_name(table, action, dialect, suffixes) do + Enum.join([@trigger_prefix, "#{dialect.table(table, 
false)}_#{action}" | suffixes], "_") + end + + # quote name + def quot(name) when is_binary(name), do: ~s["#{name}"] + + def json(obj), do: obj |> Jason.encode!() |> val() + + # list of things, mapped using mapper + def lst(list, mapper) when is_list(list) and is_function(mapper, 1) do + list |> Stream.map(mapper) |> Enum.intersperse(", ") + end + + def and_(clauses) do + clauses + |> Enum.to_list() + |> do_op("AND") + end + + defp do_op([clause], _op) do + lines([clause]) + end + + defp do_op(clauses, op) do + lines( + clauses + |> Enum.to_list() + |> intersperse_op(op) + ) + end + + defp intersperse_op([], _op), do: [] + defp intersperse_op([clause], op), do: [[op, " (", clause, ")"]] + + defp intersperse_op([c1, c2 | rest], op) do + [["(", c1, ")"], [op, " (", c2, ")"] | intersperse_op(rest, op)] + end + + def when_(test) do + lines(["WHEN (", indent([test]), ") THEN TRUE"]) + end + + def paren(inner), do: ["(", inner, ")"] + + def ref(table, col, dialect), do: [dialect.table(table), ".", quot(col)] + + def val(s) when is_binary(s), do: "'#{:binary.replace(s, "'", "''", [:global])}'" + def val(n) when is_integer(n) or is_float(n), do: "#{n}" + + def lines(lines, indent \\ 0) + + def lines([], _indent) do + [] + end + + def lines(lines, indent) do + {:lines, indent, lines} + end + + def indent(lines) do + lines(lines, 1) + end + + def format(lines) do + lines + |> format_lines(0) + |> IO.iodata_to_binary() + end + + defp format_lines(lines, cursor) when is_list(lines) do + format_lines({:lines, 0, lines}, cursor) + end + + defp format_lines({:lines, indent, {:lines, _, _} = inner}, cursor) do + format_lines({:lines, indent, [inner]}, cursor) + end + + defp format_lines({:lines, indent, lines}, cursor) when is_list(lines) do + lines + |> Stream.map(&format_line(&1, indent + cursor)) + |> Stream.reject(&is_nil/1) + |> Enum.intersperse("\n") + end + + defp format_line([{:lines, indent, _lines} = lines | rest], cursor) do + [format_lines(lines, cursor + indent) | format_lines(rest, cursor)] + end + + defp format_line({:lines, _indent, _lines} = lines, cursor) do + format_lines(lines, cursor) + end + + defp format_line([], _cursor) do + nil + end + + defp format_line(nil, _cursor) do + nil + end + + defp format_line(line, cursor) when is_list(line) or is_binary(line) do + [tab(cursor), line] + end + + defp tab(0), do: [] + defp tab(n), do: [" " | tab(n - 1)] + + def prefix({:lines, indent, [first | lines]}, prefix) do + {:lines, indent, [[prefix, first] | lines]} + end + + def join_optional(elements, join \\ " ") do + elements + |> Enum.reject(&is_nil/1) + |> Enum.join(join) + end + + def optional(nil, _), do: nil + + def optional(val, wrapper), do: wrapper.(val) +end diff --git a/components/electric/lib/electric/satellite/permissions/client/sqlite.ex b/components/electric/lib/electric/satellite/permissions/client/sqlite.ex new file mode 100644 index 0000000000..10d1c935f5 --- /dev/null +++ b/components/electric/lib/electric/satellite/permissions/client/sqlite.ex @@ -0,0 +1,49 @@ +defmodule Electric.Satellite.Permissions.Client.SQLite do + import Electric.Satellite.Permissions.Client.Format + + def create_trigger(args) do + name = + Keyword.get_lazy(args, :name, fn -> + trigger_name(args[:table], args[:event]) + end) + + lines([ + "--", + "", + "INSERT INTO #{table(triggers_and_functions_table())} (name, type) VALUES (#{val(name)}, 'trigger');", + "", + "CREATE TRIGGER #{quot(name)}", + indent([ + join_optional([ + Keyword.get(args, :when, "BEFORE"), + args[:event], + optional(args[:of], fn 
of -> ["OF ", lst(of, "/1)] end), + "ON", + table(args[:table]) + ]), + "FOR EACH ROW", + optional(args[:condition], &prefix(&1, "WHEN ")) + ]), + "BEGIN", + indent(List.wrap(args[:body])), + "END;", + "" + ]) + end + + def rollback(message) do + "SELECT RAISE(ROLLBACK, #{val(message)});" + end + + def table(table, quot \\ true) + + def table(table, true), do: table(table, false) |> quot() + + def table({"electric", table}, false), do: "__electric_#{table}" + def table({_schema, table}, false), do: table + def table(%{schema: _schema, name: table}, false), do: table + + def trigger_name(table, action, suffixes \\ []) do + trigger_name(table, action, __MODULE__, suffixes) + end +end diff --git a/components/electric/lib/electric/satellite/permissions/grant.ex b/components/electric/lib/electric/satellite/permissions/grant.ex index 691bce79d7..e29301a885 100644 --- a/components/electric/lib/electric/satellite/permissions/grant.ex +++ b/components/electric/lib/electric/satellite/permissions/grant.ex @@ -60,4 +60,16 @@ defmodule Electric.Satellite.Permissions.Grant do def columns_valid?(%__MODULE__{columns: allowed}, %MapSet{} = columns) do MapSet.subset?(columns, allowed) end + + def for_table(grants, {_, _} = table) do + Enum.filter(grants, &(&1.table == table)) + end + + def for_privilege(grants, priv) do + Enum.filter(grants, &(&1.privilege == priv)) + end + + def for_scope(grants, {_, _} = scope) do + Enum.filter(grants, &(&1.scope == scope)) + end end diff --git a/components/electric/lib/electric/satellite/protobuf_messages.ex b/components/electric/lib/electric/satellite/protobuf_messages.ex index 65ca4abcc7..a37db6a64a 100644 --- a/components/electric/lib/electric/satellite/protobuf_messages.ex +++ b/components/electric/lib/electric/satellite/protobuf_messages.ex @@ -12988,7 +12988,7 @@ end, defmodule Electric.Satellite.SatPerms do @moduledoc false - defstruct id: 0, user_id: "", rules: nil, roles: [] + defstruct id: 0, user_id: "", rules: nil, roles: [], triggers: "" ( ( @@ -13003,7 +13003,12 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_id(msg) |> encode_user_id(msg) |> encode_rules(msg) |> encode_roles(msg) + [] + |> encode_id(msg) + |> encode_user_id(msg) + |> encode_rules(msg) + |> encode_roles(msg) + |> encode_triggers(msg) end ) @@ -13064,6 +13069,18 @@ ArgumentError -> reraise Protox.EncodingError.new(:roles, "invalid field value"), __STACKTRACE__ end + end, + defp encode_triggers(acc, msg) do + try do + if msg.triggers == "" do + acc + else + [acc, "*", Protox.Encode.encode_string(msg.triggers)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:triggers, "invalid field value"), __STACKTRACE__ + end end ] @@ -13130,6 +13147,11 @@ {[roles: msg.roles ++ [Electric.Satellite.SatPerms.Role.decode!(delimited)]], rest} + {5, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[triggers: Protox.Decode.validate_string(delimited)], rest} + {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) {[], rest} @@ -13190,7 +13212,8 @@ 1 => {:id, {:scalar, 0}, :int64}, 2 => {:user_id, {:scalar, ""}, :string}, 3 => {:rules, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, - 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}} + 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, + 5 => {:triggers, {:scalar, ""}, :string} } end @@ -13203,6 +13226,7 @@ id: {1, {:scalar, 0}, :int64}, roles: 
{4, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, rules: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, + triggers: {5, {:scalar, ""}, :string}, user_id: {2, {:scalar, ""}, :string} } end @@ -13247,6 +13271,15 @@ name: :roles, tag: 4, type: {:message, Electric.Satellite.SatPerms.Role} + }, + %{ + __struct__: Protox.Field, + json_name: "triggers", + kind: {:scalar, ""}, + label: :optional, + name: :triggers, + tag: 5, + type: :string } ] end @@ -13380,6 +13413,35 @@ [] ), + ( + def field_def(:triggers) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "triggers", + kind: {:scalar, ""}, + label: :optional, + name: :triggers, + tag: 5, + type: :string + }} + end + + def field_def("triggers") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "triggers", + kind: {:scalar, ""}, + label: :optional, + name: :triggers, + tag: 5, + type: :string + }} + end + + [] + ), def field_def(_) do {:error, :no_such_field} end @@ -13416,6 +13478,9 @@ def default(:roles) do {:error, :no_default_value} end, + def default(:triggers) do + {:ok, ""} + end, def default(_) do {:error, :no_such_field} end diff --git a/components/electric/test/electric/postgres/schema/fk_graph_test.exs b/components/electric/test/electric/postgres/schema/fk_graph_test.exs index 62b9cea9a6..d8cd2023b6 100644 --- a/components/electric/test/electric/postgres/schema/fk_graph_test.exs +++ b/components/electric/test/electric/postgres/schema/fk_graph_test.exs @@ -15,16 +15,19 @@ defmodule Electric.Postgres.Schema.FkGraphTest do setup do graph = - FkGraph.new([ - {@orders, @restaurants, ["restaurant_id"]}, - {@order_riders, @orders, ["order_id"]}, - {@order_riders, @riders, ["rider_id"]}, - {@addresses, @users, ["user_id"]}, - # very realistic many-to-many between wheels and bikes... - {@bikes, @riders, ["rider_id"]}, - {@bike_wheels, @bikes, ["bike_id"]}, - {@bike_wheels, @wheels, ["wheel_id"]} - ]) + FkGraph.new( + [ + {@orders, @restaurants, ["restaurant_id"]}, + {@order_riders, @orders, ["order_id"]}, + {@order_riders, @riders, ["rider_id"]}, + {@addresses, @users, ["user_id"]}, + # very realistic many-to-many between wheels and bikes... 
+ {@bikes, @riders, ["rider_id"]}, + {@bike_wheels, @bikes, ["bike_id"]}, + {@bike_wheels, @wheels, ["wheel_id"]} + ], + %{} + ) {:ok, graph: graph} end @@ -59,9 +62,9 @@ defmodule Electric.Postgres.Schema.FkGraphTest do end test "returns nil if no fk is found on the table", cxt do - assert [] = FkGraph.foreign_keys(cxt.graph, @orders, @users) + refute FkGraph.foreign_keys(cxt.graph, @orders, @users) # @wheels does not have a fk, because of the many-to-many - assert [] = FkGraph.foreign_keys(cxt.graph, @orders, @wheels) + refute FkGraph.foreign_keys(cxt.graph, @orders, @wheels) end end end diff --git a/components/electric/test/electric/satellite/permissions/client_test.exs b/components/electric/test/electric/satellite/permissions/client_test.exs new file mode 100644 index 0000000000..3b1aae5101 --- /dev/null +++ b/components/electric/test/electric/satellite/permissions/client_test.exs @@ -0,0 +1,312 @@ +defmodule Electric.Satellite.Permissions.ClientTest do + use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader + + alias Electric.Satellite.Permissions + + alias ElectricTest.PermissionsHelpers + alias ElectricTest.PermissionsHelpers.Auth + alias ElectricTest.PermissionsHelpers.Chgs + alias ElectricTest.PermissionsHelpers.Client + alias ElectricTest.PermissionsHelpers.Roles + alias ElectricTest.PermissionsHelpers.Sqlite + + import ElectricTest.PermissionsHelpers + + @comments {"public", "comments"} + @issues {"public", "issues"} + @offices {"public", "offices"} + @project_memberships {"public", "project_memberships"} + @projects {"public", "projects"} + @reactions {"public", "reactions"} + @regions {"public", "regions"} + @users {"public", "users"} + @addresses {"public", "addresses"} + @workspaces {"public", "workspaces"} + + @compound_root {"public", "compound_root"} + @compound_level1 {"public", "compound_level1"} + @compound_level2 {"public", "compound_level2"} + # @compound_memberships {"public", "compound_memberships"} + + setup do + {:ok, loader} = PermissionsHelpers.Schema.loader() + {:ok, schema_version} = SchemaLoader.load(loader) + {:ok, conn} = Exqlite.Sqlite3.open(":memory:") + + conn = + Enum.reduce(PermissionsHelpers.Schema.migrations(), conn, fn {_version, stmts}, conn -> + for stmt <- stmts do + :ok = Exqlite.Sqlite3.execute(conn, stmt) + end + + conn + end) + + data = [ + {@regions, "rg1", [{@offices, "o1"}, {@offices, "o2"}]}, + {@regions, "rg2", [{@offices, "o3"}, {@offices, "o4"}]}, + {@workspaces, "w1", + [ + {@projects, "p1", + [ + {@issues, "i1", + [ + {@comments, "c1", [{@reactions, "r1"}, {@reactions, "r2"}, {@reactions, "r3"}]}, + {@comments, "c2", [{@reactions, "r4"}]} + ]}, + {@issues, "i2", [{@comments, "c5"}]}, + {@project_memberships, "pm1", %{"user_id" => Auth.user_id(), "role" => "member"}, []} + ]}, + {@projects, "p2", + [ + {@issues, "i3", + [ + {@comments, "c3", [{@reactions, "r5"}, {@reactions, "r6"}, {@reactions, "r7"}]}, + {@comments, "c4", [{@reactions, "r8"}]} + ]}, + {@issues, "i4"} + ]}, + {@projects, "p3", [{@issues, "i5", [{@comments, "c6"}]}]}, + {@projects, "p4", [{@issues, "i6", []}]} + ]}, + {@compound_root, ["cmr1_1", "cmr2_1"], + [ + { + @compound_level1, + ["cml1_1", "cml2_1"], + [{@compound_level2, ["cmll1_1", "cmll2_1"], []}] + } + ]}, + {@users, [Auth.user_id()], [{@addresses, ["ad1"]}]} + ] + + conn = Sqlite.build_tree(conn, data, schema_version) + + {:ok, + conn: conn, + schema_version: schema_version, + loader: loader, + data: data, + migrations: PermissionsHelpers.Schema.migrations()} + end + + test "scope_query/3", 
cxt do + # the use of the fk in the map here is because in the triggers, you would be looking up from + # the trigger row, with e.g. `NEW`, so the final clause would be e.g. `NEW.region_id` for the + # first test case here. + tests = [ + {@regions, @offices, %{"region_id" => "rg1"}, ["rg1"]}, + {@workspaces, @reactions, %{"comment_id" => "c4"}, ["w1"]}, + {@projects, @reactions, %{"comment_id" => "c4"}, ["p2"]}, + {@issues, @reactions, %{"comment_id" => "c3"}, ["i3"]}, + {@projects, @project_memberships, %{"project_id" => "p1"}, ["p1"]}, + {@projects, @projects, %{"id" => "p1"}, ["p1"]}, + {@compound_root, @compound_level1, %{"root_id1" => "cmr1_1", "root_id2" => "cmr2_1"}, + ["cmr1_1", "cmr2_1"]}, + {@compound_root, @compound_level2, %{"level1_id1" => "cml1_1", "level1_id2" => "cml2_1"}, + ["cmr1_1", "cmr2_1"]}, + {@projects, @users, %{"user_id" => Auth.user_id()}, ["p1"]}, + {@projects, @addresses, %{"user_id" => Auth.user_id()}, ["p1"]} + ] + + for {root, table, id, scope_id} <- tests do + query = + Permissions.Client.scope_query( + cxt.schema_version, + root, + table, + fn col -> ["'", Map.fetch!(id, col), "'"] end + ) + + {:ok, stmt} = Exqlite.Sqlite3.prepare(cxt.conn, query) + + assert {:row, ^scope_id} = Exqlite.Sqlite3.step(cxt.conn, stmt) + end + end + + describe "permissions triggers" do + setup(cxt) do + Client.setup(cxt) + end + + test "rejects updates to primary keys", cxt do + perms = + Client.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@issues)} TO 'editor'], + ~s[ASSIGN #{table(@users)}.role TO #{table(@users)}.id] + ], + [ + Roles.role("editor", "assign-1") + ] + ) + + assert {:error, _} = + Client.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@issues, %{"id" => "i1"}, %{"id" => "i100"}) + ]) + ) + end + end + + describe "local roles cleanup" do + setup(cxt) do + Client.setup(cxt) + end + + defp cleanup_sql(query) do + query + |> IO.iodata_to_binary() + |> String.split("\n") + |> Enum.drop_while(fn + "-- @permissions_cleanup BEGIN" -> false + _ -> true + end) + |> Enum.take_while(fn + "-- @permissions_cleanup END" -> false + _ -> true + end) + |> Enum.reject(fn + "--" <> _ -> true + _ -> false + end) + end + + test "deletes tombstone entries for removed global roles", cxt do + ddlx = + [ + ~s[GRANT ALL ON #{table(@issues)} TO 'editor'], + ~s[ASSIGN #{table(@users)}.role TO #{table(@users)}.id] + ] + + old_perms = + perms_build( + cxt, + ddlx, + [ + Roles.role("editor", "assign-1", row_id: ["user-1"]) + ] + ) + + # we lost a global role + perms = perms_build(cxt, ddlx, []) + + cleanup = + old_perms + |> Permissions.Client.permissions_triggers(perms, cxt.schema_version) + |> cleanup_sql() + + assert Enum.find(cleanup, fn query -> + query == + ~s|DELETE FROM "__electric_local_roles_tombstone" WHERE (assign_id = 'assign-1') AND (row_id = '["user-1"]');| + end) + end + + test "deletes tombstone entries for removed scoped roles", cxt do + ddlx = + [ + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], + ~s[ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + ] + + old_perms = + perms_build( + cxt, + ddlx, + [ + Roles.role("editor", @projects, "p1", "assign-1", row_id: ["pm-1"]), + Roles.role("editor", @projects, "p2", "assign-1", row_id: ["pm-2"]) + ] + ) + + # we lost a scoped role in project.p2 + perms = + perms_build( + cxt, + ddlx, + [ + Roles.role("editor", @projects, "p1", "assign-1") + ] + ) + + cleanup = + old_perms + |> Permissions.Client.permissions_triggers(perms, 
cxt.schema_version) + |> cleanup_sql() + + assert Enum.find(cleanup, fn query -> + query == + ~s|DELETE FROM "__electric_local_roles_tombstone" WHERE (assign_id = 'assign-1') AND (row_id = '["pm-2"]');| + end) + end + + test "deletes local versions of added global roles", cxt do + ddlx = + [ + ~s[GRANT ALL ON #{table(@issues)} TO 'editor'], + ~s[ASSIGN #{table(@users)}.role TO #{table(@users)}.id] + ] + + old_perms = perms_build(cxt, ddlx, []) + + # we gained a global role + perms = + perms_build(cxt, ddlx, [ + Roles.role("editor", "assign-1", row_id: ["user-1"]) + ]) + + cleanup = + old_perms + |> Permissions.Client.permissions_triggers(perms, cxt.schema_version) + |> cleanup_sql() + + assert Enum.find(cleanup, fn query -> + query == + ~s|DELETE FROM "__electric_local_roles" WHERE (assign_id = 'assign-1') AND (row_id = '["user-1"]');| + end) + end + + test "deletes local versions of added scoped roles", cxt do + ddlx = + [ + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], + ~s[ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + ] + + old_perms = + perms_build( + cxt, + ddlx, + [ + Roles.role("editor", @projects, "p1", "assign-1", row_id: ["pm-1"]) + ] + ) + + # we gained a scoped role in project.p2 + perms = + perms_build( + cxt, + ddlx, + [ + Roles.role("editor", @projects, "p1", "assign-1", row_id: ["pm-1"]), + Roles.role("editor", @projects, "p2", "assign-1", row_id: ["pm-2"]) + ] + ) + + cleanup = + old_perms + |> Permissions.Client.permissions_triggers(perms, cxt.schema_version) + |> cleanup_sql() + + assert Enum.find(cleanup, fn query -> + query == + ~s|DELETE FROM "__electric_local_roles" WHERE (assign_id = 'assign-1') AND (row_id = '["pm-2"]');| + end) + end + end +end diff --git a/components/electric/test/electric/satellite/permissions/helper_test.exs b/components/electric/test/electric/satellite/permissions/helper_test.exs index f3ab0b6864..ba23e3c4c2 100644 --- a/components/electric/test/electric/satellite/permissions/helper_test.exs +++ b/components/electric/test/electric/satellite/permissions/helper_test.exs @@ -3,7 +3,8 @@ defmodule Electric.Satellite.Permissions.HelperTest do alias ElectricTest.PermissionsHelpers.{ Chgs, - Tree + Tree, + Schema } alias Electric.Satellite.{Permissions.Graph} @@ -16,11 +17,12 @@ defmodule Electric.Satellite.Permissions.HelperTest do @issues {"public", "issues"} @comments {"public", "comments"} @reactions {"public", "reactions"} - @project_memberships {"public", "project_memberships"} @tags {"public", "tags"} @issue_tags {"public", "issue_tags"} setup do + {:ok, schema_version} = Schema.load() + tree = Tree.new( [ @@ -55,17 +57,7 @@ defmodule Electric.Satellite.Permissions.HelperTest do {@tags, "t1", [{@issue_tags, "it1", []}, {@issue_tags, "it2", []}]}, {@tags, "t2", []} ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@offices, @regions, ["region_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]}, - # tasty join table - {@issue_tags, @tags, ["tag_id"]}, - {@issue_tags, @issues, ["issue_id"]} - ] + schema_version ) {:ok, tree: tree} diff --git a/components/electric/test/electric/satellite/permissions/join_table_test.exs b/components/electric/test/electric/satellite/permissions/join_table_test.exs index 8346ec727e..11acd0608c 100644 --- a/components/electric/test/electric/satellite/permissions/join_table_test.exs +++ 
b/components/electric/test/electric/satellite/permissions/join_table_test.exs @@ -5,6 +5,8 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do Chgs, Perms, Roles, + Schema, + Server, Tree } @@ -18,7 +20,6 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do @addresses {"public", "addresses"} @customers {"public", "customers"} @dishes {"public", "dishes"} - @order_dishes {"public", "order_dishes"} @order_riders {"public", "order_riders"} @orders {"public", "orders"} @restaurants {"public", "restaurants"} @@ -39,52 +40,48 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do Tree.delete_vertex(tree, v) end - def add_order(tree, restaurant_id, order_id) do - Tree.add_edge(tree, {@orders, [order_id]}, {@restaurants, [restaurant_id]}) + def add_order(module, tree, restaurant_id, order_id) do + module.apply_change( + tree, + [@restaurants], + Chgs.insert(@orders, %{"id" => order_id, "restaurant_id" => restaurant_id}) + ) end describe "simple join table" do setup do - loader_spec = - MockSchemaLoader.backend_spec( - migrations: [ - {"01", - [ - "create table restaurants (id uuid primary key)", - "create table orders (id uuid primary key)", - "create table riders (id uuid primary key)", - """ - create table order_riders ( - id uuid primary key, - order_id uuid not null references orders (id), - rider_id uuid not null references riders (id) - ) - """ - ]} - ] - ) + migrations = [ + {"01", + [ + "create table restaurants (id uuid primary key)", + "create table orders (id uuid primary key, restaurant_id uuid not null references restaurants (id))", + "create table riders (id uuid primary key)", + """ + create table order_riders ( + id uuid primary key, + order_id uuid not null references orders (id), + rider_id uuid not null references riders (id) + ) + """ + ]} + ] - {:ok, loader} = SchemaLoader.connect(loader_spec, []) + data = [ + {@restaurants, "rt1", []}, + {@orders, "or1", []}, + {@orders, "or2", []}, + {@riders, "rd1", []}, + {@riders, "rd2", []} + ] + + {:ok, loader} = Schema.loader(migrations) {:ok, schema_version} = SchemaLoader.load(loader) - tree = - Tree.new( - [ - {@restaurants, "rt1", []}, - {@orders, "or1", []}, - {@orders, "or2", []}, - {@riders, "rd1", []}, - {@riders, "rd2", []} - ], - [ - {@orders, @restaurants, ["restaurant_id"]}, - {@order_riders, @orders, ["order_id"]}, - {@order_riders, @riders, ["rider_id"]} - ] - ) + tree = Tree.new(data, schema_version) - tree = add_order(tree, "rt1", "or1") - {:ok, tree: tree, loader: loader, schema_version: schema_version} + tree = add_order(Server, tree, "rt1", "or1") + + {:ok, tree: tree, data: data, loader: loader, schema_version: schema_version} end test "scope_id resolves across join tables", cxt do @@ -214,17 +211,7 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do {@riders, "d2", []}, {@riders, "d3", []} ], - [ - {@addresses, @customers, ["customer_id"]}, - {@dishes, @restaurants, ["restaurant_id"]}, - {@order_dishes, @dishes, ["dish_id"]}, - {@order_dishes, @orders, ["order_id"]}, - {@order_riders, @orders, ["order_id"]}, - {@order_riders, @riders, ["rider_id"]}, - {@orders, @addresses, ["address_id"]}, - {@orders, @customers, ["customer_id"]}, - {@orders, @restaurants, ["restaurant_id"]} - ] + schema_version ) {:ok, _} = start_supervised(Perms.Transient) diff --git a/components/electric/test/electric/satellite/permissions/trigger_test.exs b/components/electric/test/electric/satellite/permissions/trigger_test.exs index 05c466bcc0..c88f9124df 100644 --- 
a/components/electric/test/electric/satellite/permissions/trigger_test.exs +++ b/components/electric/test/electric/satellite/permissions/trigger_test.exs @@ -16,9 +16,6 @@ defmodule Electric.Satellite.Permissions.TriggerTest do @workspaces {"public", "workspaces"} @projects {"public", "projects"} - @issues {"public", "issues"} - @comments {"public", "comments"} - @reactions {"public", "reactions"} @project_memberships {"public", "project_memberships"} setup do @@ -41,32 +38,6 @@ defmodule Electric.Satellite.Permissions.TriggerTest do role text not null ) """ - # "create table regions (id uuid primary key)", - # "create table offices (id uuid primary key, region_id uuid not null references regions (id))", - # "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", - # "create table users (id uuid primary key)", - # "create table teams (id uuid primary key)", - # """ - # create table team_memberships ( - # id uuid primary key, - # user_id uuid not null references users (id), - # team_id uuid not null references teams (id), - # team_role text not null - # ) - # """, - # """ - # create table site_admins ( - # id uuid primary key, - # user_id uuid not null references users (id), - # site_role text not null - # ) - # """, - # """ - # create table my_default.admin_users ( - # id uuid primary key, - # user_id uuid not null references users (id) - # ) - # """ ]} ] ) @@ -84,13 +55,7 @@ defmodule Electric.Satellite.Permissions.TriggerTest do {@projects, "p3", []} ]} ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]} - ] + schema_version ) {:ok, _} = start_supervised(Perms.Transient) diff --git a/components/electric/test/electric/satellite/permissions/write_buffer_test.exs b/components/electric/test/electric/satellite/permissions/write_buffer_test.exs index f11f28f363..512b34ea2d 100644 --- a/components/electric/test/electric/satellite/permissions/write_buffer_test.exs +++ b/components/electric/test/electric/satellite/permissions/write_buffer_test.exs @@ -3,9 +3,10 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do alias Electric.Satellite.Permissions.WriteBuffer alias Electric.Satellite.Permissions.Graph + alias ElectricTest.PermissionsHelpers.Auth alias ElectricTest.PermissionsHelpers.Chgs + alias ElectricTest.PermissionsHelpers.Schema alias ElectricTest.PermissionsHelpers.Tree - alias ElectricTest.PermissionsHelpers.Auth @workspaces {"public", "workspaces"} @projects {"public", "projects"} @@ -14,7 +15,7 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do @tags {"public", "tags"} @issue_tags {"public", "issue_tags"} - def upstream(fks) do + def upstream(schema_version) do Tree.new( [ {@workspaces, "w1", @@ -34,17 +35,27 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do ]}, {@workspaces, "w2", []} ], - fks + schema_version ) end setup do - upstream = - upstream([ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]} - ]) + migrations = [ + {"01", + [ + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary key, issue_id uuid not 
null references issues (id), comment text, owner text)", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table tags (id uuid primary key, tag text not null)", + "create table issue_tags (id uuid primary key, issue_id uuid not null references issues (id), tag_id uuid not null references tags (id))" + ]} + ] + + {:ok, schema_version} = Schema.load(migrations) + + upstream = upstream(schema_version) write_buffer = WriteBuffer.with_upstream(WriteBuffer.new(Auth.user()), upstream) @@ -266,6 +277,21 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do describe "join table" do setup do + migrations = [ + {"01", + [ + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id), comment text, owner text)", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table tags (id uuid primary key, tag text not null)", + "create table issue_tags (id uuid primary key, issue_id uuid not null references issues (id), tag_id uuid not null references tags (id))" + ]} + ] + + {:ok, schema_version} = Schema.load(migrations) + upstream = Tree.new( [ @@ -295,13 +321,7 @@ defmodule Electric.Satellite.Permissions.WriteBufferTest do {@tags, "t1", [{@issue_tags, "it1", []}, {@issue_tags, "it9", []}]}, {@tags, "t2", []} ], - [ - {@issue_tags, @tags, ["tag_id"]}, - {@issue_tags, @issues, ["issue_id"]}, - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]} - ] + schema_version ) write_buffer = WriteBuffer.with_upstream(WriteBuffer.new(Auth.user()), upstream) diff --git a/components/electric/test/electric/satellite/permissions_test.exs b/components/electric/test/electric/satellite/permissions_test.exs index 8df13e3bcc..8bb9694924 100644 --- a/components/electric/test/electric/satellite/permissions_test.exs +++ b/components/electric/test/electric/satellite/permissions_test.exs @@ -1,6 +1,8 @@ defmodule Electric.Satellite.PermissionsTest do use ExUnit.Case, async: true + alias ElectricTest.PermissionsHelpers + alias ElectricTest.PermissionsHelpers.{ Auth, Chgs, @@ -11,867 +13,1307 @@ defmodule Electric.Satellite.PermissionsTest do } alias Electric.Postgres.Extension.SchemaLoader - alias Electric.Postgres.MockSchemaLoader alias Electric.Satellite.{Permissions, Permissions.MoveOut} alias Electric.Replication.Changes import ElectricTest.PermissionsHelpers - @users {"public", "users"} - @regions {"public", "regions"} + @comments {"public", "comments"} + @issues {"public", "issues"} @offices {"public", "offices"} - @workspaces {"public", "workspaces"} + @project_memberships {"public", "project_memberships"} @projects {"public", "projects"} - @issues {"public", "issues"} - @comments {"public", "comments"} @reactions {"public", "reactions"} - @project_memberships {"public", "project_memberships"} + @regions {"public", "regions"} + @site_admins {"public", "site_admins"} + @users {"public", "users"} + @workspaces {"public", "workspaces"} + + @compound_root {"public", "compound_root"} + @compound_level1 {"public", "compound_level1"} + @compound_level2 {"public", "compound_level2"} + @compound_memberships {"public", "compound_memberships"} + 
@projects_assign ~s[ELECTRIC ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] @global_assign ~s[ELECTRIC ASSIGN #{table(@users)}.role TO #{table(@users)}.id] - setup do - loader_spec = - MockSchemaLoader.backend_spec( - migrations: [ - {"01", - [ - "create table regions (id uuid primary key)", - "create table offices (id uuid primary key, region_id uuid not null references regions (id))", - "create table workspaces (id uuid primary key)", - "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", - "create table issues (id uuid primary key, project_id uuid not null references projects (id))", - "create table comments (id uuid primary key, issue_id uuid not null references issues (id))", - "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", - "create table users (id uuid primary key, role text not null default 'normie')", - "create table teams (id uuid primary key)", - """ - create table project_memberships ( - id uuid primary key, - user_id uuid not null references users (id), - project_id uuid not null references projects (id), - project_role text not null - ) - """, - """ - create table team_memberships ( - id uuid primary key, - user_id uuid not null references users (id), - team_id uuid not null references teams (id), - team_role text not null - ) - """, - """ - create table site_admins ( - id uuid primary key, - user_id uuid not null references users (id), - site_role text not null - ) - """, - """ - create table my_default.admin_users ( - id uuid primary key, - user_id uuid not null references users (id) - ) - """ - ]} - ] - ) + defmacrop assert_write_rejected(test) do + # permissions failure messages are prefixed with `"permissions:"` so we're double checking + # that the error is caused by the permissions checks themselves, not by some other data error + # this is particularly important for the sqlite backed tests + quote do + assert {:error, "permissions:" <> _} = unquote(test) + end + end - {:ok, loader} = SchemaLoader.connect(loader_spec, []) + setup do + {:ok, loader} = PermissionsHelpers.Schema.loader() {:ok, schema_version} = SchemaLoader.load(loader) - tree = - Tree.new( - [ - {@regions, "r1", [{@offices, "o1"}, {@offices, "o2"}]}, - {@regions, "r2", [{@offices, "o3"}, {@offices, "o4"}]}, - {@workspaces, "w1", - [ - {@projects, "p1", - [ - {@issues, "i1", - [ - {@comments, "c1", - [{@reactions, "r1"}, {@reactions, "r2"}, {@reactions, "r3"}]}, - {@comments, "c2", [{@reactions, "r4"}]} - ]}, - {@issues, "i2", [{@comments, "c5"}]}, - {@project_memberships, "pm1", []} - ]}, - {@projects, "p2", - [ - {@issues, "i3", - [ - {@comments, "c3", - [{@reactions, "r5"}, {@reactions, "r6"}, {@reactions, "r7"}]}, - {@comments, "c4", [{@reactions, "r8"}]} - ]}, - {@issues, "i4"} - ]}, - {@projects, "p3", [{@issues, "i5", []}]}, - {@projects, "p4", [{@issues, "i6", []}]} - ]} - ], - [ - {@comments, @issues, ["issue_id"]}, - {@issues, @projects, ["project_id"]}, - {@offices, @regions, ["region_id"]}, - {@project_memberships, @projects, ["project_id"]}, - {@projects, @workspaces, ["workspace_id"]}, - {@reactions, @comments, ["comment_id"]} - ] - ) + data = [ + {@regions, "rg1", [{@offices, "o1"}, {@offices, "o2"}]}, + {@regions, "rg2", [{@offices, "o3"}, {@offices, "o4"}]}, + {@workspaces, "w1", + [ + {@projects, "p1", + [ + {@issues, "i1", + [ + {@comments, "c1", [{@reactions, "r1"}, {@reactions, "r2"}, {@reactions, "r3"}]}, + {@comments, 
"c2", [{@reactions, "r4"}]} + ]}, + {@issues, "i2", [{@comments, "c5"}]}, + {@project_memberships, "pm1", %{"user_id" => Auth.user_id(), "role" => "member"}, []} + ]}, + {@projects, "p2", + [ + {@issues, "i3", + [ + {@comments, "c3", [{@reactions, "r5"}, {@reactions, "r6"}, {@reactions, "r7"}]}, + {@comments, "c4", [{@reactions, "r8"}]} + ]}, + {@issues, "i4"} + ]}, + {@projects, "p3", [{@issues, "i5", [{@comments, "c6"}]}]}, + {@projects, "p4", [{@issues, "i6", []}]} + ]}, + {@compound_root, ["cmr1_1", "cmr2_1"], + [ + { + @compound_level1, + ["cml1_1", "cml2_1"], + [{@compound_level2, ["cmll1_1", "cmll2_1"], []}] + } + ]}, + {@users, [Auth.user_id()]}, + {@site_admins, ["sa1"], %{"role" => "site.admin", "user_id" => Auth.user_id()}, []} + ] + + tree = Tree.new(data, schema_version) {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree, loader: loader, schema_version: schema_version} + {:ok, + tree: tree, + loader: loader, + schema_version: schema_version, + data: data, + migrations: PermissionsHelpers.Schema.migrations()} end - describe "validate_write/3" do - test "scoped role, scoped grant", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], - @projects_assign - ], - [ - Roles.role("editor", @projects, "p2", "assign-1") - ] + for module <- [PermissionsHelpers.Server, PermissionsHelpers.Client] do + describe "#{module.name()}:" do + setup(cxt) do + {:ok, cxt} = unquote(module).setup(cxt) + {:ok, Map.put(Map.new(cxt), :module, unquote(module))} + end + + test "scoped role, scoped grant", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @projects_assign + ], + [ + Roles.role("editor", @projects, "p2", "assign-1") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + # issue i1 belongs to project p1 + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) + ]) + ) ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i1 belongs to project p1 - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) - ]) - ) - - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i3 belongs to project p2 - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}) - ]) - ) - - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i3 belongs to project p2 - Chgs.tx([ - Chgs.update(@comments, %{"id" => "c4", "issue_id" => "i3"}, %{ - "comment" => "changed" - }) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + # issue i3 belongs to project p2 + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}) + ]) + ) - test "unscoped role, scoped grant", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], - @global_assign - ], - [ - Roles.role("editor", "assign-1") - ] + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + # issue i3 belongs to project p2 + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c4", "issue_id" => "i3"}, %{ + "comment" => "changed" + }) + ]) + ) + end + + test "unscoped role, scoped grant", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @global_assign + ], + [ + Roles.role("editor", "assign-1") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + # issue i1 
belongs to project p1 + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) + ]) + ) ) - - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i1 belongs to project p1 - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) - ]) - ) - end - - test "scoped role, unscoped grant", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], - @projects_assign - ], - [ - # we have an editor role within project p2 - Roles.role("editor", @projects, "p2", "assign-1") - ] + end + + test "scoped role, unscoped grant", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], + @projects_assign + ], + [ + # we have an editor role within project p2 + Roles.role("editor", @projects, "p2", "assign-1") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + # issue i1 belongs to project p1 + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) + ]) + ) ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i1 belongs to project p1 - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) - ]) - ) - - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - # issue i3 belongs to project p2 but the grant is global - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}) - ]) - ) - end - - test "grant for different table", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT SELECT ON #{table(@comments)} TO 'editor'], - ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'], - @global_assign - ], - [ - Roles.role("editor", "assign-1") - ] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + # issue i3 belongs to project p2 but the grant is global + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}) + ]) + ) ) - - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) - ]) - ) - - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@reactions, %{"id" => "r100"}) - ]) - ) - end - - test "unscoped role, unscoped grant", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], - @global_assign - ], - [ - Roles.role("editor", "assign-1") - ] + end + + test "grant for different table", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT SELECT ON #{table(@comments)} TO 'editor'], + ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'], + @global_assign + ], + [ + Roles.role("editor", "assign-1") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) + ]) + ) ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update( - @comments, - %{"id" => "c100", "issue_id" => "i1", "text" => "old"}, - %{ - "text" => "changed" - } - ) - ]) - ) - - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"}) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@reactions, %{"id" => "r100", "comment_id" => "c1"}) + ]) + ) + end + + test "unscoped role, unscoped grant", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT 
UPDATE ON #{table(@comments)} TO 'editor'],
+              @global_assign
+            ],
+            [
+              Roles.role("editor", "assign-1")
+            ]
+          )
+
+        assert {:ok, _perms} =
+                 cxt.module.validate_write(
+                   perms,
+                   cxt.tree,
+                   Chgs.tx([
+                     Chgs.update(
+                       @comments,
+                       %{"id" => "c100", "issue_id" => "i1", "comment" => "old"},
+                       %{
+                         "comment" => "changed"
+                       }
+                     )
+                   ])
+                 )

-      assert {:ok, _perms} =
-               Permissions.validate_write(
-                 perms,
-                 cxt.tree,
-                 Chgs.tx([
-                   Chgs.update(
-                     @comments,
-                     %{"id" => "c100", "issue_id" => "i1", "text" => "old"},
-                     %{
-                       "text" => "changed"
-                     }
-                   )
-                 ])
-               )
-
-      assert {:error, _} =
-               Permissions.validate_write(
-                 perms,
-                 cxt.tree,
-                 Chgs.tx([
-                   Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"})
-                 ])
-               )
-    end
+        assert_write_rejected(
+          cxt.module.validate_write(
+            perms,
+            cxt.tree,
+            Chgs.tx([
+              Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i1"})
+            ])
+          )
+        )
+      end
+
-    test "scoped role, change outside of scope", cxt do
-      perms =
-        perms_build(
-          cxt,
-          [
-            ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'],
-            ~s[GRANT ALL ON #{table(@regions)} TO 'admin'],
-            @projects_assign,
-            @global_assign
-          ],
-          [
-            Roles.role("editor", @projects, "p2", "assign-1"),
-            Roles.role("admin", "assign-2")
-          ]
+      test "scoped role, change outside of scope", cxt do
+        perms =
+          cxt.module.perms(
+            cxt,
+            [
+              ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'],
+              ~s[GRANT ALL ON #{table(@regions)} TO 'admin'],
+              @projects_assign,
+              @global_assign
+            ],
+            [
+              Roles.role("editor", @projects, "p2", "assign-1"),
+              Roles.role("admin", "assign-2")
+            ]
+          )
+
+        assert {:ok, _perms} =
+                 cxt.module.validate_write(
+                   perms,
+                   cxt.tree,
+                   Chgs.tx([
+                     Chgs.update(@regions, %{"id" => "r1", "name" => "region"}, %{
+                       "name" => "updated region"
+                     })
+                   ])
+                 )
+      end
+
+      test "role with no matching assign", cxt do
+        perms =
+          cxt.module.perms(
+            cxt,
+            [
+              ~s[GRANT UPDATE ON #{table(@comments)} TO (#{table(@projects)}, 'editor')]
+            ],
+            [
+              Roles.role("editor", @projects, "p1", "non-existent")
+            ]
+          )
+
+        assert_write_rejected(
+          cxt.module.validate_write(
+            perms,
+            cxt.tree,
+            Chgs.tx([
+              Chgs.update(@comments, %{"id" => "c1", "comment" => "old comment"}, %{
+                "comment" => "new comment"
+              })
+            ])
+          )
+        )
+      end
+
+      test "overlapping global and scoped perms", cxt do
+        # Test that even though the global perm doesn't grant
+        # the required permissions, the scoped perms are checked
+        # as well. The rule is that if *any* grant gives the perm
+        # then we have it, so we need to check every applicable grant
+        # until we run out or get permission.
+ perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT UPDATE (description) ON #{table(@issues)} TO (projects, 'editor')], + ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'], + @projects_assign, + @global_assign + ], + [ + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", "assign-2") + ] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@issues, %{"id" => "i1"}, %{ + "description" => "updated" + }) + ]) + ) + end + + test "AUTHENTICATED w/user_id", cxt do + perms = + cxt.module.perms( + cxt, + ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], + [] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c10", "issue_id" => "i1"}) + ]) + ) + end + + test "AUTHENTICATED w/o permission", cxt do + perms = + cxt.module.perms( + cxt, + ~s[GRANT UPDATE ON #{table(@comments)} TO AUTHENTICATED], + [] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c10", "issue_id" => "i1"}) + ]) + ) + ) + end + + test "AUTHENTICATED w/o user_id", cxt do + perms = + cxt.module.perms( + cxt, + ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], + [], + auth: Auth.nobody() + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c10", "issue_id" => "i1"}) + ]) + ) + ) + end + + test "ANYONE w/o user_id", cxt do + perms = + cxt.module.perms( + cxt, + ~s[GRANT ALL ON #{table(@comments)} TO ANYONE], + [], + auth: Auth.nobody() + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c10", "issue_id" => "i1"}) + ]) + ) + end + + test "unscoped protected columns", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT INSERT (id, comment, issue_id) ON #{table(@comments)} TO 'editor'], + ~s[GRANT UPDATE (comment) ON #{table(@comments)} TO 'editor'], + @global_assign + ], + [ + Roles.role("editor", "assign-1") + ] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c10", + "issue_id" => "i1", + "comment" => "something" + }) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@regions, %{"id" => "r1", "name" => "region"}, %{ - "name" => "updated region" - }) - ]) - ) - end - - test "role with no matching assign", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT UPDATE ON #{table(@comments)} TO (#{table(@projects)}, 'editor')] - ], - [ - Roles.role("editor", @projects, "p1", "non-existant") - ] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c11", + "issue_id" => "i1", + "comment" => "something", + "owner" => "invalid" + }) + ]) + ) ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@comments, %{"id" => "c1", "comment" => "old comment"}, %{ - "comment" => "new comment" - }) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c10"}, %{"comment" => "updated"}) + ]) + ) - test "overlapping global and scoped perms", cxt do - # Test that even though the global perm doesn't grant - # the required permissions, the scoped perms are checked - # as well. 
The rule is that if *any* grant gives the perm - # then we have it, so we need to check every applicable grant - # until we run out of get permission. - perms = - perms_build( - cxt, - [ - ~s[GRANT UPDATE (description) ON #{table(@issues)} TO (projects, 'editor')], - ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'], - @projects_assign, - @global_assign - ], - [ - Roles.role("editor", @projects, "p1", "assign-1"), - Roles.role("editor", "assign-2") - ] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c10"}, %{ + "comment" => "updated", + "owner" => "changed" + }) + ]) + ) ) + end + + test "scoped protected columns", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT INSERT (id, comment, issue_id) ON #{table(@comments)} TO (projects, 'editor')], + ~s[GRANT UPDATE (comment) ON #{table(@comments)} TO (projects, 'editor')], + @projects_assign + ], + [ + Roles.role("editor", @projects, "p1", "assign-1") + ] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c10", + "issue_id" => "i1", + "comment" => "something" + }) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@issues, %{"id" => "i1"}, %{ - "description" => "updated" - }) - ]) - ) - end - - test "AUTHENTICATED w/user_id", cxt do - perms = - perms_build( - cxt, - ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], - [] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{ + "id" => "c11", + "issue_id" => "i1", + "comment" => "something", + "owner" => "invalid" + }) + ]) + ) ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c10"}) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c2"}, %{"comment" => "updated"}) + ]) + ) - test "AUTHENTICATED w/o permission", cxt do - perms = - perms_build( - cxt, - ~s[GRANT SELECT ON #{table(@comments)} TO AUTHENTICATED], - [] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c10"}, %{ + "comment" => "updated", + "owner" => "changed" + }) + ]) + ) ) + end + + test "moves between auth scopes", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT UPDATE ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], + ~s[GRANT UPDATE ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], + ~s[GRANT SELECT ON #{table(@issues)} TO 'reader'], + ~s[GRANT SELECT ON #{table(@reactions)} TO 'reader'], + @projects_assign + ], + [ + # update rights on p1 & p3 + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p3", "assign-1"), + # read-only role on project p2 + Roles.role("reader", @projects, "p2", "assign-1") + ] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{ + "project_id" => "p3" + }) + ]) + ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c10"}) - ]) - ) - end - - test "AUTHENTICATED w/o user_id", cxt do - perms = - perms_build( - cxt, - ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], - [], - auth: Auth.nobody() + # attempt to move an issue into a project we 
don't have write access to + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{ + "project_id" => "p2" + }) + ]) + ) ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c10"}) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@reactions, %{"id" => "r1", "comment_id" => "c1"}, %{ + "comment_id" => "c6" + }) + ]) + ) - test "ANYONE w/o user_id", cxt do - perms = - perms_build( - cxt, - ~s[GRANT ALL ON #{table(@comments)} TO ANYONE], - [], - auth: Auth.nobody() + # attempt to move an issue into a project we don't have write access to + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@reactions, %{"id" => "r1", "comment_id" => "c1"}, %{ + "comment_id" => "c3" + }) + ]) + ) ) + end + + test "write in scope tree", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], + ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], + @projects_assign + ], + [ + Roles.role("editor", @projects, "p1", "assign-1") + ] + ) + + # a single tx that builds within a writable permissions scope + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}), + Chgs.insert(@reactions, %{"id" => "r100", "comment_id" => "c100"}) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c10"}) - ]) - ) - end - - test "protected columns", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT INSERT (id, text) ON #{table(@comments)} TO 'editor'], - ~s[GRANT UPDATE (text) ON #{table(@comments)} TO 'editor'], - @global_assign - ], - [ - Roles.role("editor", "assign-1") - ] + # any failure should abort the tx + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i200", "project_id" => "p1"}), + # this insert lives outside our perms + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}), + Chgs.insert(@reactions, %{"id" => "r100", "comment_id" => "c100"}) + ]) + ) ) + end + + test "compound keys", cxt do + perms = + cxt.module.perms( + cxt, + [ + ~s[GRANT ALL ON #{table(@compound_level2)} TO (#{table(@compound_root)}, 'editor')], + ~s[GRANT ALL ON #{table(@compound_level1)} TO (#{table(@compound_root)}, 'editor')], + ~s[ASSIGN (#{table(@compound_root)}, #{table(@compound_memberships)}.role) TO #{table(@compound_memberships)}.user_id] + ], + [ + Roles.role("editor", @compound_root, ["cmr1_1", "cmr2_1"], "assign-1") + ] + ) + + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@compound_level1, %{ + "id1" => "cml1_100", + "id2" => "cml2_100", + "root_id1" => "cmr1_1", + "root_id2" => "cmr2_1" + }) + ]) + ) + end + end - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c10", "text" => "something"}) - ]) - ) + # roles that are created on the client and then used within the same tx before triggers have + # run on pg + describe "#{module.name()}: intermediate roles" do + 
setup(cxt) do + {:ok, cxt} = unquote(module).setup(cxt) + {:ok, Map.put(Map.new(cxt), :module, unquote(module))} + end + + setup(cxt) do + rules = [ + # project level perms + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], + # read only to viewer + ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'viewer')], + ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'viewer')], + # global roles allowing create project and assign members + ~s[GRANT ALL ON #{table(@projects)} TO 'admin'], + ~s[GRANT ALL ON #{table(@project_memberships)} TO 'admin'], + ~s[GRANT ALL ON site_admins TO 'admin'], + # global roles with a join table + ~s[GRANT ALL ON #{table(@regions)} TO 'site.admin'], + ~s[GRANT ALL ON #{table(@offices)} TO 'site.admin'], + + # the assign rule for the 'manager' role + @projects_assign, + @global_assign, + ~s[ASSIGN site_admins.role TO site_admins.user_id] + ] - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{ - "id" => "c10", - "text" => "something", - "owner" => "invalid" - }) - ]) - ) + roles = [ + # start with the ability to create projects and memberships + Roles.role("manager", @projects, "p1", "assign-1", row_id: ["pm1"]), + Roles.role("admin", "assign-2") + ] - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@comments, %{"id" => "c10"}, %{"text" => "updated"}) - ]) - ) + perms = + cxt.module.perms( + cxt, + rules, + roles + ) + + {:ok, rules: rules, roles: roles, perms: perms} + end + + test "create and write to scope", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) + ]) + ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@comments, %{"id" => "c10"}, %{ - "text" => "updated", - "owner" => "changed" - }) - ]) - ) - end + # the generated role persists accross txs + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c101", "issue_id" => "i101"}), + Chgs.insert(@comments, %{"id" => "c200", "issue_id" => "i1"}), + Chgs.insert(@issues, %{"id" => "i200", "project_id" => "p1"}) + ]) + ) - test "moves between auth scopes", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT UPDATE ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT SELECT ON #{table(@issues)} TO 'reader'], - @projects_assign - ], - [ - # update rights on p1 & p3 - Roles.role("editor", @projects, "p1", "assign-1"), - Roles.role("editor", @projects, "p3", "assign-1"), - # read-only role on project p2 - Roles.role("reader", @projects, "p2", "assign-1") - ] - ) + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@comments, %{"id" => "c102", "issue_id" => "i101"}), + Chgs.insert(@comments, %{"id" => "c103", "issue_id" => "i100"}) + ]) + ) + end + + test "create then write to scope across txns", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + 
cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{ - "project_id" => "p3" - }) - ]) - ) + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }) + ]) + ) - # attempt to move an issue into a project we don't have write access to - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@issues, %{"id" => "i1", "project_id" => "p1"}, %{ - "project_id" => "p2" - }) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) + ]) + ) + end + + test "update intermediate role", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }) + ]) + ) - test "write in scope tree", cxt do - perms = - perms_build( - cxt, - [ - ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], - @projects_assign - ], - [ - Roles.role("editor", @projects, "p1", "assign-1") - ] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @project_memberships, + %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }, + %{"role" => "viewer"} + ), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) + ]) + ) ) + end + + test "removal of role via delete to memberships", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) + ]) + ) - # a single tx that builds within a writable permissions scope - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}), - Chgs.insert(@reactions, %{"id" => "r100", "comment_id" => "c100"}) - ]) - ) - - # any failure should abort the tx - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}), - # this insert lives outside our perms - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i3"}), - Chgs.insert(@reactions, %{"id" => "r100", "comment_id" => "c100"}) - ]) - ) - end - end + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + 
Chgs.delete(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }), + Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) + ]) + ) + ) + end + + test "delete to existing memberships", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@project_memberships, %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "role" => "manager" + }) + ]) + ) - describe "intermediate roles" do - # roles that are created on the client and then used within the same tx before triggers have - # run on pg - setup(cxt) do - perms = - perms_build( - cxt, - [ - ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], - ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], - # read only to viewer - ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'viewer')], - ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'viewer')], - # global roles allowing create project and assign members - ~s[GRANT ALL ON #{table(@projects)} TO 'project_admin'], - ~s[GRANT ALL ON #{table(@project_memberships)} TO 'project_admin'], - # the assign rule for the 'manager' role - @projects_assign, - @global_assign - ], - [ - # start with the ability to create projects and memberships - Roles.role("manager", @projects, "p1", "assign-1", row_id: ["pm1"]), - Roles.role("project_admin", "assign-2") - ] + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}) + ]) + ) ) + end - {:ok, perms: perms} - end + test "delete to existing global memberships", cxt do + # reset the db because we're repeating the permissions setup + cxt = cxt.module.reset(cxt) - test "create and write to scope", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }), - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) - ]) - ) + perms = + cxt.module.perms( + cxt, + cxt.rules, + cxt.roles ++ + [ + Roles.role("site.admin", "assign-3", row_id: ["sa1"]) + ] + ) + + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@offices, %{ + "id" => "off100", + "region_id" => "rg1" + }) + ]) + ) - # the generated role persists accross txs - assert {:ok, perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c101", "issue_id" => "i101"}), - Chgs.insert(@comments, %{"id" => "c200", "issue_id" => "i1"}), - Chgs.insert(@issues, %{"id" => "i200", "project_id" => "p1"}) - ]) - ) + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@site_admins, %{ + "id" => "sa1", + "user_id" => Auth.user_id(), + "role" => "site.admin" + }) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@comments, %{"id" => "c102", "issue_id" => "i101"}), - Chgs.insert(@comments, %{"id" => "c102", "issue_id" => "i100"}) - ]) - ) - end + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + 
Chgs.tx([ + Chgs.insert(@offices, %{ + "id" => "off200", + "region_id" => "rg1" + }) + ]) + ) + ) + end + + test "delete to existing memberships, then re-add", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@project_memberships, %{ + "id" => "pm1", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "role" => "manager" + }), + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p1", + "user_id" => Auth.user_id(), + "role" => "manager" + }) + ]) + ) - test "create then write to scope across txns", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}) - ]) - ) + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}) + ]) + ) + end + + test "add and delete local role", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }), + Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), + Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}), + Chgs.delete(@project_memberships, %{ + "id" => "pm100", + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" + }) + ]) + ) - assert {:ok, perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }) - ]) - ) + # the generated role persists accross txs + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) + ]) + ) + ) + end + + test "local unscoped roles", cxt do + assert_write_rejected( + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@offices, %{ + "id" => "o100", + "region_id" => "rg1" + }) + ]) + ) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) - ]) - ) - end + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@site_admins, %{ + "id" => "sa100", + "user_id" => Auth.user_id(), + "role" => "site.admin" + }), + Chgs.insert(@offices, %{ + "id" => "o100", + "region_id" => "rg1" + }) + ]) + ) - test "update intermediate role", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }) - ]) - ) + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@site_admins, %{ + "id" => "sa100", + "user_id" => Auth.user_id(), + "role" => "site.admin" + }), + Chgs.insert(@offices, %{ + "id" => "o101", + "region_id" => "rg1" + }) + ]) + ) + ) + end + + test "local scoped roles", cxt do + # reset the db because we're repeating the permissions setup + cxt = cxt.module.reset(cxt) + + perms = + 
cxt.module.perms( + cxt, + [ + # project level perms + ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], + # read only to viewer + ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'viewer')], + ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'viewer')], + # global roles allowing create project and assign members + ~s[GRANT ALL ON #{table(@projects)} TO 'admin'], + ~s[GRANT ALL ON #{table(@project_memberships)} TO 'admin'], + ~s[GRANT ALL ON site_admins TO 'admin'], + + # global roles with a join table + ~s[GRANT ALL ON #{table(@regions)} TO 'site.admin'], + ~s[GRANT ALL ON #{table(@offices)} TO 'site.admin'], + + # the assign rule for the 'manager' role + @projects_assign, + @global_assign, + ~s[ASSIGN site_admins.role TO site_admins.user_id] + ], + [ + # don't start with the ability to create projects and memberships + # Roles.role("manager", @projects, "p1", "assign-1", row_id: ["pm1"]), + Roles.role("admin", "assign-2") + ] + ) + + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@issues, %{ + "id" => "i100", + "project_id" => "p1" + }) + ]) + ) + ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update( - @project_memberships, - %{ + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ "id" => "pm100", - "project_id" => "p100", "user_id" => Auth.user_id(), + "project_id" => "p1", "role" => "manager" - }, - %{"role" => "viewer"} - ), - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) - ]) - ) - end - - test "removal of role via delete to memberships", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }), - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}) - ]) - ) + }), + Chgs.insert(@issues, %{ + "id" => "i100", + "project_id" => "p1" + }) + ]) + ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.delete(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }), - Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) - ]) - ) - end + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "manager" + }), + Chgs.insert(@issues, %{ + "id" => "i101", + "project_id" => "p1" + }) + ]) + ) + ) + end + + test "scope moves", cxt do + assert_write_rejected( + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @issues, + %{"id" => "i1", "project_id" => "p1"}, + %{"project_id" => "p3"} + ) + ]) + ) + ) - test "delete to existing memberships", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.delete(@project_memberships, %{ - "id" => "pm1", - "project_id" => "p1", - "user_id" => Auth.user_id(), - "role" => "manager" - }) - ]) - ) + 
assert_write_rejected( + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @comments, + %{"id" => "c2", "issue_id" => "i1"}, + %{"issue_id" => "i3"} + ) + ]) + ) + ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}) - ]) - ) - end + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p3", + "role" => "manager" + }) + ]) + ) - test "delete to existing memberships, then re-add", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.delete(@project_memberships, %{ - "id" => "pm1", - "project_id" => "p1", - "user_id" => Auth.user_id(), - "role" => "manager" - }), - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p1", - "user_id" => Auth.user_id(), - "role" => "manager" - }) - ]) - ) + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @comments, + %{"id" => "c2", "issue_id" => "i1"}, + %{"issue_id" => "i5"} + ) + ]) + ) - assert {:ok, _perms} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p1"}) - ]) - ) - end + assert {:ok, _perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @issues, + %{"id" => "i1", "project_id" => "p1"}, + %{"project_id" => "p3"} + ) + ]) + ) + end + + test "scope move after removing existing role", cxt do + assert {:ok, perms} = + cxt.module.validate_write( + cxt.perms, + cxt.tree, + Chgs.tx([ + Chgs.insert(@project_memberships, %{ + "id" => "pm100", + "user_id" => Auth.user_id(), + "project_id" => "p3", + "role" => "manager" + }) + ]) + ) - test "add and delete local role", cxt do - assert {:ok, perms} = - Permissions.validate_write( - cxt.perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@projects, %{"id" => "p100", "workspace_id" => "w1"}), - Chgs.insert(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }), - Chgs.insert(@issues, %{"id" => "i100", "project_id" => "p100"}), - Chgs.insert(@comments, %{"id" => "c100", "issue_id" => "i100"}), - Chgs.delete(@project_memberships, %{ - "id" => "pm100", - "project_id" => "p100", - "user_id" => Auth.user_id(), - "role" => "manager" - }) - ]) - ) + assert {:ok, perms} = + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.delete(@project_memberships, %{ + "id" => "pm1", + "user_id" => Auth.user_id(), + "project_id" => "p1", + "role" => "manager" + }) + ]) + ) - # the generated role persists accross txs - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) - ]) - ) + assert_write_rejected( + cxt.module.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update( + @comments, + %{"id" => "c6", "issue_id" => "i5"}, + %{"issue_id" => "i1"} + ) + ]) + ) + ) + end end end @@ -893,14 +1335,15 @@ defmodule Electric.Satellite.PermissionsTest do ] ) - assert {:error, _} = - Permissions.validate_write( - perms, - cxt.tree, - Chgs.tx([ - Chgs.update(@issues, %{"id" => "i3"}, %{"description" => "changed"}) - ]) - ) + assert_write_rejected( + Permissions.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@issues, %{"id" => "i3"}, 
%{"description" => "changed"}) + ]) + ) + ) {:ok, perms: perms} end @@ -930,45 +1373,47 @@ defmodule Electric.Satellite.PermissionsTest do test "tdp out of scope", cxt do lsn = 99 - assert {:error, _} = - cxt.perms - |> Perms.add_transient( - assign_id: "assign-1", - target_relation: @issues, - target_id: ["i4"], - scope_id: ["p1"], - valid_to: LSN.new(lsn + 1) - ) - |> Permissions.validate_write( - cxt.tree, - # i3 belongs to project p2 where we only have read-access and the transient - # permission only applies to i4, so not allowed - Chgs.tx([Chgs.update(@issues, %{"id" => "i3"}, %{"description" => "changed"})], - lsn: lsn - ) - ) + assert_write_rejected( + cxt.perms + |> Perms.add_transient( + assign_id: "assign-1", + target_relation: @issues, + target_id: ["i4"], + scope_id: ["p1"], + valid_to: LSN.new(lsn + 1) + ) + |> Permissions.validate_write( + cxt.tree, + # i3 belongs to project p2 where we only have read-access and the transient + # permission only applies to i4, so not allowed + Chgs.tx([Chgs.update(@issues, %{"id" => "i3"}, %{"description" => "changed"})], + lsn: lsn + ) + ) + ) end test "expired tdp", cxt do lsn = 99 - assert {:error, _} = - cxt.perms - |> Perms.add_transient( - assign_id: "assign-1", - target_relation: @issues, - target_id: ["i3"], - scope_id: ["p1"], - valid_to: LSN.new(lsn) - ) - |> Permissions.validate_write( - cxt.tree, - # i3 belongs to project p2 where we only have read-access, we have a - # transient permission that allows us to update it but that tdp has expired - Chgs.tx([Chgs.update(@issues, %{"id" => "i3"}, %{"description" => "changed"})], - lsn: lsn + 1 - ) - ) + assert_write_rejected( + cxt.perms + |> Perms.add_transient( + assign_id: "assign-1", + target_relation: @issues, + target_id: ["i3"], + scope_id: ["p1"], + valid_to: LSN.new(lsn) + ) + |> Permissions.validate_write( + cxt.tree, + # i3 belongs to project p2 where we only have read-access, we have a + # transient permission that allows us to update it but that tdp has expired + Chgs.tx([Chgs.update(@issues, %{"id" => "i3"}, %{"description" => "changed"})], + lsn: lsn + 1 + ) + ) + ) end end diff --git a/components/electric/test/support/permissions_helpers.ex b/components/electric/test/support/permissions_helpers.ex index 05abb4c882..22b4963457 100644 --- a/components/electric/test/support/permissions_helpers.ex +++ b/components/electric/test/support/permissions_helpers.ex @@ -1,4 +1,118 @@ defmodule ElectricTest.PermissionsHelpers do + defmodule Schema do + alias Electric.Postgres.MockSchemaLoader + alias Electric.Postgres.Extension.SchemaLoader + + def migrations do + [ + {"01", + [ + "create table regions (id uuid primary key, name text)", + "create table offices (id uuid primary key, region_id uuid not null references regions (id))", + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id), description text)", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id), comment text, owner text)", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table users (id uuid primary key, role text not null default 'normie')", + "create table teams (id uuid primary key)", + "create table tags (id uuid primary key, tag text not null)", + "create table addresses (id uuid primary key, user_id uuid not null 
references users (id), address text)", + """ + create table issue_tags ( + id uuid primary key, + issue_id uuid not null references issues (id), + tag_id uuid not null references tags (id) + ) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + role text not null + ) + """, + """ + create table team_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + team_id uuid not null references teams (id), + team_role text not null + ) + """, + """ + create table site_admins ( + id uuid primary key, + user_id uuid not null references users (id), + role text not null + ) + """, + """ + create table admin_users ( + id uuid primary key, + user_id uuid not null references users (id) + ) + """, + """ + create table compound_root ( + id1 uuid, + id2 uuid, + primary key (id1, id2) + ) + """, + """ + create table compound_level1 ( + id1 uuid, + id2 uuid, + root_id1 uuid not null, + root_id2 uuid not null, + value1 text, + value2 text, + primary key (id1, id2), + foreign key (root_id1, root_id2) references compound_root (id1, id2) + ) + """, + """ + create table compound_level2 ( + id1 uuid, + id2 uuid, + level1_id1 uuid not null, + level1_id2 uuid not null, + value1 text, + value2 text, + primary key (id1, id2), + foreign key (level1_id1, level1_id2) references compound_level1 (id1, id2) + ) + """, + """ + create table compound_memberships ( + id uuid primary key, + root_id1 uuid not null, + root_id2 uuid not null, + user_id uuid not null references users (id), + role text not null, + foreign key (root_id1, root_id2) references compound_root (id1, id2) + ) + """ + ]} + ] + end + + def loader(migrations \\ migrations()) do + loader_spec = + MockSchemaLoader.backend_spec(migrations: migrations) + + {:ok, _loader} = SchemaLoader.connect(loader_spec, []) + end + + def load(migrations \\ migrations()) do + {:ok, loader} = loader(migrations) + + {:ok, _schema_version} = SchemaLoader.load(loader) + end + end + defmodule Auth do def user_id do "92bafe18-a818-4a3f-874f-590324140478" @@ -171,8 +285,11 @@ defmodule ElectricTest.PermissionsHelpers do defmodule Roles do alias Electric.Satellite.SatPerms, as: P - def role(role_name, assign_id) do - %P.Role{role: role_name, assign_id: assign_id} + def role(role_name, assign_id, attrs \\ []) do + struct( + %P.Role{role: role_name, assign_id: assign_id}, + attrs + ) end def role(role_name, table, id, assign_id, attrs \\ []) do @@ -199,6 +316,7 @@ defmodule ElectricTest.PermissionsHelpers do @behaviour Electric.Satellite.Permissions.Graph + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes alias Electric.Satellite.Permissions alias Electric.Postgres.Schema.FkGraph @@ -207,31 +325,31 @@ defmodule ElectricTest.PermissionsHelpers do @root :__root__ - def new(vs, fk_edges) do - {__MODULE__, {data_tree(vs), fk_graph(fk_edges)}} + def new(vs, schema) do + {__MODULE__, {data_tree(vs), fk_graph(schema), schema}} end - defp fk_graph(fk_edges) do - FkGraph.new(fk_edges) + defp fk_graph(%SchemaLoader.Version{schema: schema}) do + FkGraph.for_schema(schema) end defp graph(attrs \\ []) do Permissions.Graph.graph(attrs) end - def add_vertex({__MODULE__, {graph, fks}}, v) do + def add_vertex({__MODULE__, {graph, fks, schema}}, v) do graph = Graph.add_vertex(graph, v) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end - def delete_vertex({__MODULE__, {graph, fks}}, v) do + def 
delete_vertex({__MODULE__, {graph, fks, schema}}, v) do graph = Graph.delete_vertex(graph, v) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end - def add_edge({__MODULE__, {graph, fks}}, a, b) do + def add_edge({__MODULE__, {graph, fks, schema}}, a, b) do graph = Graph.add_edge(graph, a, b) - {__MODULE__, {graph, fks}} + {__MODULE__, {graph, fks, schema}} end defp data_tree(vs) do @@ -240,11 +358,15 @@ defmodule ElectricTest.PermissionsHelpers do graph end + defp build_data_tree({table, id, children}, {parent, graph}) when is_list(children) do + build_data_tree({table, id, %{}, children}, {parent, graph}) + end + defp build_data_tree({table, id}, {parent, graph}) do - build_data_tree({table, id, []}, {parent, graph}) + build_data_tree({table, id, %{}, []}, {parent, graph}) end - defp build_data_tree({_table, _id, children} = v, {parent, graph}) do + defp build_data_tree({_table, _id, _attrs, children} = v, {parent, graph}) do graph = Graph.add_edge(graph, v(v), v(parent)) {_v, graph} = Enum.reduce(children, {v, graph}, &build_data_tree/2) @@ -253,15 +375,15 @@ defmodule ElectricTest.PermissionsHelpers do defp v(@root), do: @root - defp v({table, id, _children}) do - {table, [id]} + defp v({table, id, _attrs, _children}) do + {table, List.wrap(id)} end def scope_id(_state, {_, _} = root, {_, _} = root, id) when is_list(id) do [{id, [{root, id}]}] end - def scope_id({graph, fks}, {_, _} = root, {_, _} = relation, id) when is_list(id) do + def scope_id({graph, fks, _schema}, {_, _} = root, {_, _} = relation, id) when is_list(id) do graph |> Permissions.Graph.traverse_fks(fk_path(fks, root, relation), relation, id) |> Enum.flat_map(fn @@ -271,7 +393,8 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def scope_path({graph, fks}, {_, _} = root, {_, _} = relation, id) when is_list(id) do + def scope_path({graph, fks, _schema}, {_, _} = root, {_, _} = relation, id) + when is_list(id) do graph |> Permissions.Graph.traverse_fks(fk_path(fks, root, relation), relation, id) |> Enum.flat_map(fn @@ -281,7 +404,11 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def modified_fks({_graph, fks} = state, {_, _} = root, %Changes.UpdatedRecord{} = update) do + def modified_fks( + {_graph, fks, _schema} = state, + {_, _} = root, + %Changes.UpdatedRecord{} = update + ) do %Changes.UpdatedRecord{ changed_columns: changed_columns, old_record: old, @@ -290,7 +417,7 @@ defmodule ElectricTest.PermissionsHelpers do } = update case FkGraph.foreign_keys(fks, root, relation) do - [] -> + nil -> [] foreign_keys -> @@ -322,7 +449,7 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def parent({_graph, fks}, {_, _} = root, relation, record) when is_map(record) do + def parent({_graph, fks, _schema}, {_, _} = root, relation, record) when is_map(record) do with [^relation, parent_rel | _] <- FkGraph.path(fks, root, relation), [_ | _] = relations <- FkGraph.foreign_keys(fks, root, relation), {^parent_rel, fk_cols} <- Enum.find(relations, &match?({^parent_rel, _}, &1)) do @@ -333,34 +460,42 @@ defmodule ElectricTest.PermissionsHelpers do end @impl Electric.Satellite.Permissions.Graph - def apply_change({graph, fks} = state, roots, change) do + def apply_change({graph, fks, schema} = state, roots, change) do updated = Enum.reduce(roots, graph, fn root, graph -> case change do - %Changes.DeletedRecord{relation: relation, old_record: %{"id" => id}} -> - 
Graph.delete_vertex(graph, {relation, [id]}) + %Changes.DeletedRecord{relation: relation, old_record: old} -> + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(old, &1)) + + Graph.delete_vertex(graph, {relation, pks}) + + %Changes.NewRecord{relation: relation, record: record} -> + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(record, &1)) - %Changes.NewRecord{relation: relation, record: %{"id" => id} = record} -> case parent(state, root, relation, record) do nil -> - Graph.add_vertex(graph, {relation, [id]}) + Graph.add_vertex(graph, {relation, pks}) parent -> validate_fk!(graph, parent) - Graph.add_edge(graph, {relation, [id]}, parent) + Graph.add_edge(graph, {relation, pks}, parent) end # we copy the satellite and treat all updates as upserts %Changes.UpdatedRecord{} = change -> - %{relation: relation, old_record: old, record: %{"id" => id} = new} = change + %{relation: relation, old_record: old, record: new} = change case modified_fks(state, root, change) do [] -> graph modified_keys -> - child = {relation, [id]} + {:ok, pk_cols} = SchemaLoader.Version.primary_keys(schema, relation) + pks = Enum.map(pk_cols, &Map.fetch!(old, &1)) + child = {relation, pks} Enum.reduce(modified_keys, graph, fn {^relation, _old_id, _new_id}, graph -> @@ -386,7 +521,7 @@ defmodule ElectricTest.PermissionsHelpers do end end) - {updated, fks} + {updated, fks, schema} end defp validate_fk!(graph, parent) do @@ -405,8 +540,8 @@ defmodule ElectricTest.PermissionsHelpers do end end - def table(relation) do - Electric.Utils.inspect_relation(relation) + def table({_schema, table}) do + table end def perms_build(cxt, grants, roles, attrs \\ []) do @@ -465,4 +600,275 @@ defmodule ElectricTest.PermissionsHelpers do Protox.encode!(struct) |> IO.iodata_to_binary() end end + + defmodule Sqlite do + alias Electric.Postgres.Extension.SchemaLoader + + def build_tree(conn, data, schema) do + {conn, _} = Enum.reduce(data, {conn, nil}, &build_data_tree(&1, &2, schema)) + conn + end + + defp build_data_tree({table, id, children}, {conn, parent}, schema) when is_list(children) do + build_data_tree({table, id, %{}, children}, {conn, parent}, schema) + end + + defp build_data_tree({table, id}, {conn, parent}, schema) do + build_data_tree({table, id, %{}, []}, {conn, parent}, schema) + end + + defp build_data_tree({table, id, attrs, children} = v, {conn, parent}, schema) do + {:ok, pks} = SchemaLoader.Version.primary_keys(schema, table) + + ids = + id + |> List.wrap() + |> Enum.map(&"'#{&1}'") + + init = + case parent do + nil -> + { + pks, + ids + } + + {_table, _id, _attrs, _children} = parent -> + { + pks ++ fks(schema, table, parent), + ids ++ ids(parent) + } + end + + {cols, vals} = + Enum.reduce(attrs, init, fn {k, v}, {ks, vs} -> + {[k | ks], ["'#{v}'" | vs]} + end) + + query = "INSERT INTO #{t(v)} (#{Enum.join(cols, ",")}) VALUES (#{Enum.join(vals, ",")})" + + :ok = Exqlite.Sqlite3.execute(conn, query) + + {conn, _} = Enum.reduce(children, {conn, v}, &build_data_tree(&1, &2, schema)) + {conn, parent} + end + + defp t({{_, table}, _id, _attrs, _children}) do + table + end + + defp fks(schema, table, {parent, _, _, _}) do + {:ok, fks} = SchemaLoader.Version.foreign_keys(schema, table, parent) + fks + end + + defp ids({_table, id, _attrs, _}) do + id + |> List.wrap() + |> Enum.map(&"'#{&1}'") + end + + def query(%{conn: nil}, _), do: nil + + def query(%{conn: conn}, sql) do + {:ok, stmt} = 
Exqlite.Sqlite3.prepare(conn, sql) + Exqlite.Sqlite3.fetch_all(conn, stmt) |> dbg + end + + def query(_, _), do: nil + end + + defmodule Server do + use Electric.Postgres.MockSchemaLoader + + alias ElectricTest.PermissionsHelpers.{ + Tree + } + + alias Electric.Satellite.Permissions + + def setup(cxt) do + %{migrations: migrations, data: data} = cxt + + loader_spec = MockSchemaLoader.backend_spec(migrations: migrations) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + + {:ok, tree: Tree.new(data, schema_version), loader: loader, schema_version: schema_version} + end + + def reset(cxt) do + cxt + end + + def name, do: "Server" + + def perms(cxt, grants, roles, attrs \\ []) do + ElectricTest.PermissionsHelpers.perms_build(cxt, grants, roles, attrs) + end + + def table(relation) do + Electric.Utils.inspect_relation(relation) + end + + def apply_change({Tree, tree}, roots, tx) do + tree = Tree.apply_change(tree, roots, tx) + {Tree, tree} + end + + def validate_write(perms, tree, tx) do + Permissions.validate_write(perms, tree, tx) + end + end + + defmodule Client do + use Electric.Postgres.MockSchemaLoader + + alias Electric.Replication.Changes + alias Electric.Satellite.Permissions + + def setup(cxt) do + %{migrations: migrations, data: data} = cxt + {:ok, conn} = Exqlite.Sqlite3.open(":memory:") + + conn = + Enum.reduce(migrations, conn, fn {_version, stmts}, conn -> + for stmt <- stmts do + :ok = Exqlite.Sqlite3.execute(conn, stmt) + end + + conn + end) + + loader_spec = MockSchemaLoader.backend_spec(migrations: migrations) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + + conn = Sqlite.build_tree(conn, data, schema_version) + + {:ok, tree: conn, conn: conn, schema_version: schema_version, loader: loader} + end + + def reset(cxt) do + :ok = Exqlite.Sqlite3.close(cxt.conn) + {:ok, state} = cxt.module.setup(cxt) + Map.merge(cxt, Map.new(state)) + end + + def name, do: "Client" + + def perms(cxt, grants, roles, attrs \\ []) do + perms = ElectricTest.PermissionsHelpers.perms_build(cxt, grants, roles, attrs) + + query = Permissions.Client.permissions_triggers(perms, cxt.schema_version) + + # IO.puts(query) + + tx = + IO.iodata_to_binary([ + "BEGIN EXCLUSIVE TRANSACTION;\n\n", + query, + "\nCOMMIT;\n" + ]) + + :ok = Exqlite.Sqlite3.execute(cxt.conn, tx) + + perms + end + + def table({_schema, table}), do: table + + def validate_write(perms, conn, tx) do + query = build_query(tx) + + case Exqlite.Sqlite3.execute(conn, query) do + :ok -> + {:ok, perms} + + {:error, _} = error -> + Exqlite.Sqlite3.execute(conn, "ROLLBACK") + error + end + end + + def apply_change(conn, _roots, change) do + query = build_query(%Changes.Transaction{changes: [change]}) + + with {:error, _} = error <- Exqlite.Sqlite3.execute(conn, query) do + Exqlite.Sqlite3.execute(conn, "ROLLBACK") + error + end + end + + defp build_query(%Changes.Transaction{changes: changes}) do + IO.iodata_to_binary( + [ + "BEGIN;", + Enum.map(changes, &change_to_stmt/1), + "COMMIT;" + ] + |> Enum.intersperse("\n") + ) + end + + defp change_to_stmt(%Changes.NewRecord{relation: relation, record: record}) do + {cols, vals} = columns_values(record) + + [ + "INSERT INTO ", + t(relation), + " (", + Enum.join(cols, ", "), + ") VALUES (", + Enum.join(vals, ", "), + ");" + ] + end + + defp change_to_stmt(%Changes.UpdatedRecord{} = change) do + %{relation: relation, old_record: old, record: new, changed_columns: changed} = change + 
+ cols = + new + |> Enum.filter(fn {k, _} -> MapSet.member?(changed, k) end) + |> columns_values() + |> Tuple.to_list() + |> Enum.zip() + + [ + "UPDATE ", + t(relation), + " SET ", + Enum.map(cols, fn {k, v} -> [k, " = ", v] end) |> Enum.intersperse(", "), + " WHERE ", + "id = ", + v(Map.fetch!(old, "id")), + ";" + ] + end + + defp change_to_stmt(%Changes.DeletedRecord{relation: relation, old_record: old}) do + [ + "DELETE FROM ", + t(relation), + " WHERE ", + "id = ", + v(Map.fetch!(old, "id")), + ";" + ] + end + + defp t({_, table}), do: table + + defp columns_values(record) do + Enum.reduce(record, {[], []}, fn {k, v}, {cols, vals} -> + {[k | cols], [v(v) | vals]} + end) + end + + defp v(s) when is_binary(s), do: "'#{s}'" + defp v(i), do: "#{i}" + end end diff --git a/protocol/satellite.proto b/protocol/satellite.proto index f31e066898..bc859f4cac 100644 --- a/protocol/satellite.proto +++ b/protocol/satellite.proto @@ -739,4 +739,10 @@ message SatPerms { string user_id = 2; Rules rules = 3; repeated Role roles = 4; + // `triggers` is the sql code to install these permissions as triggers in + // the local db. + // The assumption is that the entire message is compressed before sending + // over the wire so just include the trigger sql directly rather than + // compress it separately. + string triggers = 5; }
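
The `triggers` field added above carries plain SQL, so a Satellite client can install the
permissions by executing it verbatim against its local SQLite database, the same way the
`Client.perms/4` test helper in this patch does. Below is a minimal sketch of that usage,
assuming a decoded `%Electric.Satellite.SatPerms{}` struct (the regenerated protobuf struct
includes the `triggers` field) and an open `Exqlite.Sqlite3` connection; the module and
function names `Example.ApplyPermissionTriggers` / `apply_triggers/2` are illustrative only
and not part of this patch.

    defmodule Example.ApplyPermissionTriggers do
      alias Electric.Satellite.SatPerms

      # Wrap the generated trigger DDL in an exclusive transaction before
      # executing it, mirroring what the Client test helper does when it
      # installs the permissions triggers into the in-memory database.
      def apply_triggers(conn, %SatPerms{triggers: triggers}) when is_binary(triggers) do
        sql =
          IO.iodata_to_binary([
            "BEGIN EXCLUSIVE TRANSACTION;\n\n",
            triggers,
            "\nCOMMIT;\n"
          ])

        # Exqlite.Sqlite3.execute/2 runs the whole multi-statement script and
        # returns :ok or {:error, reason}.
        Exqlite.Sqlite3.execute(conn, sql)
      end
    end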