permit views with pkeys, fixes #49
/db/view/1 works, as does the update JSON API
cldellow committed Feb 5, 2023
1 parent 4a220b4 commit ebf72ce
Showing 8 changed files with 258 additions and 3 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -65,6 +65,10 @@ chrome.

- JSON arrays of strings are 3x faster to filter and 4x faster to facet.

- Enable the row page for views on base tables that have primary keys.

- Enable the JSON API's update operation for views on base tables that have primary keys (see the sketch after this list).

- Advanced export: This control is hidden by default. Click the `(Advanced)` link to see it.

- Filters: These controls are hidden by default. Click the funnel icon to see them.
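
As a rough illustration (not part of this commit's diff), this is the shape of schema those two view features expect: the view passes the base table's primary key through unchanged, and an `INSTEAD OF UPDATE` trigger routes edits back to the base table. The names below are made up for the example; the test added in this commit builds an equivalent schema.

```python
# Sketch only: hypothetical table/view names.
import sqlite3

conn = sqlite3.connect("example.db")
with conn:
    conn.execute("CREATE TABLE items(id INTEGER PRIMARY KEY, title TEXT)")
    # The view exposes the base table's primary key (`id`) unchanged,
    # so the plugin can report it as the view's primary key.
    conn.execute("""
        CREATE VIEW items_view AS
        SELECT id, title, 'extra' AS annotation FROM items
    """)
    # The INSTEAD OF trigger is what makes the JSON API's update work.
    conn.execute("""
        CREATE TRIGGER items_view_update INSTEAD OF UPDATE OF title ON items_view
        BEGIN
            UPDATE items SET title = new.title WHERE id = old.id;
        END;
    """)
conn.close()
```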
2 changes: 2 additions & 0 deletions datasette_ui_extras/__init__.py
Expand Up @@ -4,6 +4,7 @@
from .facets import enable_yolo_facets, facets_extra_body_script
from .filters import enable_yolo_arraycontains_filter, enable_yolo_exact_filter, yolo_filters_from_request
from .new_facets import StatsFacet, YearFacet, YearMonthFacet
from .view_row_pages import enable_yolo_view_row_pages

PLUGIN = 'datasette-ui-extras'

@@ -69,6 +70,7 @@ def startup():
enable_yolo_facets()
enable_yolo_arraycontains_filter()
enable_yolo_exact_filter()
enable_yolo_view_row_pages()

@hookimpl
def register_facet_classes():
182 changes: 182 additions & 0 deletions datasette_ui_extras/view_row_pages.py
@@ -0,0 +1,182 @@
from sqlite3 import OperationalError

from datasette.database import Database
from sqlite_utils.db import View, Table, validate_column_names, jsonify_if_needed
from sqlglot import parse_one, exp

# Adds two features:
# - you can navigate to the row page for a view row, e.g. /db/viewname/1
# - you can use the JSON API to update a view row (assumes the view has an INSTEAD OF UPDATE trigger)

original_primary_keys = Database.primary_keys

async def patched_primary_keys(self, table):
# If `table` is actually a view: fetch its definition, parse it with sqlglot,
# and check that it is a SELECT over exactly one table that passes the base
# table's primary key columns through unchanged.
is_view = list(await self.execute("select sql from sqlite_master where type = 'view' and name = ?", [table]))

if not is_view:
return await original_primary_keys(self, table)

sql = is_view[0]['sql']

parsed = None
try:
parsed = parse_one(sql)
except Exception:
# If sqlglot can't parse the view definition, treat it as having no pkeys.
return []

#print(repr(parsed))

view_exp = parsed.expression

if 'from' not in view_exp.args:
return []

from_exps = view_exp.args['from'].expressions

if len(from_exps) != 1:
return []

table_name = from_exps[0].this.this

table_pkeys = await original_primary_keys(self, table_name)

cols = view_exp.expressions

# We only want the columns that are pass-through identifiers (plain column
# references, not expressions or aliases).
ids = []
for col in cols:
if isinstance(col, exp.Column) and isinstance(col.this, exp.Identifier):
ids.append(col.name)


#print('ids={} table_pkeys={}'.format(ids, table_pkeys))

# If every base-table pkey is exposed by the view, report the table's pkeys.
if all(pkey in ids for pkey in table_pkeys):
return table_pkeys

return []
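
# Illustrative examples of the pass-through check above -- assuming a base
# table `data` with primary key `id` (sketch only, not executed):
#
#   CREATE VIEW v1 AS SELECT id, title FROM data
#     -> ['id']  (the pkey column is exposed unchanged)
#   CREATE VIEW v2 AS SELECT title FROM data
#     -> []      (the pkey column is not exposed by the view)
#   CREATE VIEW v3 AS SELECT id AS item_id, title FROM data
#     -> []      (aliased columns are not pass-through identifiers)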

original_get_all_foreign_keys = Database.get_all_foreign_keys
async def patched_get_all_foreign_keys(self):
rv = await original_get_all_foreign_keys(self)

names = await self.execute("select name from sqlite_master where type = 'view'")
for name, in names:
rv[name] = {'incoming': [], 'outgoing': []}

return rv
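
# Shape of the patched result (sketch): for a database with table `data` and
# view `view`, it looks roughly like
#   {'data': {'incoming': [...], 'outgoing': [...]},
#    'view': {'incoming': [], 'outgoing': []}}
# which gives the row page an (empty) foreign-key entry to find for views.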

class UpdateableView(View):
def __init__(self, underlying):
self.underlying = underlying

@property
def pks(self):
"Primary key columns for this view."
# TODO: figure this out intelligently (e.g. via the patched
# Database.primary_keys); for now, assume the view exposes `id`.
return ['id']

# Currently unreachable fallback, kept for reference: PRAGMA table_info
# reports pk=0 for every view column, so this would always yield ["rowid"].
names = [column.name for column in self.columns if column.is_pk]
if not names:
names = ["rowid"]
#print('pks={}'.format(names))
return names

def __getattr__(self, name):
return getattr(self.underlying, name)

def update(
self,
pk_values,
updates = None,
alter = False,
conversions = None,
):
"""
Execute a SQL ``UPDATE`` against the specified row.
See :ref:`python_api_update`.
:param pk_values: The primary key of an individual record - can be a tuple if the
table has a compound primary key.
:param updates: A dictionary mapping columns to their updated values.
:param alter: Set to ``True`` to add any missing columns.
:param conversions: Optional dictionary of SQL functions to apply during the update, for example
``{"mycolumn": "upper(?)"}``.
"""
updates = updates or {}
conversions = conversions or {}
if not isinstance(pk_values, (list, tuple)):
pk_values = [pk_values]
# Soundness check that the record exists (raises error if not):
self.get(pk_values)
if not updates:
return self
args = []
sets = []
wheres = []
pks = self.pks
validate_column_names(updates.keys())
for key, value in updates.items():
sets.append("[{}] = {}".format(key, conversions.get(key, "?")))
args.append(jsonify_if_needed(value))
wheres = ["[{}] = ?".format(pk_name) for pk_name in pks]
args.extend(pk_values)
sql = "update [{table}] set {sets} where {wheres}".format(
table=self.name, sets=", ".join(sets), wheres=" and ".join(wheres)
)
#print('running update: sql={} args={}'.format(sql, args))
with self.db.conn:
try:
rowcount = self.db.execute(sql, args).rowcount
except OperationalError as e:
if alter and (" column" in e.args[0]):
# Attempt to add any missing columns, then try again
self.add_missing_columns([updates])
rowcount = self.db.execute(sql, args).rowcount
else:
raise

# NOTE: Don't check rowcount: updates routed through an INSTEAD OF trigger don't report a meaningful rowcount.
# assert rowcount == 1
self.last_pk = pk_values[0] if len(pks) == 1 else pk_values
return self
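
# Worked example (sketch only, not executed): for a view named `view` whose
# pks resolve to ['id'], uv.update(1, {"title": "new title"}) builds
#   sql  = "update [view] set [title] = ? where [id] = ?"
#   args = ['new title', 1]
# and relies on the view's INSTEAD OF UPDATE trigger to apply the change.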


def thunk_update(
self,
pk_values,
updates = None,
alter = False,
conversions = None,
):

if alter:
raise Exception('updates on views must use alter=False')

if conversions:
raise Exception('updates on views must use conversions=None')

uv = UpdateableView(self)
return uv.update(pk_values, updates, alter=False, conversions=None)

def enable_yolo_view_row_pages():
Database.primary_keys = patched_primary_keys
Database.get_all_foreign_keys = patched_get_all_foreign_keys

# Enable updating views in the JSON API. I didn't want to snapshot the code,
# nor generally enable .get and .pks on views, so I do some shenanigans here
# to add an .update method that gets hijacked by UpdateableView.
#
# I ended up having to snapshot the update code anyway, because the existing
# implementation asserts on the rowcount -- which is not correct for updates
# that go through INSTEAD OF triggers.
UpdateableView.get = Table.get
View.update = thunk_update
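
# How this gets exercised -- a sketch under assumptions, not executed here.
# As far as I can tell, Datasette's JSON update endpoint writes rows via
# sqlite-utils, which is why patching sqlite_utils' View is enough. Roughly,
# once enable_yolo_view_row_pages() has run:
#
#   import sqlite_utils
#   db = sqlite_utils.Database("db.sqlite")
#   db["view"].update(1, {"title": "new title"})
#
# sqlite-utils resolves db["view"] to a View, View.update is now thunk_update,
# which wraps the View in UpdateableView, and the view's INSTEAD OF UPDATE
# trigger applies the change to the base table.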
2 changes: 1 addition & 1 deletion go
@@ -1,4 +1,4 @@
#!/bin/bash
set -euo pipefail

datasette --reload --setting trace_debug 1 --metadata metadata.json geonames.db cooking.db superuser.db --plugins-dir plugins --setting facet_time_limit_ms 2000 --setting sql_time_limit_ms 2000 --setting truncate_cells_html 500
datasette --reload --setting trace_debug 1 --metadata metadata.json geonames.db cooking.db superuser.db --plugins-dir plugins --template-dir templates --setting facet_time_limit_ms 2000 --setting sql_time_limit_ms 2000 --setting truncate_cells_html 500
3 changes: 2 additions & 1 deletion plugins/yolo_auth.py
@@ -1,5 +1,6 @@
from datasette import hookimpl

@hookimpl
def permission_allowed(action):
def permission_allowed(actor, action, resource):
#print('actor={} action={} resource={}'.format(actor, action, resource))
return True
2 changes: 1 addition & 1 deletion setup.py
@@ -32,7 +32,7 @@ def get_long_description():
version=VERSION,
packages=["datasette_ui_extras"],
entry_points={"datasette": ["ui_extras = datasette_ui_extras"]},
install_requires=["datasette>=1.0a0"],
install_requires=["datasette>=1.0a0", "sqlglot"],
extras_require={"test": ["pytest", "pytest-asyncio", "pytest-watch", "sqlite-utils"]},
package_data={
"datasette_ui_extras": ["static/*", "templates/*"]
6 changes: 6 additions & 0 deletions tests/plugins/auth.py
@@ -0,0 +1,6 @@
from datasette import hookimpl

@hookimpl
def actor_from_request(request):
return {'id': 'root'}

60 changes: 60 additions & 0 deletions tests/test_ui_extras.py
@@ -1,5 +1,7 @@
from datasette.app import Datasette
import pytest
import json
import sqlite3


@pytest.mark.asyncio
@@ -9,3 +11,61 @@ async def test_plugin_is_installed():
assert response.status_code == 200
installed_plugins = {p["name"] for p in response.json()}
assert "datasette-ui-extras" in installed_plugins

@pytest.mark.asyncio
async def test_updatable_view(tmp_path):
db_name = tmp_path / "db.sqlite"
conn = sqlite3.connect(db_name)
with conn:
conn.execute("CREATE TABLE data(id integer primary key, title text, other text)")
conn.execute("INSERT INTO data(id, title) VALUES (1, 'title1')").fetchall()
conn.execute("INSERT INTO data(id, title) VALUES (2, 'title2')").fetchall()
conn.execute("CREATE VIEW view AS SELECT id, 'some jazz' as instructions, title, other as uneditable FROM data")
conn.execute("""
CREATE TRIGGER update_underlying_fields INSTEAD OF UPDATE OF title ON view
BEGIN
UPDATE data SET title = new.title WHERE id = old.id;
END;
""")
conn.close()

datasette = Datasette(
memory=True,
plugins_dir='./tests/plugins/',
files=[db_name]
)

response = await datasette.client.get("/db.json")
assert response.status_code == 200

response = await datasette.client.get("/db/data.json")
assert response.status_code == 200

response = await datasette.client.get("/db/data/1.json?_shape=array")
assert response.status_code == 200
assert response.json() == [{'id': 1, 'title': 'title1', 'other': None}]

response = await datasette.client.get("/db/view/1.json?_shape=array")
assert response.status_code == 200
assert response.json() == [{'id': 1, 'instructions': 'some jazz', 'uneditable': None, 'title': 'title1'}]

response = await datasette.client.post(
"/db/view/1/-/update",
headers={
'content-type': 'application/json',
},
content=json.dumps({
"update": {
"title": "new title",
}
})
)
assert response.status_code == 200

response = await datasette.client.get("/db/view/1.json?_shape=array&_col=title")
assert response.status_code == 200
assert response.json() == [{'id': 1, 'instructions': 'some jazz', 'uneditable': None, 'title': 'new title'}]

response = await datasette.client.get("/db/view/1")
assert response.status_code == 200
