This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

More precise type for LoggingTransaction.execute #15432

Merged 8 commits on Apr 14, 2023
Changes from 7 commits
1 change: 1 addition & 0 deletions changelog.d/15432.misc
@@ -0,0 +1 @@
Improve type hints.
20 changes: 17 additions & 3 deletions synapse/storage/database.py
@@ -58,7 +58,7 @@
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.background_updates import BackgroundUpdater
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
-from synapse.storage.types import Connection, Cursor
+from synapse.storage.types import Connection, Cursor, _Parameters
Contributor

If we're now importing _Parameters somewhere else can we rename it to TransactionParameters or something? It could also use a comment above it...

Contributor Author

Can do. Will probably call it ExecuteParameters or QueryParameters.

Contributor Author

Went for SQLQueryParameters in the end (as opposed to URL query parameters).
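For context, the alias under discussion is a DBAPI-2-style parameters type. A minimal sketch of what such an alias typically looks like (the exact definition and final name in synapse/storage/types.py may differ):

    from typing import Any, Mapping, Sequence, Union

    # Parameters accepted by Cursor.execute: either a positional sequence
    # (for "?" / "%s" placeholders) or a mapping (for named placeholders).
    SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]]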

from synapse.util.async_helpers import delay_cancellation
from synapse.util.iterutils import batch_iter

@@ -371,10 +371,18 @@ def execute_batch(self, sql: str, args: Iterable[Iterable[Any]]) -> None:
        if isinstance(self.database_engine, PostgresEngine):
            from psycopg2.extras import execute_batch

+            # TODO: is it safe for values to be Iterable[Iterable[Any]] here?
+            # https://www.psycopg.org/docs/extras.html?highlight=execute_batch#psycopg2.extras.execute_batch
+            # suggests each arg in args should be a sequence or mapping
            self._do_execute(
                lambda the_sql: execute_batch(self.txn, the_sql, args), sql
            )
        else:
+            # TODO: is it safe for values to be Iterable[Iterable[Any]] here?
+            # https://docs.python.org/3/library/sqlite3.html?highlight=sqlite3#sqlite3.Cursor.executemany
+            # suggests that the outer collection may be iterable, but
+            # https://docs.python.org/3/library/sqlite3.html?highlight=sqlite3#how-to-use-placeholders-to-bind-values-in-sql-queries
+            # suggests that the inner collection should be a sequence or dict.
            self.executemany(sql, args)

    def execute_values(
@@ -390,14 +398,20 @@ def execute_values(
        from psycopg2.extras import execute_values

        return self._do_execute(
+            # TODO: is it safe for values to be Iterable[Iterable[Any]] here?
+            # https://www.psycopg.org/docs/extras.html?highlight=execute_batch#psycopg2.extras.execute_values says values should be Sequence[Sequence]
            lambda the_sql: execute_values(self.txn, the_sql, values, fetch=fetch),
            sql,
        )

-    def execute(self, sql: str, *args: Any) -> None:
-        self._do_execute(self.txn.execute, sql, *args)
+    def execute(self, sql: str, parameters: _Parameters = ()) -> None:
+        self._do_execute(self.txn.execute, sql, parameters)

    def executemany(self, sql: str, *args: Any) -> None:
+        # TODO: we should add a type for *args here. Looking at Cursor.executemany
+        # and DBAPI2 it ought to be Sequence[_Parameter], but we pass in
+        # Iterable[Iterable[Any]] in execute_batch and execute_values above, which mypy
+        # complains about.
        self._do_execute(self.txn.executemany, sql, *args)

    def executescript(self, sql: str) -> None:
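For a concrete sense of the two argument shapes those executemany/execute_batch TODOs are weighing, here is a small, self-contained illustration using plain sqlite3 rather than Synapse's wrappers (table and values are made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE events (stream_ordering INTEGER, event_id TEXT)")

    # The shape DBAPI 2 documents for executemany: a sequence of per-row
    # parameter sequences, i.e. roughly Sequence[_Parameters].
    rows = [(1, "$event_a"), (2, "$event_b")]
    cur.executemany("INSERT INTO events VALUES (?, ?)", rows)

    # A generator is only Iterable[Iterable[Any]]; sqlite3 still accepts it at
    # runtime, which is the looser shape execute_batch and execute_values pass on.
    lazy_rows = ((i, f"$event_{i}") for i in range(3, 5))
    cur.executemany("INSERT INTO events VALUES (?, ?)", lazy_rows)

    conn.commit()

Both calls work at runtime; the open question in the TODOs is which of these shapes the type hints should promise.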
19 changes: 11 additions & 8 deletions synapse/storage/databases/main/event_federation.py
@@ -114,6 +114,10 @@ def __init__(self, room_id: str):


class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
+    # TODO: this attribute comes from EventPushActionsWorkerStore. Should we inherit from
+    # that store so that mypy can deduce this for itself?
+    stream_ordering_month_ago: Optional[int]
Comment on lines +117 to +119
Contributor Author

I can't see why this should live on EventPushActionsWorkerStore (and I can't see anywhere else that uses it). I think it'd make more sense to move this onto EventsWorkerStore if we anticipate other users, and here if not.

Contributor

EventPushActionsWorkerStore used to use it -- see #13118.

Contributor

(This change seems unrelated, did the above changes shake it out somehow?)

Contributor Author

If I don't make this change then mypy doesn't know what self.stream_ordering_month_ago is elsewhere in the file. That means it's treated as Any, which means the fix to the signature of execute doesn't spot the missing comma typo.
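A hypothetical call site showing the failure mode described above (the helper and its arguments are illustrative, not code from this PR):

    from synapse.storage.database import LoggingTransaction

    def _purge_old_extremities(
        txn: LoggingTransaction, stream_ordering_month_ago: int
    ) -> None:
        # Correct: parameters passed as a one-element tuple.
        txn.execute(
            "DELETE FROM stream_ordering_to_exterm WHERE stream_ordering < ?",
            (stream_ordering_month_ago,),
        )
        # Typo: no trailing comma, so the second argument is a bare int rather
        # than a sequence/mapping. Under the old `*args: Any` signature mypy
        # accepted this; with `parameters: _Parameters` it is an arg-type error,
        # provided the value itself is not typed as Any.
        txn.execute(
            "DELETE FROM stream_ordering_to_exterm WHERE stream_ordering < ?",
            (stream_ordering_month_ago),
        )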

Contributor

> I think it'd make more sense to move this onto EventsWorkerStore if we anticipate other users, and here if not.

I might have overly aggressively resolved this -- did we want to do this?

Contributor Author

I wasn't in any particular hurry tbh!
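A simplified sketch of the mixin layout being discussed (class names here are illustrative, not Synapse's actual hierarchy): one store sets the attribute at runtime, and a sibling store that only reads it declares the type so mypy can check the comparison instead of needing # type: ignore[attr-defined].

    from typing import Optional

    class PushActionsStore:
        # Stands in for EventPushActionsWorkerStore, which computes the value.
        def update_caches(self) -> None:
            # In Synapse this is refreshed periodically by a background call.
            self.stream_ordering_month_ago: Optional[int] = 1234

    class FederationStore:
        # Stands in for EventFederationWorkerStore, which only reads the value.
        # Declared here so mypy knows the attribute's type in this class.
        stream_ordering_month_ago: Optional[int]

        def is_too_old(self, stream_ordering: int) -> bool:
            assert self.stream_ordering_month_ago is not None
            return stream_ordering <= self.stream_ordering_month_ago

    class CombinedStore(FederationStore, PushActionsStore):
        pass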


    def __init__(
        self,
        database: DatabasePool,
@@ -1182,8 +1186,8 @@ async def have_room_forward_extremities_changed_since(
        Throws a StoreError if we have since purged the index for
        stream_orderings from that point.
        """
-
-        if stream_ordering <= self.stream_ordering_month_ago:  # type: ignore[attr-defined]
+        assert self.stream_ordering_month_ago is not None
+        if stream_ordering <= self.stream_ordering_month_ago:
            raise StoreError(400, f"stream_ordering too old {stream_ordering}")

        sql = """
@@ -1231,7 +1235,8 @@ async def get_forward_extremities_for_room_at_stream_ordering(

        # provided the last_change is recent enough, we now clamp the requested
        # stream_ordering to it.
-        if last_change > self.stream_ordering_month_ago:  # type: ignore[attr-defined]
+        assert self.stream_ordering_month_ago is not None
+        if last_change > self.stream_ordering_month_ago:
            stream_ordering = min(last_change, stream_ordering)

        return await self._get_forward_extremeties_for_room(room_id, stream_ordering)
@@ -1246,8 +1251,8 @@ async def _get_forward_extremeties_for_room(
        Throws a StoreError if we have since purged the index for
        stream_orderings from that point.
        """
-
-        if stream_ordering <= self.stream_ordering_month_ago:  # type: ignore[attr-defined]
+        assert self.stream_ordering_month_ago is not None
+        if stream_ordering <= self.stream_ordering_month_ago:
            raise StoreError(400, "stream_ordering too old %s" % (stream_ordering,))

        sql = """
@@ -1707,9 +1712,7 @@ def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None:
                DELETE FROM stream_ordering_to_exterm
                WHERE stream_ordering < ?
            """
-            txn.execute(
-                sql, (self.stream_ordering_month_ago,)  # type: ignore[attr-defined]
-            )
+            txn.execute(sql, (self.stream_ordering_month_ago,))

        await self.db_pool.runInteraction(
            "_delete_old_forward_extrem_cache",