Flake8 linting of sydent/ and tests/ directories #345

Closed
wants to merge 11 commits
1 change: 1 addition & 0 deletions changelog.d/345.misc
@@ -0,0 +1 @@
Run flake8 lints on sydent/ and update files accordingly.
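The bulk of this PR applies three recurring flake8 fixes: over-long lines (E501) are rewrapped using Python's implicit concatenation of adjacent string literals, unused `res = cur.execute(...)` bindings (F841) are dropped, and the one bare `except:` is annotated with `# noqa: E722`. The sketch below is not from sydent; it is a minimal, made-up example of the first two patterns, runnable against an in-memory SQLite database.

import sqlite3


def fetch_widget(cur, widget_id):
    # Before the lint pass this would have been a single over-long line such as
    #   res = cur.execute("select id, name, created_ts from widgets where ...", (widget_id,))
    # Adjacent string literals are concatenated at compile time, so the query
    # is unchanged while each source line stays under the length limit, and the
    # unused "res" binding is gone because execute() is called only for effect
    # until the rows are actually fetched.
    cur.execute(
        "select id, name, created_ts from widgets "
        "where id = ? and deleted = 0 "
        "order by created_ts desc",
        (widget_id,),
    )
    return cur.fetchone()


db = sqlite3.connect(":memory:")
db.execute(
    "create table widgets "
    "(id integer primary key, name text, created_ts bigint, deleted int)"
)
db.execute("insert into widgets values (1, 'gadget', 1000, 0)")
print(fetch_widget(db.cursor(), 1))  # (1, 'gadget', 1000)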
12 changes: 6 additions & 6 deletions sydent/db/accounts.py
@@ -34,8 +34,8 @@ def getAccountByToken(self, token):
"""
cur = self.sydent.db.cursor()
res = cur.execute(
"select a.user_id, a.created_ts, a.consent_version from accounts a, tokens t "
"where t.user_id = a.user_id and t.token = ?",
"select a.user_id, a.created_ts, a.consent_version from accounts a, "
"tokens t where t.user_id = a.user_id and t.token = ?",
(token,),
)

@@ -58,7 +58,7 @@ def storeAccount(self, user_id, creation_ts, consent_version):
:type consent_version: str or None
"""
cur = self.sydent.db.cursor()
res = cur.execute(
cur.execute(
"insert or ignore into accounts (user_id, created_ts, consent_version) "
"values (?, ?, ?)",
(user_id, creation_ts, consent_version),
@@ -76,7 +76,7 @@ def setConsentVersion(self, user_id, consent_version):
:type consent_version: unicode or None
"""
cur = self.sydent.db.cursor()
res = cur.execute(
cur.execute(
"update accounts set consent_version = ? where user_id = ?",
(consent_version, user_id),
)
@@ -92,7 +92,7 @@ def addToken(self, user_id, token):
:type token: unicode
"""
cur = self.sydent.db.cursor()
res = cur.execute(
cur.execute(
"insert into tokens (user_id, token) values (?, ?)",
(user_id, token),
)
@@ -106,7 +106,7 @@ def delToken(self, token):
:type token: unicode
"""
cur = self.sydent.db.cursor()
res = cur.execute(
cur.execute(
"delete from tokens where token = ?",
(token,),
)
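Most of the edits above simply drop a `res =` binding that flake8 flagged as unused (F841). In the standard library's sqlite3 module, `Cursor.execute()` returns the cursor itself, so the binding only matters when the rows are read afterwards, as in `getAccountByToken`. A minimal sketch of the distinction, using a throwaway table rather than sydent's schema:

import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute("create table tokens (token text primary key, user_id text not null)")

# Write path: executed purely for its side effect, so nothing needs to hold
# the return value (which is just the cursor itself).
cur.execute(
    "insert into tokens (user_id, token) values (?, ?)",
    ("@alice:example.com", "abc123"),
)

# Read path: keep a handle (or simply reuse `cur`) because rows are fetched.
res = cur.execute("select user_id from tokens where token = ?", ("abc123",))
assert res is cur  # execute() returns the same cursor object
print(res.fetchone()[0])  # @alice:example.com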
15 changes: 9 additions & 6 deletions sydent/db/hashing_metadata.py
@@ -71,23 +71,25 @@ def store_lookup_pepper(self, hashing_function, pepper):
cur, hashing_function, pepper, "global_threepid_associations"
)

# Commit the queued db transactions so that adding a new pepper and hashing is atomic
# Commit the queued db transactions so that adding a new pepper and hashing
# is atomic
self.sydent.db.commit()

def _rehash_threepids(self, cur, hashing_function, pepper, table):
"""Rehash 3PIDs of a given table using a given hashing_function and pepper

A database cursor `cur` must be passed to this function. After this function completes,
the calling function should make sure to call self`self.sydent.db.commit()` to commit
the made changes to the database.
A database cursor `cur` must be passed to this function. After this function
completes, the calling function should make sure to call
`self.sydent.db.commit()` to commit the changes to the database.

:param cur: Database cursor
:type cur:

:param hashing_function: A function with single input and output strings
:type hashing_function: func(str) -> str

:param pepper: A pepper to append to the end of the 3PID (after a space) before hashing
:param pepper: A pepper to append to the end of the 3PID (after a space)
before hashing
:type pepper: str

:param table: The database table to perform the rehashing on
@@ -121,7 +123,8 @@ def _rehash_threepids(self, cur, hashing_function, pepper, table):

# Combine the medium, address and pepper together in the
# following form: "address medium pepper"
# According to MSC2134: https://github.com/matrix-org/matrix-doc/pull/2134
# According to MSC2134:
# https://github.com/matrix-org/matrix-doc/pull/2134
combo = "%s %s %s" % (address, medium, pepper)

# Hash the resulting string
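`_rehash_threepids` combines each row into the string "address medium pepper" and passes it to whatever `hashing_function` the caller supplies; the actual hash implementation is not part of this diff. As a rough sketch only, a `hashing_function` matching the sha256 / unpadded URL-safe base64 scheme described in MSC2134 might look like this (the function name and example pepper are illustrative, not taken from sydent):

import hashlib
from base64 import urlsafe_b64encode


def sha256_url_safe_base64(input_text):
    """sha256-hash input_text and return the digest as URL-safe base64 with
    the trailing '=' padding stripped."""
    digest = hashlib.sha256(input_text.encode("utf8")).digest()
    return urlsafe_b64encode(digest).decode("ascii").rstrip("=")


# Combine the address, medium and pepper in the same "address medium pepper"
# form used by _rehash_threepids, then hash it to produce the value that
# would be stored in the lookup_hash column.
pepper = "matrixrocks"
combo = "%s %s %s" % ("alice@example.com", "email", pepper)
print(sha256_url_safe_base64(combo))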
6 changes: 4 additions & 2 deletions sydent/db/peers.py
@@ -34,7 +34,8 @@ def getPeerByName(self, name):
"""
cur = self.sydent.db.cursor()
res = cur.execute(
"select p.name, p.port, p.lastSentVersion, pk.alg, pk.key from peers p, peer_pubkeys pk "
"select p.name, p.port, p.lastSentVersion, pk.alg, pk.key "
"from peers p, peer_pubkeys pk "
"where p.name = ? and pk.peername = p.name and p.active = 1",
(name,),
)
@@ -66,7 +67,8 @@ def getAllPeers(self):
"""
cur = self.sydent.db.cursor()
res = cur.execute(
"select p.name, p.port, p.lastSentVersion, pk.alg, pk.key from peers p, peer_pubkeys pk "
"select p.name, p.port, p.lastSentVersion, pk.alg, pk.key "
"from peers p, peer_pubkeys pk "
"where pk.peername = p.name and p.active = 1"
)

63 changes: 40 additions & 23 deletions sydent/db/sqlitedb.py
@@ -54,7 +54,7 @@ def _createSchema(self):
try:
logger.info("Importing %s", scriptPath)
c.executescript(fp.read())
except:
except: # noqa: E722
logger.error("Error importing %s", scriptPath)
raise
fp.close()
@@ -68,18 +68,21 @@ def _upgradeSchema(self):
if curVer < 1:
cur = self.db.cursor()

# add auto_increment to the primary key of local_threepid_associations to ensure ids are never re-used,
# allow the mxid column to be null to represent the deletion of a binding
# and remove not null constraints on ts, notBefore and notAfter (again for when a binding has been deleted
# add auto_increment to the primary key of local_threepid_associations to
# ensure ids are never re-used, allow the mxid column to be null to
# represent the deletion of a binding and remove not null constraints on
# ts, notBefore and notAfter (again for when a binding has been deleted
# and these wouldn't be very meaningful)
logger.info("Migrating schema from v0 to v1")
cur.execute("DROP INDEX IF EXISTS medium_address")
cur.execute("DROP INDEX IF EXISTS local_threepid_medium_address")
cur.execute(
"ALTER TABLE local_threepid_associations RENAME TO old_local_threepid_associations"
"ALTER TABLE local_threepid_associations "
"RENAME TO old_local_threepid_associations"
)
cur.execute(
"CREATE TABLE local_threepid_associations (id integer primary key autoincrement, "
"CREATE TABLE local_threepid_associations ( "
"id integer primary key autoincrement, "
"medium varchar(16) not null, "
"address varchar(256) not null, "
"mxid varchar(256), "
@@ -88,23 +91,28 @@ def _upgradeSchema(self):
"notAfter bigint)"
)
cur.execute(
"INSERT INTO local_threepid_associations (medium, address, mxid, ts, notBefore, notAfter) "
"SELECT medium, address, mxid, ts, notBefore, notAfter FROM old_local_threepid_associations"
"INSERT INTO local_threepid_associations ( "
"medium, address, mxid, ts, notBefore, notAfter) "
"SELECT medium, address, mxid, ts, notBefore, notAfter "
"FROM old_local_threepid_associations"
)
cur.execute(
"CREATE UNIQUE INDEX local_threepid_medium_address on local_threepid_associations(medium, address)"
"CREATE UNIQUE INDEX local_threepid_medium_address "
"on local_threepid_associations(medium, address)"
)
cur.execute("DROP TABLE old_local_threepid_associations")

# same autoincrement for global_threepid_associations (fields stay non-nullable because we don't need
# entries in this table for deletions, we can just delete the rows)
# same autoincrement for global_threepid_associations (fields stay
# non-nullable because we don't need entries in this table for deletions,
# we can just delete the rows)
cur.execute("DROP INDEX IF EXISTS global_threepid_medium_address")
cur.execute("DROP INDEX IF EXISTS global_threepid_medium_lower_address")
cur.execute("DROP INDEX IF EXISTS global_threepid_originServer_originId")
cur.execute("DROP INDEX IF EXISTS medium_lower_address")
cur.execute("DROP INDEX IF EXISTS threepid_originServer_originId")
cur.execute(
"ALTER TABLE global_threepid_associations RENAME TO old_global_threepid_associations"
"ALTER TABLE global_threepid_associations "
"RENAME TO old_global_threepid_associations"
)
cur.execute(
"CREATE TABLE IF NOT EXISTS global_threepid_associations "
@@ -121,12 +129,15 @@ def _upgradeSchema(self):
)
cur.execute(
"INSERT INTO global_threepid_associations "
"(medium, address, mxid, ts, notBefore, notAfter, originServer, originId, sgAssoc) "
"SELECT medium, address, mxid, ts, notBefore, notAfter, originServer, originId, sgAssoc "
"(medium, address, mxid, ts, notBefore, notAfter, "
"originServer, originId, sgAssoc) "
"SELECT medium, address, mxid, ts, notBefore, notAfter, "
"originServer, originId, sgAssoc "
"FROM old_global_threepid_associations"
)
cur.execute(
"CREATE INDEX global_threepid_medium_address on global_threepid_associations (medium, address)"
"CREATE INDEX global_threepid_medium_address on "
"global_threepid_associations (medium, address)"
)
cur.execute(
"CREATE INDEX global_threepid_medium_lower_address on "
@@ -145,7 +156,8 @@ def _upgradeSchema(self):
logger.info("Migrating schema from v1 to v2")
cur = self.db.cursor()
cur.execute(
"CREATE INDEX threepid_validation_sessions_mtime ON threepid_validation_sessions(mtime)"
"CREATE INDEX threepid_validation_sessions_mtime ON "
"threepid_validation_sessions(mtime)"
)
self.db.commit()
logger.info("v1 -> v2 schema migration complete")
@@ -184,16 +196,20 @@ def _upgradeSchema(self):
if curVer < 4:
cur = self.db.cursor()
cur.execute(
"CREATE TABLE accounts(user_id TEXT NOT NULL PRIMARY KEY, created_ts BIGINT NOT NULL, consent_version TEXT)"
"CREATE TABLE accounts(user_id TEXT NOT NULL PRIMARY KEY, "
"created_ts BIGINT NOT NULL, consent_version TEXT)"
)
cur.execute(
"CREATE TABLE tokens(token TEXT NOT NULL PRIMARY KEY, user_id TEXT NOT NULL)"
"CREATE TABLE tokens(token TEXT NOT NULL PRIMARY KEY, "
"user_id TEXT NOT NULL)"
)
cur.execute(
"CREATE TABLE accepted_terms_urls(user_id TEXT NOT NULL, url TEXT NOT NULL)"
"CREATE TABLE accepted_terms_urls(user_id TEXT NOT NULL, "
"url TEXT NOT NULL)"
)
cur.execute(
"CREATE UNIQUE INDEX accepted_terms_urls_idx ON accepted_terms_urls (user_id, url)"
"CREATE UNIQUE INDEX accepted_terms_urls_idx ON "
"accepted_terms_urls (user_id, url)"
)
self.db.commit()
logger.info("v3 -> v4 schema migration complete")
@@ -204,20 +220,21 @@ def _upgradeSchema(self):
cur = self.db.cursor()
cur.execute("DROP INDEX IF EXISTS lookup_hash_medium")
cur.execute(
"CREATE INDEX global_threepid_lookup_hash ON global_threepid_associations(lookup_hash)"
"CREATE INDEX global_threepid_lookup_hash ON "
"global_threepid_associations(lookup_hash)"
)
self.db.commit()
logger.info("v4 -> v5 schema migration complete")
self._setSchemaVersion(5)

def _getSchemaVersion(self):
cur = self.db.cursor()
res = cur.execute("PRAGMA user_version")
cur.execute("PRAGMA user_version")
row = cur.fetchone()
return row[0]

def _setSchemaVersion(self, ver):
cur = self.db.cursor()
# NB. pragma doesn't support variable substitution so we
# do it in python (as a decimal so we don't risk SQL injection)
res = cur.execute("PRAGMA user_version = %d" % (ver,))
cur.execute("PRAGMA user_version = %d" % (ver,))
2 changes: 1 addition & 1 deletion sydent/db/terms.py
@@ -55,7 +55,7 @@ def addAgreedUrls(self, user_id, urls):
:type urls: list[unicode]
"""
cur = self.sydent.db.cursor()
res = cur.executemany(
cur.executemany(
"insert or ignore into accepted_terms_urls (user_id, url) values (?, ?)",
((user_id, u) for u in urls),
)
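`executemany` accepts any iterable of parameter tuples, so the generator expression above inserts one row per URL without building an intermediate list, and `insert or ignore` makes re-accepting the same terms a no-op. A self-contained sketch of the same pattern, with a unique index assumed on (user_id, url):

import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute(
    "create table accepted_terms_urls "
    "(user_id text not null, url text not null, unique (user_id, url))"
)

user_id = "@alice:example.com"
urls = ["https://example.com/terms-1.0", "https://example.com/privacy-1.0"]

# One parameter tuple per URL; running this twice is harmless because
# duplicates are silently skipped by "insert or ignore".
for _ in range(2):
    cur.executemany(
        "insert or ignore into accepted_terms_urls (user_id, url) values (?, ?)",
        ((user_id, u) for u in urls),
    )
db.commit()
print(cur.execute("select count(*) from accepted_terms_urls").fetchone()[0])  # 2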
51 changes: 30 additions & 21 deletions sydent/db/threepid_associations.py
@@ -42,7 +42,8 @@ def addOrUpdateAssociation(self, assoc):
# sqlite's support for upserts is atrocious
cur.execute(
"insert or replace into local_threepid_associations "
"('medium', 'address', 'lookup_hash', 'mxid', 'ts', 'notBefore', 'notAfter')"
"('medium', 'address', 'lookup_hash', 'mxid', 'ts', "
"'notBefore', 'notAfter')"
" values (?, ?, ?, ?, ?, ?, ?)",
(
assoc.medium,
@@ -76,8 +77,8 @@ def getAssociationsAfterId(self, afterId, limit=None):
afterId = -1

q = (
"select id, medium, address, lookup_hash, mxid, ts, notBefore, notAfter from "
"local_threepid_associations "
"select id, medium, address, lookup_hash, mxid, ts, "
"notBefore, notAfter from local_threepid_associations "
"where id > ? order by id asc"
)
if limit is not None:
@@ -201,9 +202,9 @@ def signedAssociationStringForThreepid(self, medium, address):
# case insensitive which is technically incorrect). If we someday get a
# case-sensitive threepid, this can change.
res = cur.execute(
"select sgAssoc from global_threepid_associations where "
"medium = ? and lower(address) = lower(?) and notBefore < ? and notAfter > ? "
"order by ts desc limit 1",
"select sgAssoc from global_threepid_associations "
"where medium = ? and lower(address) = lower(?) "
"and notBefore < ? and notAfter > ? order by ts desc limit 1",
(medium, address, time_msec(), time_msec()),
)

@@ -230,9 +231,9 @@ def getMxid(self, medium, address):
"""
cur = self.sydent.db.cursor()
res = cur.execute(
"select mxid from global_threepid_associations where "
"medium = ? and lower(address) = lower(?) and notBefore < ? and notAfter > ? "
"order by ts desc limit 1",
"select mxid from global_threepid_associations "
"where medium = ? and lower(address) = lower(?) and notBefore < ? "
"and notAfter > ? order by ts desc limit 1",
(medium, address, time_msec(), time_msec()),
)

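Both `signedAssociationStringForThreepid` and `getMxid` apply the same filter: the address match is case-insensitive, a row only counts if the current time in milliseconds sits between its `notBefore` and `notAfter`, and `order by ts desc limit 1` returns the most recent binding. A minimal sketch of that query shape, with a stand-in for sydent's `time_msec()`:

import sqlite3
import time


def time_msec():
    # Stand-in for sydent's time_msec(): current time in milliseconds.
    return int(time.time() * 1000)


db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute(
    "create table global_threepid_associations "
    "(medium text, address text, mxid text, ts bigint, "
    "notBefore bigint, notAfter bigint)"
)
now = time_msec()
cur.execute(
    "insert into global_threepid_associations values (?, ?, ?, ?, ?, ?)",
    ("email", "Alice@Example.com", "@alice:example.com",
     now, now - 1000, now + 1000000),
)

# Case-insensitive match on the address, restricted to currently-valid rows,
# newest binding first.
res = cur.execute(
    "select mxid from global_threepid_associations "
    "where medium = ? and lower(address) = lower(?) and notBefore < ? "
    "and notAfter > ? order by ts desc limit 1",
    ("email", "alice@example.com", time_msec(), time_msec()),
)
print(res.fetchone())  # ('@alice:example.com',)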
@@ -257,10 +258,12 @@ def getMxids(self, threepid_tuples):
cur = self.sydent.db.cursor()

cur.execute(
"CREATE TEMPORARY TABLE tmp_getmxids (medium VARCHAR(16), address VARCHAR(256))"
"CREATE TEMPORARY TABLE tmp_getmxids (medium VARCHAR(16), "
"address VARCHAR(256))"
)
cur.execute(
"CREATE INDEX tmp_getmxids_medium_lower_address ON tmp_getmxids (medium, lower(address))"
"CREATE INDEX tmp_getmxids_medium_lower_address "
"ON tmp_getmxids (medium, lower(address))"
)

try:
@@ -273,10 +276,13 @@ def getMxids(self, threepid_tuples):
inserted_cap += 500

res = cur.execute(
# 'notBefore' is the time the association starts being valid, 'notAfter' the the time at which
# it ceases to be valid, so the ts must be greater than 'notBefore' and less than 'notAfter'.
"SELECT gte.medium, gte.address, gte.ts, gte.mxid FROM global_threepid_associations gte "
"JOIN tmp_getmxids ON gte.medium = tmp_getmxids.medium AND lower(gte.address) = lower(tmp_getmxids.address) "
# 'notBefore' is the time the association starts being valid, 'notAfter'
# the time at which it ceases to be valid, so the ts must be greater
# than 'notBefore' and less than 'notAfter'.
"SELECT gte.medium, gte.address, gte.ts, gte.mxid "
"FROM global_threepid_associations gte "
"JOIN tmp_getmxids ON gte.medium = tmp_getmxids.medium "
"AND lower(gte.address) = lower(tmp_getmxids.address) "
"WHERE gte.notBefore < ? AND gte.notAfter > ? "
"ORDER BY gte.medium, gte.address, gte.ts DESC",
(time_msec(), time_msec()),
@@ -317,7 +323,8 @@ def addAssociation(self, assoc, rawSgAssoc, originServer, originId, commit=True)
cur = self.sydent.db.cursor()
cur.execute(
"insert or ignore into global_threepid_associations "
"(medium, address, lookup_hash, mxid, ts, notBefore, notAfter, originServer, originId, sgAssoc) values "
"(medium, address, lookup_hash, mxid, ts, notBefore, notAfter, "
"originServer, originId, sgAssoc) values "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
assoc.medium,
@@ -417,9 +424,11 @@ def retrieveMxidsForHashes(self, addresses):
inserted_cap += 500

res = cur.execute(
# 'notBefore' is the time the association starts being valid, 'notAfter' the the time at which
# it ceases to be valid, so the ts must be greater than 'notBefore' and less than 'notAfter'.
"SELECT gta.lookup_hash, gta.mxid FROM global_threepid_associations gta "
# 'notBefore' is the time the association starts being valid, 'notAfter'
# the time at which it ceases to be valid, so the ts must be greater
# than 'notBefore' and less than 'notAfter'.
"SELECT gta.lookup_hash, gta.mxid "
"FROM global_threepid_associations gta "
"JOIN tmp_retrieve_mxids_for_hashes "
"ON gta.lookup_hash = tmp_retrieve_mxids_for_hashes.lookup_hash "
"WHERE gta.notBefore < ? AND gta.notAfter > ? "
Expand All @@ -428,8 +437,8 @@ def retrieveMxidsForHashes(self, addresses):
)

# Place the results from the query into a dictionary
# Results are sorted from oldest to newest, so if there are multiple mxid's for
# the same lookup hash, only the newest mapping will be returned
# Results are sorted from oldest to newest, so if there are multiple mxids
# for the same lookup hash, only the newest mapping will be returned
for lookup_hash, mxid in res.fetchall():
results[lookup_hash] = mxid

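`getMxids` and `retrieveMxidsForHashes` share one bulk-lookup shape: stage the requested keys in a temporary table (inserting in batches, apparently 500 at a time, to keep each statement small), answer the whole batch with a single JOIN against `global_threepid_associations`, and let a dictionary keyed on the lookup value keep only the winning row per key. A rough, self-contained sketch of that pattern with a simplified stand-in table (not sydent's real schema):

import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute("create table assoc (lookup_hash text, mxid text, ts bigint)")
cur.executemany(
    "insert into assoc values (?, ?, ?)",
    [
        ("hash1", "@old:example.com", 100),
        ("hash1", "@new:example.com", 200),
        ("hash2", "@bob:example.com", 150),
    ],
)

wanted = ["hash1", "hash2", "hash_missing"]

# Stage the requested keys in a temporary table so one JOIN answers the whole
# batch, instead of issuing one query per key or building a huge IN (...) list.
cur.execute("create temporary table tmp_lookup (lookup_hash text)")
try:
    for i in range(0, len(wanted), 500):
        chunk = wanted[i:i + 500]
        cur.executemany(
            "insert into tmp_lookup (lookup_hash) values (?)",
            ((h,) for h in chunk),
        )

    res = cur.execute(
        "select a.lookup_hash, a.mxid from assoc a "
        "join tmp_lookup t on a.lookup_hash = t.lookup_hash "
        "order by a.ts asc"
    )

    # Rows arrive oldest first, so later (newer) rows overwrite earlier ones
    # and each key ends up mapped to its most recent mxid.
    results = {}
    for lookup_hash, mxid in res.fetchall():
        results[lookup_hash] = mxid
finally:
    cur.execute("drop table tmp_lookup")

print(results)  # {'hash1': '@new:example.com', 'hash2': '@bob:example.com'}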