Upgrade mypy and enable SQLAlchemy plugin
* Upgrade mypy to 0.942 and sqlalchemy-stubs to 0.4. Type stubs are no
  longer bundled with mypy, so we need to explicitly install various
  types-{name} packages.
* Since mypy now needs these extra stub packages plus other dependencies
  like Flask, move it into test-requirements.txt and run it via the dev
  container.
* The main issue spotted by the SQLAlchemy plugin, that
  sources.filesystem_id is nullable, is being fixed in a separate PR:
  #6350.
* Drop the explicit `# type:` comments now that mypy can infer those
  types (a brief before/after illustration appears below).
* Use `--explicit-package-bases` when running mypy, so it treats the
  securedrop/ and admin/ directories as packages regardless of whether
  `__init__.py` files exist. This is needed so the two tests/
  directories don't conflict with each other (see the wrapper sketch
  after the Makefile diff below).
* Add missing return types to all schema changes and the templates that
  generate them.
* In the `create_source_uuid_column` migration, drop the use of
  `quoted_name()`. For some reason mypy doesn't like it, but more
  importantly we don't need it in SQLite and we don't use it in any
  other schema change.
* Make SecureTemporaryFile.write()'s return type match its parent by
  returning the number of bytes that were written (sketched just below
  this list).
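
A minimal sketch of that last change, assuming only that the class buffers
data before encryption (the real SecureTemporaryFile wraps an encrypting
stream; the names below are illustrative, not the actual implementation):

    class SecureTemporaryFileSketch:
        """Illustrative stand-in for securedrop's SecureTemporaryFile."""

        def __init__(self) -> None:
            self._buffer = bytearray()  # stands in for the encryption layer

        def write(self, data: bytes) -> int:
            # Previously the method returned None implicitly; the io.* base
            # classes declare write() -> int, so return the byte count.
            self._buffer.extend(data)
            return len(data)

    assert SecureTemporaryFileSketch().write(b"hello") == 5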

Fixes #6346.
Fixes #6358.
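
For the dropped type comments, the pattern is the pre-annotation `# type:`
style; a generic before/after illustration (not taken verbatim from the
changed files):

    # Before: explicit type comment, required by older tooling
    timeout = 30  # type: int

    # After: mypy infers int from the assignment, so the comment is redundant
    timeout = 30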
legoktm committed Mar 31, 2022
1 parent fc72191 commit bacc485
Showing 31 changed files with 154 additions and 134 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -113,7 +113,7 @@ shellcheckclean: ## Clean up temporary container associated with shellcheck tar
.PHONY: typelint
typelint: ## Run mypy type linting.
@echo "███ Running mypy type checking..."
@mypy ./securedrop ./admin
@$(DEVSHELL) bin/run-mypy
@echo

.PHONY: yamllint
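The typelint target now shells into the dev container and runs bin/run-mypy
instead of invoking mypy directly. The wrapper's contents are not shown in
this diff; a plausible sketch, in which only the --explicit-package-bases
flag and the two target directories are confirmed by the commit message:

    #!/bin/bash
    # Sketch only: run mypy over both top-level trees, treating each
    # directory as its own package base so securedrop/tests and admin/tests
    # do not collide.
    set -euo pipefail
    mypy --explicit-package-bases ./securedrop ./admin
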
4 changes: 3 additions & 1 deletion mypy.ini
@@ -3,7 +3,9 @@ ignore_missing_imports = True
no_implicit_optional = True
disallow_untyped_defs = True
disallow_incomplete_defs = True
warn_unused_configs = True
python_version = 3.8
plugins = sqlmypy

[mypy-tests.*]
[mypy-securedrop.tests.*,admin.tests.*]
disallow_untyped_defs = False
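
Assembled from the visible hunk, the resulting mypy.ini reads roughly as
follows (the [mypy] section header sits above the shown context and is
assumed; any options outside the hunk are omitted):

    [mypy]
    ignore_missing_imports = True
    no_implicit_optional = True
    disallow_untyped_defs = True
    disallow_incomplete_defs = True
    warn_unused_configs = True
    python_version = 3.8
    plugins = sqlmypy

    [mypy-securedrop.tests.*,admin.tests.*]
    disallow_untyped_defs = False

The `plugins = sqlmypy` line is what enables the sqlalchemy-stubs plugin, and
the renamed per-module section relaxes disallow_untyped_defs only for the two
tests/ trees, now addressed by their full package paths.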
4 changes: 2 additions & 2 deletions securedrop/alembic/env.py
@@ -33,7 +33,7 @@
target_metadata = db.Model.metadata


def run_migrations_offline():
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
@@ -53,7 +53,7 @@ def run_migrations_offline():
context.run_migrations()


def run_migrations_online():
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
4 changes: 2 additions & 2 deletions securedrop/alembic/script.py.mako
@@ -16,9 +16,9 @@ branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
def upgrade() -> None:
${upgrades if upgrades else "pass"}


def downgrade():
def downgrade() -> None:
${downgrades if downgrades else "pass"}
4 changes: 2 additions & 2 deletions securedrop/alembic/versions/15ac9509fc68_init.py
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
op.create_table(
"journalists",
sa.Column("id", sa.Integer(), nullable=False),
@@ -84,7 +84,7 @@ def upgrade():
)


def downgrade():
def downgrade() -> None:
op.drop_table("submissions")
op.drop_table("source_stars")
op.drop_table("replies")
@@ -16,15 +16,15 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('instance_config', schema=None) as batch_op:
batch_op.create_index('ix_one_active_instance_config', [sa.text('valid_until IS NULL')], unique=True, sqlite_where=sa.text('valid_until IS NULL'))

# ### end Alembic commands ###


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('instance_config', schema=None) as batch_op:
batch_op.drop_index('ix_one_active_instance_config')
@@ -16,11 +16,11 @@
depends_on = None


def upgrade():
def upgrade() -> None:
op.add_column("journalists", sa.Column("passphrase_hash", sa.String(length=256), nullable=True))


def downgrade():
def downgrade() -> None:
# sqlite has no `drop column` command, so we recreate the original table
# then load it from a temp table
conn = op.get_bind()
@@ -60,7 +60,7 @@ def create_deleted() -> int:
return result[0][0]


def migrate_nulls():
def migrate_nulls() -> None:
"""migrate existing journalist_id=NULL over to deleted or delete them"""
op.execute("DELETE FROM journalist_login_attempt WHERE journalist_id IS NULL;")
op.execute("DELETE FROM revoked_tokens WHERE journalist_id IS NULL;")
@@ -90,7 +90,7 @@ def migrate_nulls():
op.execute(f'DELETE FROM {table} WHERE journalist_id IS NULL') # nosec


def upgrade():
def upgrade() -> None:
migrate_nulls()

with op.batch_alter_table('journalist_login_attempt', schema=None) as batch_op:
@@ -124,7 +124,7 @@ def upgrade():
nullable=False)


def downgrade():
def downgrade() -> None:
# We do not un-migrate the data back to journalist_id=NULL

with op.batch_alter_table('seen_replies', schema=None) as batch_op:
@@ -16,15 +16,15 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sources', schema=None) as batch_op:
batch_op.add_column(sa.Column('deleted_at', sa.DateTime(), nullable=True))

# ### end Alembic commands ###


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sources', schema=None) as batch_op:
batch_op.drop_column('deleted_at')
@@ -7,7 +7,6 @@
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import quoted_name
import uuid

# revision identifiers, used by Alembic.
@@ -17,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
conn.execute("PRAGMA legacy_alter_table=ON")
# Schema migration
@@ -39,7 +38,7 @@ def upgrade():

# Now create new table with unique constraint applied.
op.create_table(
quoted_name("sources", quote=False),
"sources",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("uuid", sa.String(length=36), nullable=False),
sa.Column("filesystem_id", sa.String(length=96), nullable=True),
@@ -67,6 +66,6 @@ def upgrade():
op.drop_table("sources_tmp")


def downgrade():
def downgrade() -> None:
with op.batch_alter_table("sources", schema=None) as batch_op:
batch_op.drop_column("uuid")
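
On the quoted_name() removal above: for this table name, quoting makes no
difference, so a plain string is equivalent; a minimal illustration (not part
of the migration itself):

    from sqlalchemy.sql import quoted_name

    # Both spellings name the same table; quoted_name only matters for
    # identifiers that need special quoting, which "sources" does not.
    assert str(quoted_name("sources", quote=False)) == "sources"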
@@ -30,7 +30,7 @@
depends_on = None


def raw_sql_grab_orphaned_objects(table_name):
def raw_sql_grab_orphaned_objects(table_name: str) -> str:
"""Objects that have a source ID that doesn't exist in the
sources table OR a NULL source ID should be deleted."""
return ('SELECT id, filename, source_id FROM {table} ' # nosec
@@ -39,7 +39,7 @@ def raw_sql_grab_orphaned_objects(table_name):
'WHERE source_id IS NULL').format(table=table_name)


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
submissions = conn.execute(
sa.text(raw_sql_grab_orphaned_objects('submissions'))
@@ -100,6 +100,6 @@ def upgrade():
raise


def downgrade():
def downgrade() -> None:
# This is a destructive alembic migration, it cannot be downgraded
pass
4 changes: 2 additions & 2 deletions securedrop/alembic/versions/48a75abc0121_add_seen_tables.py
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
op.create_table(
"seen_files",
sa.Column("id", sa.Integer(), nullable=False),
@@ -51,7 +51,7 @@ def upgrade():
)


def downgrade():
def downgrade() -> None:
op.drop_table("seen_files")
op.drop_table("seen_messages")
op.drop_table("seen_replies")
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('instance_config',
sa.Column('version', sa.Integer(), nullable=False),
@@ -35,7 +35,7 @@ def upgrade():
conn.execute("""INSERT INTO instance_config (allow_document_uploads) VALUES (1)""")


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('instance_config')
# ### end Alembic commands ###
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
conn.execute("PRAGMA legacy_alter_table=ON")
# Save existing journalist table.
@@ -70,7 +70,7 @@ def upgrade():
op.drop_table('journalists_tmp')


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('journalists', schema=None) as batch_op:
batch_op.drop_column('session_nonce')
4 changes: 2 additions & 2 deletions securedrop/alembic/versions/6db892e17271_add_reply_uuid.py
@@ -17,7 +17,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
conn.execute("PRAGMA legacy_alter_table=ON")
# Schema migration
@@ -67,7 +67,7 @@ def upgrade():
op.drop_table("replies_tmp")


def downgrade():
def downgrade() -> None:
with op.batch_alter_table("replies", schema=None) as batch_op:
batch_op.drop_column("uuid")

@@ -16,15 +16,15 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('instance_config', schema=None) as batch_op:
batch_op.add_column(sa.Column('organization_name', sa.String(length=255), nullable=True))

# ### end Alembic commands ###


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('instance_config', schema=None) as batch_op:
batch_op.drop_column('organization_name')
@@ -16,13 +16,13 @@
depends_on = None


def upgrade():
def upgrade() -> None:
with op.batch_alter_table('journalists', schema=None) as batch_op:
batch_op.add_column(sa.Column('first_name', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('last_name', sa.String(length=255), nullable=True))


def downgrade():
def downgrade() -> None:
with op.batch_alter_table('journalists', schema=None) as batch_op:
batch_op.drop_column('last_name')
batch_op.drop_column('first_name')
@@ -16,12 +16,12 @@
depends_on = None


def upgrade():
def upgrade() -> None:
with op.batch_alter_table("sources", schema=None) as batch_op:
batch_op.drop_column("flagged")


def downgrade():
def downgrade() -> None:
# You might be tempted to try Alembic's batch_ops for the
# downgrade too. Don't. SQLite's unnamed check constraints require
# kludges.
@@ -27,7 +27,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
with op.batch_alter_table("replies", schema=None) as batch_op:
batch_op.add_column(sa.Column("checksum", sa.String(length=255), nullable=True))

@@ -88,7 +88,7 @@ def upgrade():
raise


def downgrade():
def downgrade() -> None:
op.drop_table("revoked_tokens")

with op.batch_alter_table("submissions", schema=None) as batch_op:
@@ -17,7 +17,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# remove the old partial index on valid_until, as alembic can't handle it.
op.execute(sa.text('DROP INDEX IF EXISTS ix_one_active_instance_config'))
@@ -50,7 +50,7 @@ def upgrade():
# ### end Alembic commands ###


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('instance_config', schema=None) as batch_op:
batch_op.alter_column('valid_until',
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('journalists', schema=None) as batch_op:
batch_op.alter_column('otp_secret',
@@ -27,7 +27,7 @@ def upgrade():
# ### end Alembic commands ###


def downgrade():
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('journalists', schema=None) as batch_op:
batch_op.alter_column('otp_secret',
@@ -16,7 +16,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
conn.execute("PRAGMA legacy_alter_table=ON")
# Schema migration
@@ -64,6 +64,6 @@ def upgrade():
op.drop_table("replies_tmp")


def downgrade():
def downgrade() -> None:
with op.batch_alter_table("replies", schema=None) as batch_op:
batch_op.drop_column("deleted_by_source")
