update deps and switch from black to ruff and add migration for setting correct nullability (#147)
falkben committed Nov 23, 2023
1 parent ff6aa6f commit 8f158c6
Showing 8 changed files with 255 additions and 90 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -58,21 +58,21 @@ To install the project locally:

```sh
pip-compile -o requirements.txt pyproject.toml --quiet && \
pip-compile --extra dev -o dev-requirements.txt pyproject.toml --quiet
pip-compile --extra dev -c requirements.txt -o dev-requirements.txt pyproject.toml --quiet
```

3. Upgrade a package:

```sh
pip-compile -o requirements.txt pyproject.toml --quiet --upgrade-package PACKAGE && \
pip-compile --extra dev -o dev-requirements.txt pyproject.toml --quiet
pip-compile --extra dev -c requirements.txt -o dev-requirements.txt pyproject.toml --quiet
```

4. Upgrade all packages with:

```sh
pip-compile -o requirements.txt pyproject.toml --quiet --upgrade && \
pip-compile --extra dev -o dev-requirements.txt pyproject.toml --quiet --upgrade
pip-compile --extra dev -c requirements.txt -o dev-requirements.txt pyproject.toml --quiet --upgrade
```

More here: <https://github.com/jazzband/pip-tools/>
16 changes: 11 additions & 5 deletions alembic.ini
@@ -59,11 +59,17 @@ sqlalchemy.url = sqlite:///database.db
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
hooks = black
black.type = console_scripts
black.entrypoint = black
black.options = -l 79 REVISION_SCRIPT_FILENAME
hooks = ruff,ruff_format

# ruff fix
ruff.type = exec
ruff.executable = ruff
ruff.options = --fix REVISION_SCRIPT_FILENAME

# format using "ruff" (black style)
ruff_format.type = exec
ruff_format.executable = ruff
ruff_format.options = format REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
166 changes: 114 additions & 52 deletions dev-requirements.txt
@@ -2,38 +2,50 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --extra=dev --output-file=dev-requirements.txt pyproject.toml
# pip-compile --constraint=requirements.txt --extra=dev --output-file=dev-requirements.txt pyproject.toml
#
aiofiles==23.2.1
# via datasette
# via
# -c requirements.txt
# datasette
alembic==1.12.1
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
aniso8601==9.0.1
# via graphene
anyio==4.0.0
# via httpcore
anyio==4.1.0
# via
# -c requirements.txt
# httpx
asgi-csrf==0.9
# via datasette
# via
# -c requirements.txt
# datasette
asgiref==3.7.2
# via datasette
black==23.10.1
# via steam2sqlite (pyproject.toml)
certifi==2023.7.22
# via
# -c requirements.txt
# datasette
certifi==2023.11.17
# via
# -c requirements.txt
# httpcore
# httpx
# requests
charset-normalizer==3.3.2
# via requests
# via
# -c requirements.txt
# requests
click==8.1.7
# via
# black
# -c requirements.txt
# click-default-group
# datasette
# sqlite-utils
# uvicorn
click-default-group==1.2.4
# via
# -c requirements.txt
# datasette
# sqlite-utils
coverage[toml]==7.3.2
@@ -42,18 +54,23 @@ coverage[toml]==7.3.2
# pytest-cov
datasette==0.64.5
# via
# -c requirements.txt
# datasette-graphql
# datasette-publish-fly
# datasette-vega
# steam2sqlite (pyproject.toml)
datasette-graphql==2.2
# via steam2sqlite (pyproject.toml)
datasette-publish-fly==1.3.1
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
datasette-vega==0.6.2
# via steam2sqlite (pyproject.toml)
dropbox==11.36.2
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
graphene==3.3
# via datasette-graphql
graphql-core==3.2.3
@@ -64,65 +81,85 @@ graphql-core==3.2.3
graphql-relay==3.2.0
# via graphene
greenlet==3.0.1
# via sqlalchemy
# via
# -c requirements.txt
# sqlalchemy
h11==0.14.0
# via
# -c requirements.txt
# httpcore
# uvicorn
httpcore==0.18.0
# via httpx
httpx==0.25.0
httpcore==1.0.2
# via
# -c requirements.txt
# httpx
httpx==0.25.1
# via
# -c requirements.txt
# datasette
# steam2sqlite (pyproject.toml)
hupper==1.12
# via datasette
# via
# -c requirements.txt
# datasette
idna==3.4
# via
# -c requirements.txt
# anyio
# httpx
# requests
iniconfig==2.0.0
# via pytest
itsdangerous==2.1.2
# via
# -c requirements.txt
# asgi-csrf
# datasette
janus==1.0.0
# via datasette
# via
# -c requirements.txt
# datasette
jinja2==3.1.2
# via datasette
# via
# -c requirements.txt
# datasette
loguru==0.7.2
# via steam2sqlite (pyproject.toml)
mako==1.2.4
# via alembic
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
mako==1.3.0
# via
# -c requirements.txt
# alembic
markupsafe==2.1.3
# via
# -c requirements.txt
# jinja2
# mako
mergedeep==1.3.4
# via datasette
mypy-extensions==1.0.0
# via black
packaging==23.2
# via
# black
# pytest
pathspec==0.11.2
# via black
# -c requirements.txt
# datasette
packaging==23.2
# via pytest
pint==0.22
# via datasette
platformdirs==3.11.0
# via black
# via
# -c requirements.txt
# datasette
pluggy==1.3.0
# via
# -c requirements.txt
# datasette
# pytest
# sqlite-utils
ply==3.11
# via stone
# via
# -c requirements.txt
# stone
pydantic==1.10.13
# via sqlmodel
# via
# -c requirements.txt
# sqlmodel
pytest==7.4.3
# via
# pytest-asyncio
@@ -135,52 +172,77 @@ pytest-cov==4.1.0
python-dateutil==2.8.2
# via sqlite-utils
python-dotenv==1.0.0
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
python-multipart==0.0.6
# via asgi-csrf
# via
# -c requirements.txt
# asgi-csrf
pyyaml==6.0.1
# via datasette
# via
# -c requirements.txt
# datasette
requests==2.31.0
# via dropbox
# via
# -c requirements.txt
# dropbox
ruff==0.1.6
# via steam2sqlite (pyproject.toml)
six==1.16.0
# via
# -c requirements.txt
# dropbox
# python-dateutil
# stone
sniffio==1.3.0
# via
# -c requirements.txt
# anyio
# httpcore
# httpx
sqlalchemy==1.4.50
# via
# -c requirements.txt
# alembic
# sqlmodel
sqlalchemy2-stubs==0.0.2a36
# via sqlmodel
sqlalchemy2-stubs==0.0.2a37
# via
# -c requirements.txt
# sqlmodel
sqlite-fts4==1.0.3
# via sqlite-utils
sqlite-utils==3.35.1
sqlite-utils==3.35.2
# via datasette-graphql
sqlmodel==0.0.11
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)
stone==3.3.1
# via dropbox
# via
# -c requirements.txt
# dropbox
tabulate==0.9.0
# via sqlite-utils
typing-extensions==4.8.0
# via
# -c requirements.txt
# alembic
# janus
# pint
# pydantic
# sqlalchemy2-stubs
urllib3==2.0.7
# via requests
uvicorn==0.23.2
# via datasette
urllib3==2.1.0
# via
# -c requirements.txt
# requests
uvicorn==0.24.0.post1
# via
# -c requirements.txt
# datasette
uvloop==0.19.0
# via steam2sqlite (pyproject.toml)
# via
# -c requirements.txt
# steam2sqlite (pyproject.toml)

# The following packages are considered to be unsafe in a requirements file:
# pip
8 changes: 4 additions & 4 deletions migrations/env.py
@@ -1,9 +1,7 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel

# this is the Alembic Config object, which provides
@@ -21,6 +19,7 @@
# target_metadata = None

from steam2sqlite.models import *

target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
@@ -47,6 +46,7 @@ def run_migrations_offline():
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
render_as_batch=True,
)

with context.begin_transaction():
@@ -68,7 +68,7 @@ def run_migrations_online():

with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
connection=connection, target_metadata=target_metadata, render_as_batch=True
)

with context.begin_transaction():
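
Note that both `context.configure()` calls now pass `render_as_batch=True`. SQLite cannot alter existing columns in place, so Alembic's batch mode recreates the table behind the scenes; that is what lets the nullability migration referenced in the commit title (presumably one of the changed files not expanded in this view) run against the project's SQLite database. Below is a minimal sketch of what a batch-mode nullability migration could look like — the revision identifiers, table, and column names are hypothetical placeholders, not values taken from this commit:

```python
"""Set correct nullability (illustrative sketch, not the actual migration)."""
import sqlalchemy as sa
from alembic import op

# Hypothetical revision identifiers -- the real ones are generated by Alembic.
revision = "0123abcd4567"
down_revision = "89ef0123cdef"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # batch_alter_table copies the table into a new one with the desired
    # schema, which is how SQLite applies ALTER COLUMN-style changes
    # such as tightening nullability.
    with op.batch_alter_table("appdetails") as batch_op:
        batch_op.alter_column("name", existing_type=sa.String(), nullable=False)


def downgrade() -> None:
    with op.batch_alter_table("appdetails") as batch_op:
        batch_op.alter_column("name", existing_type=sa.String(), nullable=True)
```

With the flag set, `alembic revision --autogenerate` renders column changes inside `batch_alter_table` blocks like the one above; without it, the generated `alter_column` calls would fail on SQLite.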