From ab766286178b3d73218750913d96be7fa66200ec Mon Sep 17 00:00:00 2001 From: Payam Date: Tue, 5 Mar 2024 23:45:19 +0330 Subject: [PATCH 01/18] feat: Activated alembic with async sqlalchemy --- alembic/README | 1 + alembic/env.py | 94 ++++++++++++ alembic/script.py.mako | 26 ++++ alembic/versions/297ae216d270_init.py | 37 +++++ app/auth/models.py | 29 ++-- app/db.py | 3 + {migrations => migrations-old}/README | 0 {migrations => migrations-old}/env.py | 0 {migrations => migrations-old}/script.py.mako | 0 .../versions/982fd2bb72de_init.py | 0 poetry.lock | 135 +++++++++--------- pyproject.toml | 8 +- 12 files changed, 254 insertions(+), 79 deletions(-) create mode 100644 alembic/README create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 alembic/versions/297ae216d270_init.py rename {migrations => migrations-old}/README (100%) rename {migrations => migrations-old}/env.py (100%) rename {migrations => migrations-old}/script.py.mako (100%) rename {migrations => migrations-old}/versions/982fd2bb72de_init.py (100%) diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..e0d0858 --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..335fe50 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,94 @@ +import asyncio +import os +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context +from app.db import Base +from app.auth.models import User #New +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config +config.set_main_option( + "sqlalchemy.url", os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db" +) +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/297ae216d270_init.py b/alembic/versions/297ae216d270_init.py new file mode 100644 index 0000000..4980e6b --- /dev/null +++ b/alembic/versions/297ae216d270_init.py @@ -0,0 +1,37 @@ +"""init + +Revision ID: 297ae216d270 +Revises: +Create Date: 2024-03-05 23:44:00.008968 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '297ae216d270' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(), nullable=True), + sa.Column('email', sa.String(), nullable=True), + sa.Column('full_name', sa.String(), nullable=True), + sa.Column('password', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('users') + # ### end Alembic commands ### diff --git a/app/auth/models.py b/app/auth/models.py index 741a6cd..99d2e16 100644 --- a/app/auth/models.py +++ b/app/auth/models.py @@ -1,13 +1,24 @@ -from sqlmodel import Field, SQLModel +from sqlalchemy import Column, Integer, String +from ..db import Base -class BaseUser(SQLModel): - username: str - email: str | None = None - full_name: str | None = None - password: str +# class BaseUser(SQLModel): +# username: str +# email: str | None = None +# full_name: str | None = None +# password: str +# +# +# class User(BaseUser, table=True): +# id: int = Field(default=None, nullable=False, primary_key=True) +# disabled: bool | None = None -class User(BaseUser, table=True): - id: int = Field(default=None, nullable=False, primary_key=True) - disabled: bool | None = None +class User(Base): + __tablename__ = "users" + + id = Column(Integer, primary_key=True) + username = Column(String) + email = Column(String) + full_name = Column(String) + password = Column(String) diff --git a/app/db.py b/app/db.py index 8f66577..53327aa 100644 --- a/app/db.py +++ b/app/db.py @@ -1,7 +1,10 @@ import os +from sqlalchemy.orm import declarative_base from sqlmodel import Session, create_engine +# declarative base class +Base = declarative_base() # DATABASE_URL = "sqlite+aiosqlite:///database.db" DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite:///database.db" diff --git a/migrations/README b/migrations-old/README similarity index 100% rename from migrations/README rename to migrations-old/README diff --git a/migrations/env.py b/migrations-old/env.py similarity index 100% rename from migrations/env.py rename to migrations-old/env.py diff --git a/migrations/script.py.mako b/migrations-old/script.py.mako similarity index 100% rename from migrations/script.py.mako rename to migrations-old/script.py.mako diff --git a/migrations/versions/982fd2bb72de_init.py b/migrations-old/versions/982fd2bb72de_init.py similarity index 100% rename from migrations/versions/982fd2bb72de_init.py rename to migrations-old/versions/982fd2bb72de_init.py diff --git a/poetry.lock b/poetry.lock index 6627ea8..6992a03 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,18 +2,21 @@ [[package]] name = "aiosqlite" -version = "0.19.0" +version = "0.20.0" description = "asyncio bridge to the standard sqlite3 module" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "aiosqlite-0.19.0-py3-none-any.whl", hash = "sha256:edba222e03453e094a3ce605db1b970c4b3376264e56f32e2a4959f948d66a96"}, - {file = "aiosqlite-0.19.0.tar.gz", hash = "sha256:95ee77b91c8d2808bd08a59fbebf66270e9090c3d92ffbf260dc0db0b979577d"}, + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, ] +[package.dependencies] +typing_extensions = ">=4.0" + [package.extras] -dev = ["aiounittest (==1.4.1)", "attribution (==1.6.2)", "black (==23.3.0)", "coverage[toml] (==7.2.3)", "flake8 (==5.0.4)", "flake8-bugbear (==23.3.12)", "flit (==3.7.1)", "mypy (==1.2.0)", "ufmt (==2.1.0)", "usort (==1.0.6)"] -docs = ["sphinx (==6.1.3)", "sphinx-mdinclude (==0.5.3)"] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", 
"sphinx-mdinclude (==0.5.3)"] [[package]] name = "alembic" @@ -688,13 +691,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "fastapi" -version = "0.109.2" +version = "0.110.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, + {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, + {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, ] [package.dependencies] @@ -1566,13 +1569,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1786,60 +1789,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.27" +version = "2.0.28" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, - {file = 
"SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, - {file = 
"SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, - {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, - {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = 
"sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = 
"sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, + {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, + {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, ] [package.dependencies] @@ -1955,13 +1958,13 @@ files = [ [[package]] name = "types-pyasn1" -version = "0.5.0.20240205" +version = "0.5.0.20240301" description = "Typing stubs for pyasn1" optional = false python-versions = ">=3.8" files = [ - {file = "types-pyasn1-0.5.0.20240205.tar.gz", hash = "sha256:b42b4e967d2ad780bde2ce47d7627a00dfb11b37a451f3e73b264ec6e97e50c7"}, - {file = "types_pyasn1-0.5.0.20240205-py3-none-any.whl", hash = "sha256:40b205856c6a01d2ce6fa47a0be2a238a5556b04f47a2875a2aba680a65a959f"}, + {file = "types-pyasn1-0.5.0.20240301.tar.gz", hash = "sha256:da328f5771d54a2016863270b281047f9cc38e39f65a297ba9f987d5de3403f1"}, + {file = "types_pyasn1-0.5.0.20240301-py3-none-any.whl", hash = "sha256:d9989899184bbd6e2adf6f812c8f49c48197fceea251a6fb13666dae3203f80d"}, ] [[package]] @@ -2159,4 
+2162,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "c46d63cbc355c6ca209597897e126dfc51d73be702be13cc005895454d9e89ac" +content-hash = "d805a19f77196e19b8a69a88c01caf12aaef066469bf27cf1949f4c9260ce6a8" diff --git a/pyproject.toml b/pyproject.toml index d6d993b..df51891 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.11" -fastapi = "^0.109" +fastapi = "^0.110" uvicorn = "^0.27" sqlmodel = "0.0.16" psycopg2-binary = "^2" @@ -16,7 +16,7 @@ redis = "^4" celery = "^5" alembic = "^1" tenacity = "^8" -aiosqlite = "^0.19" +aiosqlite = "^0.20" pytest = "^8" httpx = "^0.26" pre-commit = "^3" @@ -52,7 +52,7 @@ fix = true indent-width = 4 line-length = 88 target-version = "py311" -exclude = ["migrations/*.*"] +exclude = ["migrations/*.*", "alembic/*.*"] lint.select = [ "E", # pycodestyle errors @@ -75,7 +75,7 @@ known-third-party = ["fastapi", "pydantic", "starlette"] [tool.mypy] python_version = 3.11 -exclude = ['.venv/*.*', 'app/songs/models.py', 'migrations/*.*'] +exclude = ['.venv/*.*', 'app/songs/models.py', 'migrations/*.*', "alembic/*.*"] [tool.commitizen] name = "cz_conventional_commits" From 2cc87437562ed17015e0a0a9c76536efe724b59d Mon Sep 17 00:00:00 2001 From: Payam Date: Tue, 5 Mar 2024 23:45:47 +0330 Subject: [PATCH 02/18] fix: added alembic.ini --- alembic-old.ini | 109 ++++++++++++++++++++++++++++++++++++++++++++++++ alembic.ini | 21 ++++++---- 2 files changed, 122 insertions(+), 8 deletions(-) create mode 100644 alembic-old.ini diff --git a/alembic-old.ini b/alembic-old.ini new file mode 100644 index 0000000..2e07d87 --- /dev/null +++ b/alembic-old.ini @@ -0,0 +1,109 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + + +#sqlalchemy.url = sqlite+aiosqlite:///database.db +#sqlalchemy.url = postgresql+asyncpg://payam:payam@pdb:5432/foo + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic.ini b/alembic.ini index 2e07d87..a634513 100644 --- a/alembic.ini +++ b/alembic.ini @@ -2,7 +2,7 @@ [alembic] # path to migration scripts -script_location = migrations +script_location = alembic # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s # Uncomment the line below if you want the files to be prepended with date and time @@ -14,9 +14,9 @@ prepend_sys_path = . # timezone to use when rendering the date within the migration file # as well as the filename. -# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() # leave blank for localtime # timezone = @@ -34,10 +34,10 @@ prepend_sys_path = . # sourceless = false # version location specification; This defaults -# to migrations/versions. When using multiple version +# to alembic/versions. When using multiple version # directories, initial revisions must be specified with --version-path. # The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions # version path separator; As mentioned above, this is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. @@ -58,9 +58,8 @@ version_path_separator = os # Use os.pathsep. 
Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 +# sqlalchemy.url = driver://user:pass@localhost/dbname -#sqlalchemy.url = sqlite+aiosqlite:///database.db -#sqlalchemy.url = postgresql+asyncpg://payam:payam@pdb:5432/foo [post_write_hooks] # post_write_hooks defines scripts or Python functions that are run @@ -73,6 +72,12 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + # Logging configuration [loggers] keys = root,sqlalchemy,alembic From 8d28d424e52c4de829ce77603df90b76dc2c3351 Mon Sep 17 00:00:00 2001 From: Payam Date: Tue, 5 Mar 2024 23:52:38 +0330 Subject: [PATCH 03/18] fix: added base song model --- alembic-old.ini | 109 ------------------ alembic/env.py | 4 +- ...ae216d270_init.py => ab5b634132be_init.py} | 12 +- app/songs/models.py | 1 + app/songs/tables.py | 10 ++ migrations-old/README | 1 - migrations-old/env.py | 86 -------------- migrations-old/script.py.mako | 27 ----- migrations-old/versions/982fd2bb72de_init.py | 72 ------------ 9 files changed, 23 insertions(+), 299 deletions(-) delete mode 100644 alembic-old.ini rename alembic/versions/{297ae216d270_init.py => ab5b634132be_init.py} (75%) create mode 100644 app/songs/tables.py delete mode 100644 migrations-old/README delete mode 100644 migrations-old/env.py delete mode 100644 migrations-old/script.py.mako delete mode 100644 migrations-old/versions/982fd2bb72de_init.py diff --git a/alembic-old.ini b/alembic-old.ini deleted file mode 100644 index 2e07d87..0000000 --- a/alembic-old.ini +++ /dev/null @@ -1,109 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = migrations - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. 
The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - - -#sqlalchemy.url = sqlite+aiosqlite:///database.db -#sqlalchemy.url = postgresql+asyncpg://payam:payam@pdb:5432/foo - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py index 335fe50..1547eae 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -8,7 +8,9 @@ from alembic import context from app.db import Base -from app.auth.models import User #New +from app.auth.models import User # New +from app.songs.tables import Song # New + # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config diff --git a/alembic/versions/297ae216d270_init.py b/alembic/versions/ab5b634132be_init.py similarity index 75% rename from alembic/versions/297ae216d270_init.py rename to alembic/versions/ab5b634132be_init.py index 4980e6b..4331ab1 100644 --- a/alembic/versions/297ae216d270_init.py +++ b/alembic/versions/ab5b634132be_init.py @@ -1,8 +1,8 @@ """init -Revision ID: 297ae216d270 +Revision ID: ab5b634132be Revises: -Create Date: 2024-03-05 23:44:00.008968 +Create Date: 2024-03-05 23:51:31.252368 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. -revision: str = '297ae216d270' +revision: str = 'ab5b634132be' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -20,6 +20,11 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### + op.create_table('songs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) op.create_table('users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(), nullable=True), @@ -34,4 +39,5 @@ def upgrade() -> None: def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### op.drop_table('users') + op.drop_table('songs') # ### end Alembic commands ### diff --git a/app/songs/models.py b/app/songs/models.py index 964a495..dedf641 100644 --- a/app/songs/models.py +++ b/app/songs/models.py @@ -72,3 +72,4 @@ class City(CityCreate, table=True): id: int = Field(default=None, nullable=False, primary_key=True) songs: list["Song"] = Relationship(back_populates="city") + diff --git a/app/songs/tables.py b/app/songs/tables.py new file mode 100644 index 0000000..ef9921b --- /dev/null +++ b/app/songs/tables.py @@ -0,0 +1,10 @@ +from sqlalchemy import Column, Integer, String + +from ..db import Base + + +class Song(Base): + __tablename__ = "songs" + + id = Column(Integer, primary_key=True) + name = Column(String) diff --git a/migrations-old/README b/migrations-old/README deleted file mode 100644 index 98e4f9c..0000000 --- a/migrations-old/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/migrations-old/env.py b/migrations-old/env.py deleted file mode 100644 index 9a1a605..0000000 --- a/migrations-old/env.py +++ /dev/null @@ -1,86 +0,0 @@ -import os -from logging.config import fileConfig - -from alembic import context -from sqlalchemy import engine_from_config, pool -from sqlmodel import SQLModel # NEW -from app.auth.models import User # NEW -from app.songs.models import Song, Tag, SongTag # NEW - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config -config.set_main_option( - "sqlalchemy.url", os.environ.get("DATABASE_URL") or "sqlite:///database.db" -) # NEW -# config.set_main_option('sqlalchemy.url', "sqlite:///database.db") # NEW - - -# sqlite+aiosqlite:///database.db -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -# target_metadata = None -target_metadata = SQLModel.metadata # NEW - - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - # url = config.get_main_option("sqlalchemy.url") #NEW - context.configure( - # url=url, #NEW - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/migrations-old/script.py.mako b/migrations-old/script.py.mako deleted file mode 100644 index 4445816..0000000 --- a/migrations-old/script.py.mako +++ /dev/null @@ -1,27 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -import sqlmodel # NEW -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/migrations-old/versions/982fd2bb72de_init.py b/migrations-old/versions/982fd2bb72de_init.py deleted file mode 100644 index f1760c7..0000000 --- a/migrations-old/versions/982fd2bb72de_init.py +++ /dev/null @@ -1,72 +0,0 @@ -"""init - -Revision ID: 982fd2bb72de -Revises: -Create Date: 2024-02-21 12:21:47.840087 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -import sqlmodel # NEW - - -# revision identifiers, used by Alembic. -revision: str = '982fd2bb72de' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('city', - sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('id', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('tag', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('user', - sa.Column('username', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('full_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('password', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('disabled', sa.Boolean(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('song', - sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('year', sa.Integer(), nullable=True), - sa.Column('city_id', sa.Integer(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['city_id'], ['city.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('songtag', - sa.Column('song_id', sa.Integer(), nullable=False), - sa.Column('tag_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['song_id'], ['song.id'], ), - sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ), - sa.PrimaryKeyConstraint('song_id', 'tag_id') - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('songtag') - op.drop_table('song') - op.drop_table('user') - op.drop_table('tag') - op.drop_table('city') - # ### end Alembic commands ### From 5682d2058382b302ef6a4318ebc7f88dc85a0ce4 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 10:24:48 +0330 Subject: [PATCH 04/18] feat: changes initial service startup to async sql driver --- .env | 2 +- Dockerfile | 10 ++- app/db.py | 12 +-- app/main.py | 50 ++++++------ docker-compose.yml | 16 ++-- poetry.lock | 146 +++++++++++++++-------------------- pyproject.toml | 3 +- scripts/backend_pre_start.py | 34 ++++---- scripts/prestart.sh | 4 +- scripts/start.sh | 2 +- 10 files changed, 128 insertions(+), 151 deletions(-) diff --git a/.env b/.env index 95ba17b..14743af 100644 --- a/.env +++ b/.env @@ -1,7 +1,7 @@ POSTGRES_DB=foo POSTGRES_USER=payam POSTGRES_PASSWORD=payam -DATABASE_URL=postgresql://payam:payam@pdb:5432/foo +DATABASE_URL=postgresql+asyncpg://payam:payam@pdb:5432/foo # Redis REDIS_HOST=redis REDIS_PORT=6379 diff --git a/Dockerfile b/Dockerfile index ded8c63..e8ff83e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM python:3.11-slim # set working directory -WORKDIR /app/ +WORKDIR /files/ # set environment variables ENV PYTHONDONTWRITEBYTECODE 1 @@ -10,6 +10,7 @@ ENV PYTHONUNBUFFERED 1 # Install dependencies RUN apt-get update && \ + apt-get install -y dos2unix &&\ apt-get install -y --no-install-recommends curl && \ curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \ ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry && \ @@ -19,7 +20,7 @@ RUN apt-get update && \ rm -rf /var/lib/apt/lists/* # Copy only the necessary files for dependency installation -COPY pyproject.toml poetry.lock* /app/ +COPY pyproject.toml poetry.lock* /files/ # Allow installing dev dependencies to run tests ARG INSTALL_DEV=false @@ -29,7 +30,8 @@ RUN if [ "$INSTALL_DEV" = "true" ] ; then poetry install --no-root ; else poetry COPY . . 
# set executable permissions in a single RUN command -RUN chmod +x /app/scripts/start.sh /app/scripts/prestart.sh +RUN chmod +x /files/scripts/start.sh /files/scripts/prestart.sh +RUN dos2unix /files/scripts/start.sh /files/scripts/prestart.sh # define the command to run the application -CMD ["/app/scripts/start.sh"] +CMD ["/files/scripts/start.sh"] \ No newline at end of file diff --git a/app/db.py b/app/db.py index 53327aa..47b41d0 100644 --- a/app/db.py +++ b/app/db.py @@ -1,16 +1,16 @@ import os from sqlalchemy.orm import declarative_base -from sqlmodel import Session, create_engine +# from sqlmodel import Session, create_engine # declarative base class Base = declarative_base() # DATABASE_URL = "sqlite+aiosqlite:///database.db" -DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite:///database.db" +DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db" -engine = create_engine(DATABASE_URL, echo=True) +# engine = create_engine(DATABASE_URL, echo=True) -def get_session(): - with Session(engine) as session: - yield session +# def get_session(): +# with Session(engine) as session: +# yield session diff --git a/app/main.py b/app/main.py index 2337f7f..54fa40f 100644 --- a/app/main.py +++ b/app/main.py @@ -11,25 +11,25 @@ app = FastAPI() -app.include_router(song_router) -app.include_router(auth_router) -app.include_router(files_router) -app.include_router(ws_router) -app.include_router(nosql_router) - -origins = [ - "http://localhost.tiangolo.com", - "https://localhost.tiangolo.com", - "http://localhost", - "http://localhost:8080", -] -app.add_middleware( - CORSMiddleware, - allow_origins=origins, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) +# app.include_router(song_router) +# app.include_router(auth_router) +# app.include_router(files_router) +# app.include_router(ws_router) +# app.include_router(nosql_router) +# +# origins = [ +# "http://localhost.tiangolo.com", +# "https://localhost.tiangolo.com", +# "http://localhost", +# "http://localhost:8080", +# ] +# app.add_middleware( +# CORSMiddleware, +# allow_origins=origins, +# allow_credentials=True, +# allow_methods=["*"], +# allow_headers=["*"], +# ) @app.get("/ping") @@ -37,9 +37,9 @@ async def pong(): return {"ping": "pong!"} -@app.get("/bootstrap", response_class=HTMLResponse) -async def read_item(request: Request, user_id: str): - templates = Jinja2Templates(directory="app/templates") - return templates.TemplateResponse( - request=request, name="index.html", context={"id": user_id} - ) +# @app.get("/bootstrap", response_class=HTMLResponse) +# async def read_item(request: Request, user_id: str): +# templates = Jinja2Templates(directory="app/templates") +# return templates.TemplateResponse( +# request=request, name="index.html", context={"id": user_id} +# ) diff --git a/docker-compose.yml b/docker-compose.yml index 03ff84e..9f2cde0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -50,13 +50,13 @@ services: - "5557:5555" depends_on: - redis - mongo: - image: mongo:7.0.5 - restart: always - ports: - - "27017:27017" - volumes: - - mongodb-data:/data/db +# mongo: +# image: mongo:7.0.5 +# restart: always +# ports: +# - "27017:27017" +# volumes: +# - mongodb-data:/data/db nginx: build: ./nginx volumes: @@ -69,4 +69,4 @@ services: volumes: static_volume: postgres_data: - mongodb-data: \ No newline at end of file +# mongodb-data: \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 6992a03..b34ee97 100644 --- a/poetry.lock +++ b/poetry.lock @@ -107,6 
+107,63 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "asyncpg" +version = "0.29.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, + {file = 
"asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, + {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, + {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, + {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, + {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""} + +[package.extras] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] + [[package]] name = "bcrypt" version = "4.1.2" @@ -489,13 +546,13 @@ files = [ [[package]] name = "commitizen" -version = "3.16.0" +version = "3.17.0" description = "Python commitizen client tool" optional = false python-versions = ">=3.8" files = [ - {file = "commitizen-3.16.0-py3-none-any.whl", hash = 
"sha256:a880005352fd35b908d9c3951e71e155b157f4a4ec61ca9c080a9637bf98e0a1"}, - {file = "commitizen-3.16.0.tar.gz", hash = "sha256:1269619d383d12809f436ff196fb786a3d49fc50987562e6e566cd9c2908735c"}, + {file = "commitizen-3.17.0-py3-none-any.whl", hash = "sha256:2a9942d52724dc50518ca53997bef0f3b51a7e1c518d1ea3da44f9a9ece2df3a"}, + {file = "commitizen-3.17.0.tar.gz", hash = "sha256:1949025c5485c645656929158a5e2f6f6fa96501eb4461dee88a2493f630cb9c"}, ] [package.dependencies] @@ -1231,87 +1288,6 @@ files = [ [package.dependencies] wcwidth = "*" -[[package]] -name = "psycopg2-binary" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = 
"psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, -] - [[package]] name = "pyasn1" version = "0.5.1" @@ -2162,4 +2138,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "d805a19f77196e19b8a69a88c01caf12aaef066469bf27cf1949f4c9260ce6a8" +content-hash = "bf3b65c35a3bbb4005b8c8c3799821493ac0ec12fd31d5ac57283dcf1dea77ec" diff --git a/pyproject.toml b/pyproject.toml index df51891..989e1c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ python = "^3.11" fastapi = "^0.110" uvicorn = "^0.27" sqlmodel = "0.0.16" -psycopg2-binary = "^2" +#psycopg2-binary = "^2" redis = "^4" celery = "^5" alembic = "^1" @@ -28,6 +28,7 @@ coverage = "^7" motor = "^3" pymongo = "^4" Jinja2 = "^3" +asyncpg = "^0.29" [tool.poetry.group.dev.dependencies] commitizen = "^3" diff --git a/scripts/backend_pre_start.py b/scripts/backend_pre_start.py index 3401b7e..ef71067 100644 --- a/scripts/backend_pre_start.py +++ b/scripts/backend_pre_start.py @@ -1,8 +1,9 @@ +import asyncio import logging import os -from sqlalchemy import MetaData, create_engine -from sqlmodel import Session, SQLModel, select +from sqlalchemy import MetaData, select +from sqlalchemy.ext.asyncio import create_async_engine from tenacity import ( after_log, before_log, @@ -11,10 +12,9 @@ wait_fixed, ) -DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite:///database.db" -engine = create_engine(DATABASE_URL, echo=True) - -# from app.db import engine +DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db" +engine = create_async_engine(DATABASE_URL, echo=True) +meta = MetaData() logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -22,8 +22,6 @@ max_tries = 60 * 5 # 5 minutes wait_seconds = 1 -meta = MetaData() - @retry( stop=stop_after_attempt(max_tries), @@ -31,23 +29,23 @@ before=before_log(logger, logging.INFO), after=after_log(logger, logging.WARN), ) -def init() -> None: 
+async def init() -> None: try: - with Session(engine) as session: - SQLModel.metadata.create_all(engine) - session.exec(select(1)) + async with engine.begin() as conn: + await conn.run_sync(meta.create_all) + + await conn.execute(select(1)) except Exception as e: print(e) logger.error(e) raise e -def main() -> None: - logger.info("Initializing service") - init() - logger.info("Service finished initializing") +async def main() -> None: + logger.info("Initializing database service") + await init() + logger.info("Database Initialized successfully") if __name__ == "__main__": - main() -# asyncio.run(main()) + asyncio.run(main()) diff --git a/scripts/prestart.sh b/scripts/prestart.sh index 41e11b4..9196e38 100644 --- a/scripts/prestart.sh +++ b/scripts/prestart.sh @@ -1,10 +1,10 @@ #!/bin/bash # Let the DB start -python /app/scripts/backend_pre_start.py +python /files/scripts/backend_pre_start.py # Run migrations alembic upgrade head # Create initial data in DB -# python /app/app/initial_data.py +#python /files/scripts/insert_admin_user.py \ No newline at end of file diff --git a/scripts/start.sh b/scripts/start.sh index 72c9c66..a1a3b46 100644 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -1,4 +1,4 @@ #!/bin/bash -/app/scripts/prestart.sh +/files/scripts/prestart.sh /bin/sh -c "uvicorn app.main:app --reload --host 0.0.0.0 --port 8001" \ No newline at end of file From 2338b3e5f301e309263b2fd45269b3f9e5268ba9 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 11:01:55 +0330 Subject: [PATCH 05/18] feat: assembled first asyncio alchemy query --- app/auth/handlers_scopes.py | 67 +++++++++++++++++++------------------ app/auth/schema.py | 8 +++++ app/db.py | 21 ++++++++++-- app/main.py | 52 ++++++++++++++-------------- 4 files changed, 87 insertions(+), 61 deletions(-) create mode 100644 app/auth/schema.py diff --git a/app/auth/handlers_scopes.py b/app/auth/handlers_scopes.py index 5575174..b03d0d4 100644 --- a/app/auth/handlers_scopes.py +++ b/app/auth/handlers_scopes.py @@ -2,6 +2,7 @@ from datetime import UTC, datetime, timedelta from typing import Annotated +import bcrypt from fastapi import APIRouter, Depends, HTTPException, Security, status from fastapi.security import ( OAuth2PasswordBearer, @@ -9,18 +10,19 @@ SecurityScopes, ) from jose import JWTError, jwt -from passlib.context import CryptContext from pydantic import BaseModel, ValidationError +from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import Session, select -from ..db import engine -from .models import BaseUser, User +from ..db import engine, get_db_session +from .models import User +from .schema import UserCreate # to get a string like this run: # openssl rand -hex 32 -SECRET_KEY = os.environ.get("SECRET_KEY") -ALGORITHM = os.environ.get("ALGORITHM") -ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") +SECRET_KEY = os.environ.get("SECRET_KEY") or "sample_secret_key_here!!!" 
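# Illustrative, standalone sketch (not the project's code): a round-trip check of the
# bcrypt helpers this commit introduces. bcrypt.hashpw() returns bytes; if the hash is
# persisted in a String column it may come back as str, so the verify helper below
# re-encodes defensively. That str/bytes handling is an assumption to confirm against
# the actual User model, not something the patch itself spells out.
import bcrypt


def get_password_hash(password: str) -> bytes:
    # rounds=14 mirrors the commit; a higher cost factor slows brute-force attempts
    return bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt(rounds=14))


def verify_password(plain_password: str, hashed_password: bytes | str) -> bool:
    if isinstance(hashed_password, str):
        hashed_password = hashed_password.encode("utf-8")
    return bcrypt.checkpw(plain_password.encode("utf-8"), hashed_password)


if __name__ == "__main__":
    stored = get_password_hash("s3cret")
    assert verify_password("s3cret", stored)
    assert not verify_password("wrong", stored)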
+ALGORITHM = os.environ.get("ALGORITHM") or "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") or 30 router = APIRouter() @@ -28,7 +30,6 @@ tokenUrl="token", scopes={"me": "Read information about the current user.", "items": "Read items."}, ) -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") class Token(BaseModel): @@ -42,11 +43,12 @@ class TokenData(BaseModel): def verify_password(plain_password, hashed_password): - return pwd_context.verify(plain_password, hashed_password) + return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password) def get_password_hash(password: str): - return pwd_context.hash(password) + salt = bcrypt.gensalt(rounds=14) + return bcrypt.hashpw(password.encode('utf-8'), salt) def get_user(username: str | None): @@ -78,7 +80,7 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None): async def get_current_user( - security_scopes: SecurityScopes, token: Annotated[str, Depends(oauth2_scheme)] + security_scopes: SecurityScopes, token: Annotated[str, Depends(oauth2_scheme)] ): if security_scopes.scopes: authenticate_value = f'Bearer scope="{security_scopes.scope_str}"' @@ -112,7 +114,7 @@ async def get_current_user( async def get_current_active_user( - current_user: Annotated[User, Security(get_current_user, scopes=["me"])], + current_user: Annotated[User, Security(get_current_user, scopes=["me"])], ): if current_user.disabled: raise HTTPException(status_code=400, detail="Inactive user") @@ -121,7 +123,7 @@ async def get_current_active_user( @router.post("/token") async def login_for_access_token( - form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], ) -> Token: user = authenticate_user(form_data.username, form_data.password) if not user: @@ -138,27 +140,26 @@ async def login_for_access_token( return Token(access_token=access_token, token_type="bearer") -@router.get("/user", response_model=User) -async def get_current_active_user_from_token( - current_user: Annotated[User, Depends(get_current_active_user)], -): - return current_user +# @router.get("/user", response_model=User) +# async def get_current_active_user_from_token( +# current_user: Annotated[User, Depends(get_current_active_user)], +# ): +# return current_user @router.post("/user") -async def register_user(user: BaseUser): - with Session(engine) as session: - new_user = User( - username=user.username, - full_name=user.full_name, - email=user.email, - password=get_password_hash(user.password), - ) - session.add(new_user) - session.commit() - session.refresh(new_user) - return { - "username": user.username, - "full_name": user.full_name, - "email": user.email, - } +async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db_session)): + new_user = User( + username=user.username, + full_name=user.full_name, + email=user.email, + password=get_password_hash(user.password), + ) + session.add(new_user) + await session.commit() + await session.refresh(new_user) + return { + "username": user.username, + "full_name": user.full_name, + "email": user.email, + } diff --git a/app/auth/schema.py b/app/auth/schema.py new file mode 100644 index 0000000..7e64267 --- /dev/null +++ b/app/auth/schema.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class UserCreate(BaseModel): + username: str + email: str + full_name: str + password: str diff --git a/app/db.py b/app/db.py index 47b41d0..fd34fcf 100644 --- a/app/db.py +++ b/app/db.py @@ -1,16 +1,33 @@ 
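# Illustrative, standalone sketch of the session-per-request pattern that the new
# register_user handler above relies on and that the app/db.py hunk below defines.
# Assumes SQLAlchemy 2.x and aiosqlite are installed; the in-memory URL is an
# example only, not the project's configuration.
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine("sqlite+aiosqlite:///:memory:")
async_session = async_sessionmaker(engine, expire_on_commit=False)


async def get_db_session():
    # FastAPI-style dependency: yield one AsyncSession per request, always close it
    db = async_session()
    try:
        yield db
    finally:
        await db.close()


async def demo() -> None:
    # consume the dependency by hand, the way FastAPI would for a single request
    async for session in get_db_session():
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())
    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(demo())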
import os +from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine from sqlalchemy.orm import declarative_base -# from sqlmodel import Session, create_engine # declarative base class Base = declarative_base() # DATABASE_URL = "sqlite+aiosqlite:///database.db" DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db" -# engine = create_engine(DATABASE_URL, echo=True) +engine = create_async_engine(DATABASE_URL, echo=True) +async_session = async_sessionmaker(engine, expire_on_commit=False) # def get_session(): # with Session(engine) as session: # yield session + + +# def get_session(): +# db = async_session() +# try: +# yield db +# finally: +# db.close() + + +async def get_db_session(): + db = async_session() + try: + yield db + finally: + await db.close() diff --git a/app/main.py b/app/main.py index 54fa40f..f769824 100644 --- a/app/main.py +++ b/app/main.py @@ -3,33 +3,33 @@ from fastapi.responses import HTMLResponse from fastapi.templating import Jinja2Templates -from .auth.handlers import router as auth_router -from .files.handlers import router as files_router -from .live_socket.handlers import router as ws_router -from .nosql.handlers import router as nosql_router -from .songs.handlers import router as song_router +from .auth.handlers_scopes import router as auth_router +# from .files.handlers import router as files_router +# from .live_socket.handlers import router as ws_router +# from .nosql.handlers import router as nosql_router +# from .songs.handlers import router as song_router app = FastAPI() # app.include_router(song_router) -# app.include_router(auth_router) +app.include_router(auth_router) # app.include_router(files_router) # app.include_router(ws_router) # app.include_router(nosql_router) -# -# origins = [ -# "http://localhost.tiangolo.com", -# "https://localhost.tiangolo.com", -# "http://localhost", -# "http://localhost:8080", -# ] -# app.add_middleware( -# CORSMiddleware, -# allow_origins=origins, -# allow_credentials=True, -# allow_methods=["*"], -# allow_headers=["*"], -# ) + +origins = [ + "http://localhost.tiangolo.com", + "https://localhost.tiangolo.com", + "http://localhost", + "http://localhost:8080", +] +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) @app.get("/ping") @@ -37,9 +37,9 @@ async def pong(): return {"ping": "pong!"} -# @app.get("/bootstrap", response_class=HTMLResponse) -# async def read_item(request: Request, user_id: str): -# templates = Jinja2Templates(directory="app/templates") -# return templates.TemplateResponse( -# request=request, name="index.html", context={"id": user_id} -# ) +@app.get("/bootstrap", response_class=HTMLResponse) +async def read_item(request: Request, user_id: str): + templates = Jinja2Templates(directory="app/templates") + return templates.TemplateResponse( + request=request, name="index.html", context={"id": user_id} + ) From b31f1db9721c510cdd012f82acc2800c95f2f320 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 11:09:38 +0330 Subject: [PATCH 06/18] feat: added UserRead model --- app/auth/handlers_scopes.py | 23 ++++++++++------------- app/auth/models.py | 11 ----------- app/auth/schema.py | 7 +++++++ app/db.py | 17 +---------------- 4 files changed, 18 insertions(+), 40 deletions(-) diff --git a/app/auth/handlers_scopes.py b/app/auth/handlers_scopes.py index b03d0d4..95a75c6 100644 --- a/app/auth/handlers_scopes.py +++ b/app/auth/handlers_scopes.py @@ -16,7 +16,7 @@ from ..db import 
engine, get_db_session from .models import User -from .schema import UserCreate +from .schema import UserCreate, UserRead # to get a string like this run: # openssl rand -hex 32 @@ -140,15 +140,16 @@ async def login_for_access_token( return Token(access_token=access_token, token_type="bearer") -# @router.get("/user", response_model=User) -# async def get_current_active_user_from_token( -# current_user: Annotated[User, Depends(get_current_active_user)], -# ): -# return current_user +@router.get("/user", response_model=UserRead) +async def get_current_active_user_from_token( + current_user: Annotated[User, Depends(get_current_active_user)], +): + return current_user -@router.post("/user") -async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db_session)): +@router.post("/user", response_model=UserRead) +async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db_session), + ): new_user = User( username=user.username, full_name=user.full_name, @@ -158,8 +159,4 @@ async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db session.add(new_user) await session.commit() await session.refresh(new_user) - return { - "username": user.username, - "full_name": user.full_name, - "email": user.email, - } + return new_user diff --git a/app/auth/models.py b/app/auth/models.py index 99d2e16..47bed2c 100644 --- a/app/auth/models.py +++ b/app/auth/models.py @@ -2,17 +2,6 @@ from ..db import Base -# class BaseUser(SQLModel): -# username: str -# email: str | None = None -# full_name: str | None = None -# password: str -# -# -# class User(BaseUser, table=True): -# id: int = Field(default=None, nullable=False, primary_key=True) -# disabled: bool | None = None - class User(Base): __tablename__ = "users" diff --git a/app/auth/schema.py b/app/auth/schema.py index 7e64267..476530f 100644 --- a/app/auth/schema.py +++ b/app/auth/schema.py @@ -6,3 +6,10 @@ class UserCreate(BaseModel): email: str full_name: str password: str + + +class UserRead(BaseModel): + id: int + username: str + email: str + full_name: str diff --git a/app/db.py b/app/db.py index fd34fcf..006652e 100644 --- a/app/db.py +++ b/app/db.py @@ -3,28 +3,13 @@ from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine from sqlalchemy.orm import declarative_base -# declarative base class Base = declarative_base() -# DATABASE_URL = "sqlite+aiosqlite:///database.db" DATABASE_URL = os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db" -engine = create_async_engine(DATABASE_URL, echo=True) +engine = create_async_engine(DATABASE_URL) async_session = async_sessionmaker(engine, expire_on_commit=False) -# def get_session(): -# with Session(engine) as session: -# yield session - - -# def get_session(): -# db = async_session() -# try: -# yield db -# finally: -# db.close() - - async def get_db_session(): db = async_session() try: From 047e9960d7110bad065f6527aeb4e2cab71fe3b3 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 17:02:09 +0330 Subject: [PATCH 07/18] feat: restructured tables using sqlalchemy --- alembic/env.py | 2 +- ...b634132be_init.py => 03faac49a92e_init.py} | 34 +++- app/auth/handlers.py | 99 ++++++---- app/auth/handlers_scopes.py | 162 ----------------- app/auth/{schema.py => schemas.py} | 0 app/main.py | 18 +- app/songs/handlers.py | 172 +++++++++--------- app/songs/models.py | 86 +++------ app/songs/schemas.py | 49 +++++ 9 files changed, 257 insertions(+), 365 deletions(-) rename alembic/versions/{ab5b634132be_init.py => 03faac49a92e_init.py} 
(51%) delete mode 100644 app/auth/handlers_scopes.py rename app/auth/{schema.py => schemas.py} (100%) create mode 100644 app/songs/schemas.py diff --git a/alembic/env.py b/alembic/env.py index 1547eae..c21ebb3 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -9,7 +9,7 @@ from alembic import context from app.db import Base from app.auth.models import User # New -from app.songs.tables import Song # New +from app.songs.models import Song, City, Tag, SongTag # New # this is the Alembic Config object, which provides # access to the values within the .ini file in use. diff --git a/alembic/versions/ab5b634132be_init.py b/alembic/versions/03faac49a92e_init.py similarity index 51% rename from alembic/versions/ab5b634132be_init.py rename to alembic/versions/03faac49a92e_init.py index 4331ab1..9109926 100644 --- a/alembic/versions/ab5b634132be_init.py +++ b/alembic/versions/03faac49a92e_init.py @@ -1,8 +1,8 @@ """init -Revision ID: ab5b634132be +Revision ID: 03faac49a92e Revises: -Create Date: 2024-03-05 23:51:31.252368 +Create Date: 2024-03-07 17:01:06.441295 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. -revision: str = 'ab5b634132be' +revision: str = '03faac49a92e' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -20,11 +20,15 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.create_table('songs', + op.create_table('cities', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id') ) + op.create_table('tags', + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) op.create_table('users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(), nullable=True), @@ -33,11 +37,31 @@ def upgrade() -> None: sa.Column('password', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id') ) + op.create_table('songs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('artist', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.Column('year', sa.String(), nullable=True), + sa.Column('city_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['city_id'], ['cities.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('song_tag', + sa.Column('song_id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['song_id'], ['songs.id'], ), + sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ), + sa.PrimaryKeyConstraint('song_id', 'tag_id') + ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('users') + op.drop_table('song_tag') op.drop_table('songs') + op.drop_table('users') + op.drop_table('tags') + op.drop_table('cities') # ### end Alembic commands ### diff --git a/app/auth/handlers.py b/app/auth/handlers.py index 6e3f443..95a75c6 100644 --- a/app/auth/handlers.py +++ b/app/auth/handlers.py @@ -2,26 +2,34 @@ from datetime import UTC, datetime, timedelta from typing import Annotated -from fastapi import APIRouter, Depends, HTTPException, status -from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm +import bcrypt +from fastapi import APIRouter, Depends, HTTPException, Security, status +from fastapi.security import ( + OAuth2PasswordBearer, + OAuth2PasswordRequestForm, + SecurityScopes, +) from jose import JWTError, jwt -from passlib.context import CryptContext -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError +from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import Session, select -from ..db import engine -from .models import BaseUser, User +from ..db import engine, get_db_session +from .models import User +from .schema import UserCreate, UserRead # to get a string like this run: # openssl rand -hex 32 -SECRET_KEY = os.environ.get("SECRET_KEY") -ALGORITHM = os.environ.get("ALGORITHM") -ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") +SECRET_KEY = os.environ.get("SECRET_KEY") or "sample_secret_key_here!!!" +ALGORITHM = os.environ.get("ALGORITHM") or "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") or 30 router = APIRouter() -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +oauth2_scheme = OAuth2PasswordBearer( + tokenUrl="token", + scopes={"me": "Read information about the current user.", "items": "Read items."}, +) class Token(BaseModel): @@ -31,14 +39,16 @@ class Token(BaseModel): class TokenData(BaseModel): username: str | None = None + scopes: list[str] = [] def verify_password(plain_password, hashed_password): - return pwd_context.verify(plain_password, hashed_password) + return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password) def get_password_hash(password: str): - return pwd_context.hash(password) + salt = bcrypt.gensalt(rounds=14) + return bcrypt.hashpw(password.encode('utf-8'), salt) def get_user(username: str | None): @@ -69,28 +79,42 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None): return encoded_jwt -async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): +async def get_current_user( + security_scopes: SecurityScopes, token: Annotated[str, Depends(oauth2_scheme)] +): + if security_scopes.scopes: + authenticate_value = f'Bearer scope="{security_scopes.scope_str}"' + else: + authenticate_value = "Bearer" credentials_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", - headers={"WWW-Authenticate": "Bearer"}, + headers={"WWW-Authenticate": authenticate_value}, ) try: payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) username = payload.get("sub") if username is None: raise credentials_exception - token_data = TokenData(username=username) - except JWTError as err: + token_scopes = payload.get("scopes", []) + token_data = TokenData(scopes=token_scopes, username=username) + except (JWTError, ValidationError) as err: raise credentials_exception from err user = get_user(username=token_data.username) if user is None: 
raise credentials_exception + for scope in security_scopes.scopes: + if scope not in token_data.scopes: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not enough permissions", + headers={"WWW-Authenticate": authenticate_value}, + ) return user async def get_current_active_user( - current_user: Annotated[User, Depends(get_current_user)], + current_user: Annotated[User, Security(get_current_user, scopes=["me"])], ): if current_user.disabled: raise HTTPException(status_code=400, detail="Inactive user") @@ -99,7 +123,7 @@ async def get_current_active_user( @router.post("/token") async def login_for_access_token( - form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], ) -> Token: user = authenticate_user(form_data.username, form_data.password) if not user: @@ -110,32 +134,29 @@ async def login_for_access_token( ) access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) access_token = create_access_token( - data={"sub": user.username}, expires_delta=access_token_expires + data={"sub": user.username, "scopes": form_data.scopes}, + expires_delta=access_token_expires, ) return Token(access_token=access_token, token_type="bearer") -@router.get("/user", response_model=User) +@router.get("/user", response_model=UserRead) async def get_current_active_user_from_token( - current_user: Annotated[User, Depends(get_current_active_user)], + current_user: Annotated[User, Depends(get_current_active_user)], ): return current_user -@router.post("/user") -async def register_user(user: BaseUser): - with Session(engine) as session: - new_user = User( - username=user.username, - full_name=user.full_name, - email=user.email, - password=get_password_hash(user.password), - ) - session.add(new_user) - session.commit() - session.refresh(new_user) - return { - "username": user.username, - "full_name": user.full_name, - "email": user.email, - } +@router.post("/user", response_model=UserRead) +async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db_session), + ): + new_user = User( + username=user.username, + full_name=user.full_name, + email=user.email, + password=get_password_hash(user.password), + ) + session.add(new_user) + await session.commit() + await session.refresh(new_user) + return new_user diff --git a/app/auth/handlers_scopes.py b/app/auth/handlers_scopes.py deleted file mode 100644 index 95a75c6..0000000 --- a/app/auth/handlers_scopes.py +++ /dev/null @@ -1,162 +0,0 @@ -import os -from datetime import UTC, datetime, timedelta -from typing import Annotated - -import bcrypt -from fastapi import APIRouter, Depends, HTTPException, Security, status -from fastapi.security import ( - OAuth2PasswordBearer, - OAuth2PasswordRequestForm, - SecurityScopes, -) -from jose import JWTError, jwt -from pydantic import BaseModel, ValidationError -from sqlalchemy.ext.asyncio import AsyncSession -from sqlmodel import Session, select - -from ..db import engine, get_db_session -from .models import User -from .schema import UserCreate, UserRead - -# to get a string like this run: -# openssl rand -hex 32 -SECRET_KEY = os.environ.get("SECRET_KEY") or "sample_secret_key_here!!!" 
-ALGORITHM = os.environ.get("ALGORITHM") or "HS256" -ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") or 30 - -router = APIRouter() - -oauth2_scheme = OAuth2PasswordBearer( - tokenUrl="token", - scopes={"me": "Read information about the current user.", "items": "Read items."}, -) - - -class Token(BaseModel): - access_token: str - token_type: str - - -class TokenData(BaseModel): - username: str | None = None - scopes: list[str] = [] - - -def verify_password(plain_password, hashed_password): - return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password) - - -def get_password_hash(password: str): - salt = bcrypt.gensalt(rounds=14) - return bcrypt.hashpw(password.encode('utf-8'), salt) - - -def get_user(username: str | None): - with Session(engine) as session: - # SQLModel.metadata.create_all(engine) - result = session.exec(select(User).where(User.username == username)) - user = result.first() - return user - - -def authenticate_user(username: str, password: str): - user = get_user(username) - if not user: - return False - if not verify_password(password, user.password): - return False - return user - - -def create_access_token(data: dict, expires_delta: timedelta | None = None): - to_encode = data.copy() - if expires_delta: - expire = datetime.now(UTC) + expires_delta - else: - expire = datetime.now(UTC) + timedelta(minutes=15) - to_encode.update({"exp": expire}) - encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) - return encoded_jwt - - -async def get_current_user( - security_scopes: SecurityScopes, token: Annotated[str, Depends(oauth2_scheme)] -): - if security_scopes.scopes: - authenticate_value = f'Bearer scope="{security_scopes.scope_str}"' - else: - authenticate_value = "Bearer" - credentials_exception = HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Could not validate credentials", - headers={"WWW-Authenticate": authenticate_value}, - ) - try: - payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) - username = payload.get("sub") - if username is None: - raise credentials_exception - token_scopes = payload.get("scopes", []) - token_data = TokenData(scopes=token_scopes, username=username) - except (JWTError, ValidationError) as err: - raise credentials_exception from err - user = get_user(username=token_data.username) - if user is None: - raise credentials_exception - for scope in security_scopes.scopes: - if scope not in token_data.scopes: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Not enough permissions", - headers={"WWW-Authenticate": authenticate_value}, - ) - return user - - -async def get_current_active_user( - current_user: Annotated[User, Security(get_current_user, scopes=["me"])], -): - if current_user.disabled: - raise HTTPException(status_code=400, detail="Inactive user") - return current_user - - -@router.post("/token") -async def login_for_access_token( - form_data: Annotated[OAuth2PasswordRequestForm, Depends()], -) -> Token: - user = authenticate_user(form_data.username, form_data.password) - if not user: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Incorrect username or password", - headers={"WWW-Authenticate": "Bearer"}, - ) - access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - access_token = create_access_token( - data={"sub": user.username, "scopes": form_data.scopes}, - expires_delta=access_token_expires, - ) - return Token(access_token=access_token, token_type="bearer") - - 
-@router.get("/user", response_model=UserRead) -async def get_current_active_user_from_token( - current_user: Annotated[User, Depends(get_current_active_user)], -): - return current_user - - -@router.post("/user", response_model=UserRead) -async def register_user(user: UserCreate, session: AsyncSession = Depends(get_db_session), - ): - new_user = User( - username=user.username, - full_name=user.full_name, - email=user.email, - password=get_password_hash(user.password), - ) - session.add(new_user) - await session.commit() - await session.refresh(new_user) - return new_user diff --git a/app/auth/schema.py b/app/auth/schemas.py similarity index 100% rename from app/auth/schema.py rename to app/auth/schemas.py diff --git a/app/main.py b/app/main.py index f769824..2337f7f 100644 --- a/app/main.py +++ b/app/main.py @@ -3,19 +3,19 @@ from fastapi.responses import HTMLResponse from fastapi.templating import Jinja2Templates -from .auth.handlers_scopes import router as auth_router -# from .files.handlers import router as files_router -# from .live_socket.handlers import router as ws_router -# from .nosql.handlers import router as nosql_router -# from .songs.handlers import router as song_router +from .auth.handlers import router as auth_router +from .files.handlers import router as files_router +from .live_socket.handlers import router as ws_router +from .nosql.handlers import router as nosql_router +from .songs.handlers import router as song_router app = FastAPI() -# app.include_router(song_router) +app.include_router(song_router) app.include_router(auth_router) -# app.include_router(files_router) -# app.include_router(ws_router) -# app.include_router(nosql_router) +app.include_router(files_router) +app.include_router(ws_router) +app.include_router(nosql_router) origins = [ "http://localhost.tiangolo.com", diff --git a/app/songs/handlers.py b/app/songs/handlers.py index a08365b..5502f43 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -1,103 +1,97 @@ -from typing import Annotated - from fastapi import APIRouter, Depends from fastapi.security import OAuth2PasswordBearer -from sqlmodel import Session, select +from sqlalchemy.ext.asyncio import AsyncSession -from ..auth.handlers import get_current_active_user -from ..auth.models import User -from ..db import get_session -from ..redis import r -from .models import City, CityCreate, Song, SongCreate, SongRead, Tag, TagCreate +from ..db import get_db_session +from .models import City +from .schemas import CityCreate, CityRead router = APIRouter() oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") -@router.get("/songs", response_model=list[SongRead]) -async def get_songs( - current_user: Annotated[User, Depends(get_current_active_user)], - session: Session = Depends(get_session), -): - songs = session.exec(select(Song)).all() - return songs - # songs = session.exec(select(Song, City.title).join(City, isouter=True)).all() - # current_user: Annotated[User, Depends(get_current_active_user)], - # return [ - # {"name": song[0].name, - # "artist": song[0].artist, - # "year": song[0].year, - # "id": song[0].id, - # "description": song[0].description, - # "city": song[1]} - # for song in songs - # ] - - -@router.post("/songs") -async def add_song(song: SongCreate, session: Session = Depends(get_session)): - new_song = Song( - name=song.name, - artist=song.artist, - year=song.year, - description=song.description, - ) - session.add(new_song) - session.commit() - session.refresh(new_song) - return new_song - - -@router.post("/city", 
response_model=City) -def create_city(*, session: Session = Depends(get_session), city: CityCreate): - new_city = City.model_validate(city) +# @router.get("/songs", response_model=list[SongRead]) +# async def get_songs( +# current_user: Annotated[User, Depends(get_current_active_user)], +# session: AsyncSession = Depends(get_db_session)): +# songs = session.exec(select(Song)).all() +# return songs +# songs = session.exec(select(Song, City.title).join(City, isouter=True)).all() +# current_user: Annotated[User, Depends(get_current_active_user)], +# return [ +# {"name": song[0].name, +# "artist": song[0].artist, +# "year": song[0].year, +# "id": song[0].id, +# "description": song[0].description, +# "city": song[1]} +# for song in songs +# ] + + +# @router.post("/songs") +# async def add_song(song: SongCreate, session: AsyncSession = Depends(get_db_session)): +# new_song = Song( +# name=song.name, +# artist=song.artist, +# year=song.year, +# description=song.description, +# ) +# session.add(new_song) +# await session.commit() +# await session.refresh(new_song) +# return new_song + + +@router.post("/city", response_model=CityRead) +def create_city(*, session: AsyncSession = Depends(get_db_session), city: CityCreate): + new_city = City(**city.dict()) session.add(new_city) session.commit() session.refresh(new_city) return new_city - -@router.post("/connect_city_with_song", response_model=Song) -def connect_city_with_song( - city_title: str, song_title: str, session: Session = Depends(get_session) -): - city_in_db = session.exec(select(City).where(City.title == city_title)).first() - song = session.exec(select(Song).where(Song.name.like(f"%{song_title}%"))).first() - song.city = city_in_db - session.add(song) - session.commit() - session.refresh(song) - return song - - -@router.post("/tags", response_model=Tag) -def create_tag(*, session: Session = Depends(get_session), tag: TagCreate): - new_tag = Tag.model_validate(tag) - session.add(new_tag) - session.commit() - session.refresh(new_tag) - return new_tag - - -@router.post("/attach-tags") -def attach_tag_to_song( - tag_title: str, song_name: str, session: Session = Depends(get_session) -): - tag_in_db = session.exec( - select(Tag).where(Tag.title.like(f"%{tag_title}%")) - ).first() - song_in_db = session.exec( - select(Song).where(Song.name.like(f"%{song_name}%")) - ).first() - song_in_db.tags.append(tag_in_db) - session.add(song_in_db) - session.commit() - session.refresh(song_in_db) - return {"done": True} - - -@router.post("/redis") -def save_in_redis(key: str, value: str): - r.set(key, value) - return {"done": True} +# @router.post("/connect_city_with_song", response_model=Song) +# def connect_city_with_song( +# city_title: str, song_title: str, session: AsyncSession = Depends(get_db_session) +# ): +# city_in_db = session.exec(select(City).where(City.title == city_title)).first() +# song = session.exec(select(Song).where(Song.name.like(f"%{song_title}%"))).first() +# song.city = city_in_db +# session.add(song) +# session.commit() +# session.refresh(song) +# return song +# +# +# @router.post("/tags", response_model=Tag) +# def create_tag(*, session: AsyncSession = Depends(get_db_session), tag: TagCreate): +# new_tag = Tag.model_validate(tag) +# session.add(new_tag) +# session.commit() +# session.refresh(new_tag) +# return new_tag +# +# +# @router.post("/attach-tags") +# def attach_tag_to_song( +# tag_title: str, song_name: str, session: AsyncSession = Depends(get_db_session) +# ): +# tag_in_db = session.exec( +# 
select(Tag).where(Tag.title.like(f"%{tag_title}%")) +# ).first() +# song_in_db = session.exec( +# select(Song).where(Song.name.like(f"%{song_name}%")) +# ).first() +# song_in_db.tags.append(tag_in_db) +# session.add(song_in_db) +# session.commit() +# session.refresh(song_in_db) +# return {"done": True} +# +# +# @router.post("/redis") +# def save_in_redis(key: str, value: str): +# r.set(key, value) +# return {"done": True} diff --git a/app/songs/models.py b/app/songs/models.py index dedf641..a3fd97e 100644 --- a/app/songs/models.py +++ b/app/songs/models.py @@ -1,75 +1,41 @@ -from typing import Optional +from sqlalchemy import Column, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship -from pydantic import field_validator -from sqlmodel import Field, Relationship, SQLModel +from ..db import Base -class SongTag(SQLModel, table=True): - song_id: int | None = Field(default=None, foreign_key="song.id", primary_key=True) - tag_id: int | None = Field(default=None, foreign_key="tag.id", primary_key=True) +class SongTag(Base): + __tablename__ = "song_tag" + song_id: Mapped[int] = mapped_column(ForeignKey("songs.id"), primary_key=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tags.id"), primary_key=True) -class SongBase(SQLModel): - name: str - artist: str - description: str | None = None - year: int | None = None - city_id: int | None = Field(default=None, foreign_key="city.id") - @field_validator("year") - @classmethod - def validate_year(cls, value): - if value is not None and value < 1900: - raise ValueError("Year must be 1900 or later") - return value +class Song(Base): + __tablename__ = "songs" - @field_validator("description") - @classmethod - def validate_description(cls, value): - if value is not None and len(value) < 5: - raise ValueError("Description must be at least 5 characters long") - return value + id: Mapped[int] = Column(Integer, primary_key=True) + name: Mapped[str] = Column(String) + artist: Mapped[str] = Column(String) + description: Mapped[str] = Column(String) + year: Mapped[str] = Column(String) + city_id: Mapped[int] = mapped_column(ForeignKey("cities.id")) + city: Mapped["City"] = relationship(back_populates="songs") + tags: Mapped[list["Tag"]] = relationship(back_populates="songs", secondary=SongTag) -class SongCreate(SongBase): - pass +class Tag(Base): + __tablename__ = "tags" -class Song(SongBase, table=True): - id: int = Field(default=None, nullable=False, primary_key=True) + id: Mapped[int] = Column(Integer, primary_key=True) + songs: Mapped[list["Song"]] = relationship(back_populates="tags", secondary=SongTag) - city: Optional["City"] = Relationship(back_populates="songs") - tags: list["Tag"] = Relationship(back_populates="songs", link_model=SongTag) +class City(Base): + __tablename__ = "cities" -class SongRead(SQLModel): - id: int - name: str - artist: str - description: str | None = None - year: int | None = None - city: Optional["City"] - tags: list["Tag"] - - -class TagCreate(SQLModel): - title: str - description: str - - -class Tag(TagCreate, table=True): - id: int = Field(default=None, nullable=False, primary_key=True) - - songs: list["Song"] = Relationship(back_populates="tags", link_model=SongTag) - - -class CityCreate(SQLModel): - title: str - desc: str - - -class City(CityCreate, table=True): - id: int = Field(default=None, nullable=False, primary_key=True) - - songs: list["Song"] = Relationship(back_populates="city") + id: Mapped[int] = Column(Integer, primary_key=True) + name: Mapped[str] = Column(String) + 
songs: Mapped[list["Song"]] = relationship(back_populates="city") diff --git a/app/songs/schemas.py b/app/songs/schemas.py new file mode 100644 index 0000000..2b6ae0b --- /dev/null +++ b/app/songs/schemas.py @@ -0,0 +1,49 @@ +from typing import Optional + +from pydantic import BaseModel, field_validator + + +class SongRead(BaseModel): + id: int + name: str + artist: str + description: str | None = None + year: int | None = None + city: Optional["City"] + tags: list["Tag"] + + +class SongBase(BaseModel): + name: str + artist: str + description: str | None = None + year: int | None = None + city_id: int | None = Field(default=None, foreign_key="city.id") + + @field_validator("year") + @classmethod + def validate_year(cls, value): + if value is not None and value < 1900: + raise ValueError("Year must be 1900 or later") + return value + + @field_validator("description") + @classmethod + def validate_description(cls, value): + if value is not None and len(value) < 5: + raise ValueError("Description must be at least 5 characters long") + return value + + +class CityCreate(BaseModel): + title: str + desc: str + + +class CityRead(CityCreate): + id: int + + +class TagCreate(BaseModel): + title: str + description: str From 0f8727e56659a325aba1e4281a783df895c3e41f Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 18:41:57 +0330 Subject: [PATCH 08/18] feat: restructured tag creations --- ...aac49a92e_init.py => a8599ff7d4b9_init.py} | 8 +++-- app/auth/handlers.py | 2 +- app/songs/handlers.py | 29 ++++++++++--------- app/songs/models.py | 13 +++++---- app/songs/schemas.py | 13 +++++---- 5 files changed, 38 insertions(+), 27 deletions(-) rename alembic/versions/{03faac49a92e_init.py => a8599ff7d4b9_init.py} (90%) diff --git a/alembic/versions/03faac49a92e_init.py b/alembic/versions/a8599ff7d4b9_init.py similarity index 90% rename from alembic/versions/03faac49a92e_init.py rename to alembic/versions/a8599ff7d4b9_init.py index 9109926..2a22e7c 100644 --- a/alembic/versions/03faac49a92e_init.py +++ b/alembic/versions/a8599ff7d4b9_init.py @@ -1,8 +1,8 @@ """init -Revision ID: 03faac49a92e +Revision ID: a8599ff7d4b9 Revises: -Create Date: 2024-03-07 17:01:06.441295 +Create Date: 2024-03-07 18:33:35.591426 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. 
-revision: str = '03faac49a92e' +revision: str = 'a8599ff7d4b9' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -27,6 +27,8 @@ def upgrade() -> None: ) op.create_table('tags', sa.Column('id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('users', diff --git a/app/auth/handlers.py b/app/auth/handlers.py index 95a75c6..9ed0db3 100644 --- a/app/auth/handlers.py +++ b/app/auth/handlers.py @@ -16,7 +16,7 @@ from ..db import engine, get_db_session from .models import User -from .schema import UserCreate, UserRead +from .schemas import UserCreate, UserRead # to get a string like this run: # openssl rand -hex 32 diff --git a/app/songs/handlers.py b/app/songs/handlers.py index 5502f43..3a899cc 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -3,8 +3,8 @@ from sqlalchemy.ext.asyncio import AsyncSession from ..db import get_db_session -from .models import City -from .schemas import CityCreate, CityRead +from .models import City, Tag +from .schemas import CityCreate, CityRead, TagCreate, TagRead router = APIRouter() @@ -45,13 +45,15 @@ @router.post("/city", response_model=CityRead) -def create_city(*, session: AsyncSession = Depends(get_db_session), city: CityCreate): - new_city = City(**city.dict()) +async def create_city(*, session: AsyncSession = Depends(get_db_session), city: CityCreate): + # new_city = City(**city.dict()) + new_city = City(name=city.name) session.add(new_city) - session.commit() - session.refresh(new_city) + await session.commit() + await session.refresh(new_city) return new_city + # @router.post("/connect_city_with_song", response_model=Song) # def connect_city_with_song( # city_title: str, song_title: str, session: AsyncSession = Depends(get_db_session) @@ -65,13 +67,14 @@ def create_city(*, session: AsyncSession = Depends(get_db_session), city: CityCr # return song # # -# @router.post("/tags", response_model=Tag) -# def create_tag(*, session: AsyncSession = Depends(get_db_session), tag: TagCreate): -# new_tag = Tag.model_validate(tag) -# session.add(new_tag) -# session.commit() -# session.refresh(new_tag) -# return new_tag +@router.post("/tags", response_model=TagRead) +async def create_tag(*, session: AsyncSession = Depends(get_db_session), tag: TagCreate): + input_tag = TagCreate.model_validate(tag) + new_tag = Tag(**input_tag.dict()) + session.add(new_tag) + await session.commit() + await session.refresh(new_tag) + return new_tag # # # @router.post("/attach-tags") diff --git a/app/songs/models.py b/app/songs/models.py index a3fd97e..60e8bef 100644 --- a/app/songs/models.py +++ b/app/songs/models.py @@ -21,15 +21,18 @@ class Song(Base): year: Mapped[str] = Column(String) city_id: Mapped[int] = mapped_column(ForeignKey("cities.id")) - city: Mapped["City"] = relationship(back_populates="songs") - tags: Mapped[list["Tag"]] = relationship(back_populates="songs", secondary=SongTag) + city: Mapped["City"] = relationship("City", back_populates="songs") + tags: Mapped[list["Tag"]] = relationship("Tag", back_populates="songs", secondary=SongTag.__table__) class Tag(Base): __tablename__ = "tags" - id: Mapped[int] = Column(Integer, primary_key=True) - songs: Mapped[list["Song"]] = relationship(back_populates="tags", secondary=SongTag) + + title: Mapped[str] = Column(String) + description: Mapped[str] = Column(String) + + songs: 
Mapped[list["Song"]] = relationship(back_populates="tags", secondary=SongTag.__table__) class City(Base): @@ -38,4 +41,4 @@ class City(Base): id: Mapped[int] = Column(Integer, primary_key=True) name: Mapped[str] = Column(String) - songs: Mapped[list["Song"]] = relationship(back_populates="city") + songs: Mapped[list["Song"]] = relationship("Song", back_populates="city") diff --git a/app/songs/schemas.py b/app/songs/schemas.py index 2b6ae0b..7d9bc0b 100644 --- a/app/songs/schemas.py +++ b/app/songs/schemas.py @@ -16,9 +16,9 @@ class SongRead(BaseModel): class SongBase(BaseModel): name: str artist: str - description: str | None = None - year: int | None = None - city_id: int | None = Field(default=None, foreign_key="city.id") + description: str | None + year: int | None + city_id: int | None @field_validator("year") @classmethod @@ -36,8 +36,7 @@ def validate_description(cls, value): class CityCreate(BaseModel): - title: str - desc: str + name: str class CityRead(CityCreate): @@ -47,3 +46,7 @@ class CityRead(CityCreate): class TagCreate(BaseModel): title: str description: str + + +class TagRead(TagCreate): + id: int From 07c6735ee76e5643bf29f4b83903bbd32e9e04b9 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 18:58:02 +0330 Subject: [PATCH 09/18] feat: restructured city creations --- alembic/versions/a8599ff7d4b9_init.py | 69 --------------------------- app/songs/handlers.py | 37 +++++++------- app/songs/models.py | 4 +- app/songs/schemas.py | 23 +++++---- 4 files changed, 34 insertions(+), 99 deletions(-) delete mode 100644 alembic/versions/a8599ff7d4b9_init.py diff --git a/alembic/versions/a8599ff7d4b9_init.py b/alembic/versions/a8599ff7d4b9_init.py deleted file mode 100644 index 2a22e7c..0000000 --- a/alembic/versions/a8599ff7d4b9_init.py +++ /dev/null @@ -1,69 +0,0 @@ -"""init - -Revision ID: a8599ff7d4b9 -Revises: -Create Date: 2024-03-07 18:33:35.591426 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = 'a8599ff7d4b9' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('cities', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('tags', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('title', sa.String(), nullable=True), - sa.Column('description', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('users', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('username', sa.String(), nullable=True), - sa.Column('email', sa.String(), nullable=True), - sa.Column('full_name', sa.String(), nullable=True), - sa.Column('password', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('songs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('artist', sa.String(), nullable=True), - sa.Column('description', sa.String(), nullable=True), - sa.Column('year', sa.String(), nullable=True), - sa.Column('city_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['city_id'], ['cities.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('song_tag', - sa.Column('song_id', sa.Integer(), nullable=False), - sa.Column('tag_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['song_id'], ['songs.id'], ), - sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ), - sa.PrimaryKeyConstraint('song_id', 'tag_id') - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('song_tag') - op.drop_table('songs') - op.drop_table('users') - op.drop_table('tags') - op.drop_table('cities') - # ### end Alembic commands ### diff --git a/app/songs/handlers.py b/app/songs/handlers.py index 3a899cc..11e7ad4 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -1,10 +1,15 @@ +from typing import Annotated + from fastapi import APIRouter, Depends from fastapi.security import OAuth2PasswordBearer +from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from ..auth.handlers import get_current_active_user +from ..auth.models import User from ..db import get_db_session -from .models import City, Tag -from .schemas import CityCreate, CityRead, TagCreate, TagRead +from .models import City, Song, Tag +from .schemas import CityCreate, CityRead, SongRead, TagCreate, TagRead, SongCreate router = APIRouter() @@ -15,8 +20,10 @@ # async def get_songs( # current_user: Annotated[User, Depends(get_current_active_user)], # session: AsyncSession = Depends(get_db_session)): -# songs = session.exec(select(Song)).all() +# songs = await session.execute(select(Song)) # return songs + + # songs = session.exec(select(Song, City.title).join(City, isouter=True)).all() # current_user: Annotated[User, Depends(get_current_active_user)], # return [ @@ -29,19 +36,17 @@ # for song in songs # ] - -# @router.post("/songs") -# async def add_song(song: SongCreate, session: AsyncSession = Depends(get_db_session)): -# new_song = Song( -# name=song.name, -# artist=song.artist, -# year=song.year, -# description=song.description, -# ) -# session.add(new_song) -# await session.commit() -# await session.refresh(new_song) -# return new_song +@router.post("/songs") +async def add_song(song: SongCreate, session: AsyncSession = Depends(get_db_session)): + new_song = Song( + name=song.name, + artist=song.artist, + year=song.year, + description=song.description, + ) + session.add(new_song) + await session.commit() + return new_song 
@router.post("/city", response_model=CityRead) diff --git a/app/songs/models.py b/app/songs/models.py index 60e8bef..e2f4ce8 100644 --- a/app/songs/models.py +++ b/app/songs/models.py @@ -18,8 +18,8 @@ class Song(Base): name: Mapped[str] = Column(String) artist: Mapped[str] = Column(String) description: Mapped[str] = Column(String) - year: Mapped[str] = Column(String) - city_id: Mapped[int] = mapped_column(ForeignKey("cities.id")) + year: Mapped[int] = Column(Integer) + city_id: Mapped[int | None] = mapped_column(ForeignKey("cities.id")) city: Mapped["City"] = relationship("City", back_populates="songs") tags: Mapped[list["Tag"]] = relationship("Tag", back_populates="songs", secondary=SongTag.__table__) diff --git a/app/songs/schemas.py b/app/songs/schemas.py index 7d9bc0b..6842751 100644 --- a/app/songs/schemas.py +++ b/app/songs/schemas.py @@ -3,22 +3,11 @@ from pydantic import BaseModel, field_validator -class SongRead(BaseModel): - id: int +class SongCreate(BaseModel): name: str artist: str description: str | None = None year: int | None = None - city: Optional["City"] - tags: list["Tag"] - - -class SongBase(BaseModel): - name: str - artist: str - description: str | None - year: int | None - city_id: int | None @field_validator("year") @classmethod @@ -35,6 +24,16 @@ def validate_description(cls, value): return value +class SongRead(BaseModel): + id: int + name: str + artist: str + description: str | None = None + year: int | None = None + city: Optional["City"] + tags: list["Tag"] + + class CityCreate(BaseModel): name: str From 02d491d9545afdff311c403bfd65144e808905e4 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 19:16:25 +0330 Subject: [PATCH 10/18] feat: restructured get cities query --- alembic/versions/180cc3782c77_init.py | 69 +++++++++++++++++++++++++++ app/songs/handlers.py | 13 ++--- app/songs/schemas.py | 25 +++++----- 3 files changed, 89 insertions(+), 18 deletions(-) create mode 100644 alembic/versions/180cc3782c77_init.py diff --git a/alembic/versions/180cc3782c77_init.py b/alembic/versions/180cc3782c77_init.py new file mode 100644 index 0000000..2f31ded --- /dev/null +++ b/alembic/versions/180cc3782c77_init.py @@ -0,0 +1,69 @@ +"""init + +Revision ID: 180cc3782c77 +Revises: +Create Date: 2024-03-07 18:57:29.839982 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '180cc3782c77' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('cities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('tags', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(), nullable=True), + sa.Column('email', sa.String(), nullable=True), + sa.Column('full_name', sa.String(), nullable=True), + sa.Column('password', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('songs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('artist', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.Column('year', sa.Integer(), nullable=True), + sa.Column('city_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['city_id'], ['cities.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('song_tag', + sa.Column('song_id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['song_id'], ['songs.id'], ), + sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ), + sa.PrimaryKeyConstraint('song_id', 'tag_id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('song_tag') + op.drop_table('songs') + op.drop_table('users') + op.drop_table('tags') + op.drop_table('cities') + # ### end Alembic commands ### diff --git a/app/songs/handlers.py b/app/songs/handlers.py index 11e7ad4..47bdccb 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -4,6 +4,7 @@ from fastapi.security import OAuth2PasswordBearer from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload from ..auth.handlers import get_current_active_user from ..auth.models import User @@ -16,12 +17,12 @@ oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") -# @router.get("/songs", response_model=list[SongRead]) -# async def get_songs( -# current_user: Annotated[User, Depends(get_current_active_user)], -# session: AsyncSession = Depends(get_db_session)): -# songs = await session.execute(select(Song)) -# return songs +@router.get("/songs", response_model=list[SongRead]) +async def get_songs( + session: AsyncSession = Depends(get_db_session)): + result = await session.execute(select(Song).options(selectinload(Song.tags))) + songs = result.scalars().all() + return songs # songs = session.exec(select(Song, City.title).join(City, isouter=True)).all() diff --git a/app/songs/schemas.py b/app/songs/schemas.py index 6842751..f919816 100644 --- a/app/songs/schemas.py +++ b/app/songs/schemas.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, field_validator @@ -24,16 +22,6 @@ def validate_description(cls, value): return value -class SongRead(BaseModel): - id: int - name: str - artist: str - description: str | None = None - year: int | None = None - city: Optional["City"] - tags: list["Tag"] - - class CityCreate(BaseModel): name: str @@ -49,3 +37,16 @@ class TagCreate(BaseModel): class TagRead(TagCreate): id: int + + +class SongRead(BaseModel): + id: int + name: str + artist: str + description: str | None = None + year: int | None = None + city: CityRead | 
None + tags: list[TagRead] + + # class Config: + # from_attributes = True From 65a42c4243a371f7397f34f66ca0c657e78d5d95 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 20:07:50 +0330 Subject: [PATCH 11/18] feat: added many to many adding boilerplate --- app/songs/handlers.py | 41 +++++++++++++++++++---------------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/app/songs/handlers.py b/app/songs/handlers.py index 47bdccb..d27a509 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -1,16 +1,12 @@ -from typing import Annotated - from fastapi import APIRouter, Depends from fastapi.security import OAuth2PasswordBearer from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload -from ..auth.handlers import get_current_active_user -from ..auth.models import User from ..db import get_db_session from .models import City, Song, Tag -from .schemas import CityCreate, CityRead, SongRead, TagCreate, TagRead, SongCreate +from .schemas import CityCreate, CityRead, SongCreate, SongRead, TagCreate, TagRead router = APIRouter() @@ -81,23 +77,24 @@ async def create_tag(*, session: AsyncSession = Depends(get_db_session), tag: Ta await session.commit() await session.refresh(new_tag) return new_tag -# -# -# @router.post("/attach-tags") -# def attach_tag_to_song( -# tag_title: str, song_name: str, session: AsyncSession = Depends(get_db_session) -# ): -# tag_in_db = session.exec( -# select(Tag).where(Tag.title.like(f"%{tag_title}%")) -# ).first() -# song_in_db = session.exec( -# select(Song).where(Song.name.like(f"%{song_name}%")) -# ).first() -# song_in_db.tags.append(tag_in_db) -# session.add(song_in_db) -# session.commit() -# session.refresh(song_in_db) -# return {"done": True} + + +@router.post("/attach-tags") +async def attach_tag_to_song( + tag_title: str, song_name: str, session: AsyncSession = Depends(get_db_session) +): + tag_in_db = await session.scalars( + select(Tag).where(Tag.title.like(f"%{tag_title}%")) + ) + tag = tag_in_db.first() + song_in_db = await session.scalars( + select(Song).options(selectinload(Song.tags)).where(Song.name.like(f"%{song_name}%"))) + song = song_in_db.first() + song.tags.append(tag) + session.add(tag) + await session.commit() + await session.refresh(song) + return {"done": True} # # # @router.post("/redis") From 69e1f4ad08de2aa9ec120ea0f03e13438de9eaf4 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 20:22:15 +0330 Subject: [PATCH 12/18] fix: added inner join for city --- .gitignore | 3 ++- app/songs/handlers.py | 32 +++++++++++++++++--------------- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/.gitignore b/.gitignore index 59ea3c0..69f0893 100644 --- a/.gitignore +++ b/.gitignore @@ -43,4 +43,5 @@ logs* .vscode/ celerybeat-schedule.bak celerybeat-schedule.dat -celerybeat-schedule.dir \ No newline at end of file +celerybeat-schedule.dir +.database.db \ No newline at end of file diff --git a/app/songs/handlers.py b/app/songs/handlers.py index d27a509..0d73f5c 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -16,8 +16,8 @@ @router.get("/songs", response_model=list[SongRead]) async def get_songs( session: AsyncSession = Depends(get_db_session)): - result = await session.execute(select(Song).options(selectinload(Song.tags))) - songs = result.scalars().all() + result = await session.scalars(select(Song).options(selectinload(Song.tags), selectinload(Song.city))) + songs = result.all() return songs @@ -56,19 +56,21 @@ async def 
create_city(*, session: AsyncSession = Depends(get_db_session), city: return new_city -# @router.post("/connect_city_with_song", response_model=Song) -# def connect_city_with_song( -# city_title: str, song_title: str, session: AsyncSession = Depends(get_db_session) -# ): -# city_in_db = session.exec(select(City).where(City.title == city_title)).first() -# song = session.exec(select(Song).where(Song.name.like(f"%{song_title}%"))).first() -# song.city = city_in_db -# session.add(song) -# session.commit() -# session.refresh(song) -# return song -# -# +@router.post("/connect_city_with_song") +async def connect_city_with_song( + city_title: str, song_title: str, session: AsyncSession = Depends(get_db_session) +): + city_in_db = await session.scalars(select(City).where(City.name == city_title)) + city = city_in_db.first() + song_in_db = await session.scalars(select(Song).where(Song.name.like(f"%{song_title}%"))) + song = song_in_db.first() + song.city = city + session.add(song) + await session.commit() + await session.refresh(song) + return {"done": True} + + @router.post("/tags", response_model=TagRead) async def create_tag(*, session: AsyncSession = Depends(get_db_session), tag: TagCreate): input_tag = TagCreate.model_validate(tag) From 5db73a40a9d7f7262e0da18a25c59230456c7b59 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 23:26:42 +0330 Subject: [PATCH 13/18] feat: first asyncio test --- app/conftest.py | 35 +++++++++++++++++++++++------------ app/songs/handlers.py | 24 ++++++------------------ app/songs/test_handlers.py | 30 +++++++++++++++++++----------- poetry.lock | 36 +++++++++++++++++++++++++++--------- pyproject.toml | 1 + 5 files changed, 76 insertions(+), 50 deletions(-) diff --git a/app/conftest.py b/app/conftest.py index 2b72f3a..1e46a17 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -1,28 +1,39 @@ import pytest -from fastapi.testclient import TestClient -from sqlmodel import Session, SQLModel, create_engine +from httpx import AsyncClient +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from .db import get_session +from .db import Base, get_db_session from .main import app @pytest.fixture(name="session") -def session_fixture(): - engine = create_engine( - "sqlite:///testing.db", connect_args={"check_same_thread": False} +async def session_fixture(): + engine = create_async_engine( + "sqlite+aiosqlite:///database.db", connect_args={"check_same_thread": False} ) - SQLModel.metadata.create_all(engine) - with Session(engine) as session: + async_session = async_sessionmaker(engine, expire_on_commit=False) + # meta = MetaData() + # async with engine.begin() as conn: + # await conn.run_sync(Base.meta.create_all) + session = async_session() + try: yield session + finally: + await session.close() @pytest.fixture(name="client") -def client_fixture(session: Session): +async def client_fixture(session: AsyncSession): def get_session_override(): return session - app.dependency_overrides[get_session] = get_session_override + app.dependency_overrides[get_db_session] = get_session_override + + async_client = AsyncClient(app=app, base_url="http://127.0.0.1:8000") + try: + yield async_client + finally: + await async_client.aclose() # Close the AsyncClient after the test - client = TestClient(app) - yield client app.dependency_overrides.clear() diff --git a/app/songs/handlers.py b/app/songs/handlers.py index 0d73f5c..29267db 100644 --- a/app/songs/handlers.py +++ b/app/songs/handlers.py @@ -21,18 +21,6 @@ 
async def get_songs( return songs -# songs = session.exec(select(Song, City.title).join(City, isouter=True)).all() -# current_user: Annotated[User, Depends(get_current_active_user)], -# return [ -# {"name": song[0].name, -# "artist": song[0].artist, -# "year": song[0].year, -# "id": song[0].id, -# "description": song[0].description, -# "city": song[1]} -# for song in songs -# ] - @router.post("/songs") async def add_song(song: SongCreate, session: AsyncSession = Depends(get_db_session)): new_song = Song( @@ -97,9 +85,9 @@ async def attach_tag_to_song( await session.commit() await session.refresh(song) return {"done": True} -# -# -# @router.post("/redis") -# def save_in_redis(key: str, value: str): -# r.set(key, value) -# return {"done": True} + + +@router.post("/redis") +def save_in_redis(key: str, value: str): + r.set(key, value) + return {"done": True} diff --git a/app/songs/test_handlers.py b/app/songs/test_handlers.py index 0f81b8b..e38a5de 100644 --- a/app/songs/test_handlers.py +++ b/app/songs/test_handlers.py @@ -1,11 +1,19 @@ -def test_add_song(client): - response = client.post( - "/songs", - json={"name": "Alen", "artist": "test", "year": 1960}, - ) - assert response.status_code == 200, response.text - data = response.json() - assert data["name"] == "Alen" - assert data["artist"] == "test" - assert data["year"] == 1960 - assert "id" in data +import pytest +from httpx import AsyncClient + +from ..main import app + + +@pytest.mark.asyncio +async def test_add_song(): + async with AsyncClient(app=app, base_url="http://127.0.0.1:8000") as ac: + response = await ac.post( + "/songs", + json={"name": "Alen", "artist": "test", "year": 1960}, + ) + assert response.status_code == 200, response.text + data = response.json() + assert data["name"] == "Alen" + assert data["artist"] == "test" + assert data["year"] == 1960 + assert "id" in data diff --git a/poetry.lock b/poetry.lock index b34ee97..28efa4d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -546,13 +546,13 @@ files = [ [[package]] name = "commitizen" -version = "3.17.0" +version = "3.18.0" description = "Python commitizen client tool" optional = false python-versions = ">=3.8" files = [ - {file = "commitizen-3.17.0-py3-none-any.whl", hash = "sha256:2a9942d52724dc50518ca53997bef0f3b51a7e1c518d1ea3da44f9a9ece2df3a"}, - {file = "commitizen-3.17.0.tar.gz", hash = "sha256:1949025c5485c645656929158a5e2f6f6fa96501eb4461dee88a2493f630cb9c"}, + {file = "commitizen-3.18.0-py3-none-any.whl", hash = "sha256:7e3272725216aa3049e258bb438e0097e07607712325f5ce71d9a0bdf7805370"}, + {file = "commitizen-3.18.0.tar.gz", hash = "sha256:42c1d29ba6a64ddbc6d9952f4bcc2ac7094956e27e47e96c931895e2659e6cee"}, ] [package.dependencies] @@ -935,22 +935,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", 
"jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1543,6 +1543,24 @@ pluggy = ">=1.3.0,<2.0" [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.23.5" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, + {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2138,4 +2156,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "bf3b65c35a3bbb4005b8c8c3799821493ac0ec12fd31d5ac57283dcf1dea77ec" +content-hash = "38fcef3de3fcd9356d3242f54911cfb447dfd0b62bcbe9c9c88171f2d37d97d8" diff --git a/pyproject.toml b/pyproject.toml index 989e1c8..bfba312 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ motor = "^3" pymongo = "^4" Jinja2 = "^3" asyncpg = "^0.29" +pytest-asyncio = "^0.23" [tool.poetry.group.dev.dependencies] commitizen = "^3" From 13eaa2cfeb23c93b79b1b99deacf977043be0249 Mon Sep 17 00:00:00 2001 From: Payam Date: Thu, 7 Mar 2024 23:48:26 +0330 Subject: [PATCH 14/18] feat: used pytest_asyncio --- app/conftest.py | 14 +++++--------- app/songs/test_handlers.py | 26 +++++++++++--------------- 2 files changed, 16 insertions(+), 24 deletions(-) diff --git a/app/conftest.py b/app/conftest.py index 1e46a17..6a3a022 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -1,13 +1,12 @@ -import pytest +import pytest_asyncio from httpx import AsyncClient -from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from .db import Base, get_db_session +from .db import get_db_session from .main import app -@pytest.fixture(name="session") +@pytest_asyncio.fixture(name="session") async def session_fixture(): engine = create_async_engine( "sqlite+aiosqlite:///database.db", connect_args={"check_same_thread": False} @@ -23,17 +22,14 @@ async def session_fixture(): await session.close() -@pytest.fixture(name="client") +@pytest_asyncio.fixture(name="client") async def client_fixture(session: AsyncSession): def get_session_override(): return session app.dependency_overrides[get_db_session] = get_session_override - async_client = AsyncClient(app=app, 
base_url="http://127.0.0.1:8000") - try: + async with AsyncClient(app=app, base_url="http://127.0.0.1:8000") as async_client: yield async_client - finally: - await async_client.aclose() # Close the AsyncClient after the test app.dependency_overrides.clear() diff --git a/app/songs/test_handlers.py b/app/songs/test_handlers.py index e38a5de..743b108 100644 --- a/app/songs/test_handlers.py +++ b/app/songs/test_handlers.py @@ -1,19 +1,15 @@ import pytest -from httpx import AsyncClient - -from ..main import app @pytest.mark.asyncio -async def test_add_song(): - async with AsyncClient(app=app, base_url="http://127.0.0.1:8000") as ac: - response = await ac.post( - "/songs", - json={"name": "Alen", "artist": "test", "year": 1960}, - ) - assert response.status_code == 200, response.text - data = response.json() - assert data["name"] == "Alen" - assert data["artist"] == "test" - assert data["year"] == 1960 - assert "id" in data +async def test_add_song(client): + response = await client.post( + "/songs", + json={"name": "Alen", "artist": "test", "year": 1960}, + ) + assert response.status_code == 200, response.text + data = response.json() + assert data["name"] == "Alen" + assert data["artist"] == "test" + assert data["year"] == 1960 + assert "id" in data From 47bb309be1f1a8d2ae5edf5123043990b1fbc82f Mon Sep 17 00:00:00 2001 From: Payam Date: Fri, 8 Mar 2024 00:11:39 +0330 Subject: [PATCH 15/18] feat: configured test database --- app/conftest.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/app/conftest.py b/app/conftest.py index 6a3a022..cc2b427 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -2,19 +2,18 @@ from httpx import AsyncClient from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from .db import get_db_session +from .db import Base, get_db_session from .main import app @pytest_asyncio.fixture(name="session") async def session_fixture(): engine = create_async_engine( - "sqlite+aiosqlite:///database.db", connect_args={"check_same_thread": False} + "sqlite+aiosqlite:///database2.db", connect_args={"check_same_thread": False} ) async_session = async_sessionmaker(engine, expire_on_commit=False) - # meta = MetaData() - # async with engine.begin() as conn: - # await conn.run_sync(Base.meta.create_all) + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) session = async_session() try: yield session From 8a9303d66d3613bd9887acee7e6c86099603e95f Mon Sep 17 00:00:00 2001 From: Payam Date: Fri, 8 Mar 2024 08:37:24 +0330 Subject: [PATCH 16/18] fix: changed auth module to fully async --- app/auth/handlers.py | 23 ++++++++++------------- app/auth/models.py | 11 ++++++----- poetry.lock | 23 ++++------------------- pyproject.toml | 1 - 4 files changed, 20 insertions(+), 38 deletions(-) diff --git a/app/auth/handlers.py b/app/auth/handlers.py index 9ed0db3..7f2a816 100644 --- a/app/auth/handlers.py +++ b/app/auth/handlers.py @@ -11,15 +11,13 @@ ) from jose import JWTError, jwt from pydantic import BaseModel, ValidationError +from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from sqlmodel import Session, select -from ..db import engine, get_db_session +from ..db import async_session, get_db_session from .models import User from .schemas import UserCreate, UserRead -# to get a string like this run: -# openssl rand -hex 32 SECRET_KEY = os.environ.get("SECRET_KEY") or "sample_secret_key_here!!!" 
ALGORITHM = os.environ.get("ALGORITHM") or "HS256" ACCESS_TOKEN_EXPIRE_MINUTES = os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES") or 30 @@ -51,16 +49,15 @@ def get_password_hash(password: str): return bcrypt.hashpw(password.encode('utf-8'), salt) -def get_user(username: str | None): - with Session(engine) as session: - # SQLModel.metadata.create_all(engine) - result = session.exec(select(User).where(User.username == username)) - user = result.first() - return user +async def get_user(username: str | None): + session = async_session() + result = await session.execute(select(User).where(User.username == username)) + user = result.scalar() + return user -def authenticate_user(username: str, password: str): - user = get_user(username) +async def authenticate_user(username: str, password: str): + user = await get_user(username) if not user: return False if not verify_password(password, user.password): @@ -125,7 +122,7 @@ async def get_current_active_user( async def login_for_access_token( form_data: Annotated[OAuth2PasswordRequestForm, Depends()], ) -> Token: - user = authenticate_user(form_data.username, form_data.password) + user = await authenticate_user(form_data.username, form_data.password) if not user: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, diff --git a/app/auth/models.py b/app/auth/models.py index 47bed2c..b320859 100644 --- a/app/auth/models.py +++ b/app/auth/models.py @@ -1,4 +1,5 @@ from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import Mapped from ..db import Base @@ -6,8 +7,8 @@ class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True) - username = Column(String) - email = Column(String) - full_name = Column(String) - password = Column(String) + id: Mapped[int] = Column(Integer, primary_key=True) + username: Mapped[str] = Column(String) + email: Mapped[str] = Column(String) + full_name: Mapped[str | None] = Column(String) + password: Mapped[str] = Column(String) diff --git a/poetry.lock b/poetry.lock index 28efa4d..1eee1fd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -84,13 +84,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "argcomplete" -version = "3.2.2" +version = "3.2.3" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" files = [ - {file = "argcomplete-3.2.2-py3-none-any.whl", hash = "sha256:e44f4e7985883ab3e73a103ef0acd27299dbfe2dfed00142c35d4ddd3005901d"}, - {file = "argcomplete-3.2.2.tar.gz", hash = "sha256:f3e49e8ea59b4026ee29548e24488af46e30c9de57d48638e24f54a1ea1000a2"}, + {file = "argcomplete-3.2.3-py3-none-any.whl", hash = "sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c"}, + {file = "argcomplete-3.2.3.tar.gz", hash = "sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23"}, ] [package.extras] @@ -1868,21 +1868,6 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] -[[package]] -name = "sqlmodel" -version = "0.0.16" -description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." 
-optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "sqlmodel-0.0.16-py3-none-any.whl", hash = "sha256:b972f5d319580d6c37ecc417881f6ec4d1ad3ed3583d0ac0ed43234a28bf605a"}, - {file = "sqlmodel-0.0.16.tar.gz", hash = "sha256:966656f18a8e9a2d159eb215b07fb0cf5222acfae3362707ca611848a8a06bd1"}, -] - -[package.dependencies] -pydantic = ">=1.10.13,<3.0.0" -SQLAlchemy = ">=2.0.0,<2.1.0" - [[package]] name = "starlette" version = "0.36.3" @@ -2156,4 +2141,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "38fcef3de3fcd9356d3242f54911cfb447dfd0b62bcbe9c9c88171f2d37d97d8" +content-hash = "b9613adfafdf0e8a3e83862279aa76f8e6dac6be4d8a046484637de1ba3baf64" diff --git a/pyproject.toml b/pyproject.toml index bfba312..47757ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,6 @@ readme = "README.md" python = "^3.11" fastapi = "^0.110" uvicorn = "^0.27" -sqlmodel = "0.0.16" #psycopg2-binary = "^2" redis = "^4" celery = "^5" From 17759681cd190c1f943e704d08970e7177eed3bd Mon Sep 17 00:00:00 2001 From: Payam Date: Fri, 8 Mar 2024 11:38:00 +0330 Subject: [PATCH 17/18] fix: changed celery task to fully async --- app/tasks/tasks.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/app/tasks/tasks.py b/app/tasks/tasks.py index f1863c4..318f3e6 100644 --- a/app/tasks/tasks.py +++ b/app/tasks/tasks.py @@ -1,21 +1,23 @@ +import asyncio + from celery.utils.log import get_task_logger -from sqlmodel import Session, select +from sqlalchemy import update from ..celery import celery_app -from ..db import engine +from ..db import async_session from ..songs.models import Song logger = get_task_logger(__name__) +async def get_songs(): + async with async_session() as session: + await session.execute(update(Song).where(Song.id == 1).values(year=Song.year + 1)) + await session.commit() + + @celery_app.task def sample_task() -> None: logger.info("Doing some sample task 😄") - with Session(engine) as session: - songs = session.exec(select(Song)).all() - [ - logger.info( - f"Song Record : {song.name},{song.artist},{song.year},{song.id},{song.description}" - ) - for song in songs - ] + loop = asyncio.get_event_loop() + loop.run_until_complete(get_songs()) From 263672323b3d56757728d67940ec70f742d824b9 Mon Sep 17 00:00:00 2001 From: Payam Date: Fri, 8 Mar 2024 12:17:19 +0330 Subject: [PATCH 18/18] doc: updated README --- README.md | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 36ffd51..6aa1000 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,7 @@ # FastAPI + SQLModel + Alembic + Celery + MongoDB + Redis + jwt Auth -This project is an opinionated boilerplate for **FastAPI** micro framework that uses **_SQLAlchemy_**, -_**SQLModel**_, **_PostgresSQL_**, _**Alembic**_, **_Celery_**, **_MongoDB_**, _**Redis**_, **_Docker_** and * -*_jwt Authentication_**. You can use this ready to +This project is an opinionated boilerplate for **FastAPI** micro framework that uses, +_**Asynchronous SQLAlchemy**_, **_PostgresSQL_**, _**Alembic**_, **_Celery_**, **_MongoDB_**, _**Redis**_, **_Docker_** and **_jwt Authentication_**. You can use this ready to use sample and don't worry about CI pipelines and running database migrations and tests inside a FastAPI project. 
 ## Add new tables to PostgresSQL database :
@@ -22,13 +21,16 @@
 Create `__init__.py` file and an empty `models.py` file inside the folder and paste this sample content inside `models.py` file:

 ```python
-from sqlmodel import Field, SQLModel
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.orm import Mapped, declarative_base

+Base = declarative_base()

-class Artist(SQLModel, table=True):
-    id: int = Field(default=None, nullable=False, primary_key=True)
-    name: str
-    city: str
+class Artist(Base):
+    __tablename__ = "artists"
+    id: Mapped[int] = Column(Integer, primary_key=True)
+    name: Mapped[str] = Column(String)
+    city: Mapped[str] = Column(String)
 ```

 go to `migrations/env.py` folder in root directory and add this content to it:
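
Once the model exists, it usually also needs a route. The sketch below is not part of this patch series; it is a hedged example of what a handler for the new table might look like, following the same async pattern as the existing city and tag handlers. The module path `app/artists/handlers.py`, the `/artists` route, and the `ArtistCreate`/`ArtistRead` schemas are illustrative names only and do not exist in the repository:

```python
# app/artists/handlers.py -- illustrative sketch, assuming the new app folder
# lives next to app/db.py like the existing songs/auth packages.
from fastapi import APIRouter, Depends
from pydantic import BaseModel, ConfigDict
from sqlalchemy.ext.asyncio import AsyncSession

from ..db import get_db_session
from .models import Artist  # hypothetical: the Artist model defined above

router = APIRouter()


class ArtistCreate(BaseModel):
    name: str
    city: str


class ArtistRead(ArtistCreate):
    # from_attributes lets the response model be built from the ORM object
    model_config = ConfigDict(from_attributes=True)

    id: int


@router.post("/artists", response_model=ArtistRead)
async def create_artist(
    *, session: AsyncSession = Depends(get_db_session), artist: ArtistCreate
):
    new_artist = Artist(name=artist.name, city=artist.city)
    session.add(new_artist)
    await session.commit()             # persist the row
    await session.refresh(new_artist)  # reload server-generated fields such as id
    return new_artist
```

The router would still have to be included in the FastAPI app with `app.include_router(router)`. After the model's metadata is wired into the Alembic environment (assumed here to live in the project's `alembic/` directory), a migration can be autogenerated and applied with the standard Alembic commands, e.g. `alembic revision --autogenerate -m "add artists table"` followed by `alembic upgrade head`.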