2 changes: 1 addition & 1 deletion .env
@@ -1,7 +1,7 @@
POSTGRES_DB=foo
POSTGRES_USER=payam
POSTGRES_PASSWORD=payam
DATABASE_URL=postgresql://payam:payam@pdb:5432/foo
DATABASE_URL=postgresql+asyncpg://payam:payam@pdb:5432/foo
# Redis
REDIS_HOST=redis
REDIS_PORT=6379
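
The switch to the `postgresql+asyncpg://` URL implies the app connects through SQLAlchemy's async engine. A minimal sketch of the matching engine/session setup, assuming SQLAlchemy 2.x with `asyncpg` installed (the variable names here are illustrative, not taken from this repo):

```python
import os

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

# Reads the same DATABASE_URL defined in .env, e.g. postgresql+asyncpg://payam:payam@pdb:5432/foo
engine = create_async_engine(os.environ["DATABASE_URL"])

# Factory that hands out AsyncSession instances to request handlers
async_session = async_sessionmaker(engine, expire_on_commit=False)
```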
3 changes: 2 additions & 1 deletion .gitignore
@@ -43,4 +43,5 @@ logs*
.vscode/
celerybeat-schedule.bak
celerybeat-schedule.dat
celerybeat-schedule.dir
celerybeat-schedule.dir
.database.db
10 changes: 6 additions & 4 deletions Dockerfile
@@ -2,14 +2,15 @@
FROM python:3.11-slim

# set working directory
WORKDIR /app/
WORKDIR /files/

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

# Install dependencies
RUN apt-get update && \
apt-get install -y dos2unix &&\
apt-get install -y --no-install-recommends curl && \
curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \
ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry && \
@@ -19,7 +20,7 @@ RUN apt-get update && \
rm -rf /var/lib/apt/lists/*

# Copy only the necessary files for dependency installation
COPY pyproject.toml poetry.lock* /app/
COPY pyproject.toml poetry.lock* /files/

# Allow installing dev dependencies to run tests
ARG INSTALL_DEV=false
@@ -29,7 +30,8 @@ RUN if [ "$INSTALL_DEV" = "true" ] ; then poetry install --no-root ; else poetry
COPY . .

# set executable permissions in a single RUN command
RUN chmod +x /app/scripts/start.sh /app/scripts/prestart.sh
RUN chmod +x /files/scripts/start.sh /files/scripts/prestart.sh
RUN dos2unix /files/scripts/start.sh /files/scripts/prestart.sh

# define the command to run the application
CMD ["/app/scripts/start.sh"]
CMD ["/files/scripts/start.sh"]
18 changes: 10 additions & 8 deletions README.md
@@ -1,8 +1,7 @@
# FastAPI + SQLModel + Alembic + Celery + MongoDB + Redis + jwt Auth

This project is an opinionated boilerplate for **FastAPI** micro framework that uses **_SQLAlchemy_**,
_**SQLModel**_, **_PostgresSQL_**, _**Alembic**_, **_Celery_**, **_MongoDB_**, _**Redis**_, **_Docker_** and *
*_jwt Authentication_**. You can use this ready to
This project is an opinionated boilerplate for the **FastAPI** micro-framework that uses
_**Asynchronous SQLAlchemy**_, **_PostgreSQL_**, _**Alembic**_, **_Celery_**, **_MongoDB_**, _**Redis**_, **_Docker_** and **_JWT Authentication_**. You can use this ready-to-use
sample without worrying about CI pipelines, running database migrations, or tests inside a FastAPI project.

## Add new tables to the PostgreSQL database:
@@ -22,13 +21,16 @@ Create an `__init__.py` file and an empty `models.py` file inside the folder
and paste this sample content into the `models.py` file:

```python
from sqlmodel import Field, SQLModel
from sqlalchemy import Integer, String
from sqlalchemy.orm import Mapped, declarative_base, mapped_column

Base = declarative_base()

class Artist(SQLModel, table=True):
id: int = Field(default=None, nullable=False, primary_key=True)
name: str
city: str

class Artist(Base):
    # __tablename__ is required for a declarative model; "artists" is just a sample name
    __tablename__ = "artists"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String)
    city: Mapped[str] = mapped_column(String)
```
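
For context, a hedged sketch of how a model defined this way might be used with an async session (assuming the `async_session` factory from the engine setup shown earlier; the data values are illustrative):

```python
from sqlalchemy import select


async def create_and_list_artists() -> None:
    async with async_session() as session:
        session.add(Artist(name="Nina Simone", city="Tryon"))
        await session.commit()

        result = await session.execute(select(Artist))
        for artist in result.scalars():
            print(artist.id, artist.name, artist.city)
```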

Go to the `migrations/env.py` file in the root directory and add this content to it:
21 changes: 13 additions & 8 deletions alembic.ini
@@ -2,7 +2,7 @@

[alembic]
# path to migration scripts
script_location = migrations
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
@@ -14,9 +14,9 @@ prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

@@ -34,10 +34,10 @@ prepend_sys_path = .
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
@@ -58,9 +58,8 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = driver://user:pass@localhost/dbname

#sqlalchemy.url = sqlite+aiosqlite:///database.db
#sqlalchemy.url = postgresql+asyncpg://payam:payam@pdb:5432/foo

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
@@ -73,6 +72,12 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
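
With `script_location` now pointing at `alembic/` and `sqlalchemy.url` left commented out (env.py reads `DATABASE_URL` instead), revisions are generated and applied the usual way. A hedged sketch using Alembic's Python API, assuming it is run from the project root and with an illustrative revision message:

```python
from alembic import command
from alembic.config import Config

# Load alembic.ini from the project root
alembic_cfg = Config("alembic.ini")

# Autogenerate a revision by diffing target_metadata (wired up in alembic/env.py) against the database
command.revision(alembic_cfg, message="add artists table", autogenerate=True)

# Apply all pending migrations
command.upgrade(alembic_cfg, "head")
```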
1 change: 1 addition & 0 deletions alembic/README
@@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.
54 changes: 32 additions & 22 deletions migrations/env.py → alembic/env.py
@@ -1,22 +1,22 @@
import asyncio
import os
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel # NEW
from app.auth.models import User # NEW
from app.songs.models import Song, Tag, SongTag # NEW
from app.db import Base
from app.auth.models import User # New
from app.songs.models import Song, City, Tag, SongTag # New

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option(
"sqlalchemy.url", os.environ.get("DATABASE_URL") or "sqlite:///database.db"
) # NEW
# config.set_main_option('sqlalchemy.url', "sqlite:///database.db") # NEW


# sqlite+aiosqlite:///database.db
"sqlalchemy.url", os.environ.get("DATABASE_URL") or "sqlite+aiosqlite:///database.db"
)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
@@ -26,8 +26,7 @@
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None
target_metadata = SQLModel.metadata # NEW
target_metadata = Base.metadata


# other values from the config, defined by the needs of env.py,
@@ -48,9 +47,9 @@ def run_migrations_offline() -> None:
script output.

"""
# url = config.get_main_option("sqlalchemy.url") #NEW
url = config.get_main_option("sqlalchemy.url")
context.configure(
# url=url, #NEW
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
@@ -60,24 +59,35 @@ def run_migrations_offline() -> None:
context.run_migrations()


def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
def do_run_migrations(connection: Connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata)

with context.begin_transaction():
context.run_migrations()

In this scenario we need to create an Engine

async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.

"""
connectable = engine_from_config(

connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)

await connectable.dispose()


def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""

with context.begin_transaction():
context.run_migrations()
asyncio.run(run_async_migrations())


if context.is_offline_mode():
1 change: 0 additions & 1 deletion migrations/script.py.mako → alembic/script.py.mako
@@ -9,7 +9,6 @@ from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel # NEW
${imports if imports else ""}

# revision identifiers, used by Alembic.
69 changes: 69 additions & 0 deletions alembic/versions/180cc3782c77_init.py
@@ -0,0 +1,69 @@
"""init

Revision ID: 180cc3782c77
Revises:
Create Date: 2024-03-07 18:57:29.839982

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '180cc3782c77'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('cities',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('full_name', sa.String(), nullable=True),
sa.Column('password', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('songs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('artist', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('year', sa.Integer(), nullable=True),
sa.Column('city_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['city_id'], ['cities.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('song_tag',
sa.Column('song_id', sa.Integer(), nullable=False),
sa.Column('tag_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['song_id'], ['songs.id'], ),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ),
sa.PrimaryKeyConstraint('song_id', 'tag_id')
)
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('song_tag')
op.drop_table('songs')
op.drop_table('users')
op.drop_table('tags')
op.drop_table('cities')
# ### end Alembic commands ###