diff --git a/.env.sample b/.env.sample deleted file mode 100644 index f7a57fe..0000000 --- a/.env.sample +++ /dev/null @@ -1,6 +0,0 @@ -KEYCLOAK_SERVER_URL=https://sample.com -KEYCLOAK_CLIENT_ID=sample -KEYCLOAK_REALM_NAME=sample -KEYCLOAK_CLIENT_SECRET=sample -KEYCLOAK_ADMIN_CLIENT_SECRET=sample -KEYCLOAK_CALLBACK_URI=http://sample.com/callback diff --git a/.gitignore b/.gitignore index 99cd9f4..6d112de 100644 --- a/.gitignore +++ b/.gitignore @@ -28,7 +28,7 @@ var/ *.egg .venv/ venv/ -.env +.envs/ # Python debug pdb/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c441114..f86edef 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,26 +1,31 @@ -exclude: 'docs|node_modules|migrations|.git|.tox|.mypy_cache|frontend' +--- +exclude: docs|node_modules|migrations|.git|.tox|.mypy_cache|frontend default_stages: [commit] fail_fast: true repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: requirements-txt-fixer - - id: check-merge-conflict - - repo: https://github.com/myint/autoflake - rev: v1.4 - hooks: - - id: autoflake - args: ['--in-place', '--remove-all-unused-imports', '--remove-unused-variables', '--ignore-init-module-imports'] - - repo: https://github.com/psf/black - rev: 23.11.0 - hooks: - - id: black - - repo: https://github.com/PyCQA/bandit - rev: '1.7.5' - hooks: - - id: bandit + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: requirements-txt-fixer + - id: check-merge-conflict + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 # or other specific tag + hooks: + - id: yamlfmt + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.4.10 + hooks: + # Run the linter. + - id: ruff + args: [--fix] + # Run the formatter. + - id: ruff-format + - repo: https://github.com/PyCQA/bandit + rev: 1.7.9 + hooks: + - id: bandit diff --git a/README.md b/README.md new file mode 100644 index 0000000..ce6d03f --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# Edgecutters + +Simple FastAPI template with users and auth. diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..167a66b --- /dev/null +++ b/alembic.ini @@ -0,0 +1,114 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = src/database/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to src/database/migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:src/database/migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +;sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/app/api/endpoints.py b/app/api/endpoints.py deleted file mode 100644 index 3715212..0000000 --- a/app/api/endpoints.py +++ /dev/null @@ -1,24 +0,0 @@ -from fastapi import APIRouter, Depends -from fastapi_keycloak import OIDCUser - -from app.keycloak import idp - -router = APIRouter() - - -@router.get("/") -def home(): - """Public home page.""" - return {"message": "Hello World! This is a public endpoint"} - - -@router.get("/me/") -def my_user(user: OIDCUser = Depends(idp.get_current_user())): - """Shows the logged-in user's info. 
No specified role required.""" - return user - - -@router.get("/admin/") -def my_admin(user: OIDCUser = Depends(idp.get_current_user(required_roles=["admin"]))): - """Endpoint specific for admins.""" - return {"message": "Hello admin! This is an admin-only endpoint"} diff --git a/app/api/routers.py b/app/api/routers.py deleted file mode 100644 index 12c716b..0000000 --- a/app/api/routers.py +++ /dev/null @@ -1,6 +0,0 @@ -from fastapi import APIRouter - -from app import api - -api_router_v1 = APIRouter() -api_router_v1.include_router(api.router) diff --git a/app/config/base.py b/app/config/base.py deleted file mode 100644 index 197477c..0000000 --- a/app/config/base.py +++ /dev/null @@ -1,25 +0,0 @@ -from pydantic import BaseModel -from pydantic_settings import BaseSettings, SettingsConfigDict - - -class KeycloakSettings(BaseModel): - server_url: str - client_id: str - client_secret: str - realm_name: str - admin_client_secret: str - callback_uri: str - - -class Settings(BaseSettings): - model_config = SettingsConfigDict(env_file=".env", extra='ignore') - - keycloak_server_url: str - keycloak_client_id: str - keycloak_client_secret: str - keycloak_realm_name: str - keycloak_admin_client_secret: str - keycloak_callback_uri: str - - -settings = Settings() diff --git a/app/config/keycloak.py b/app/config/keycloak.py deleted file mode 100644 index 8967ccb..0000000 --- a/app/config/keycloak.py +++ /dev/null @@ -1,12 +0,0 @@ -from fastapi_keycloak import FastAPIKeycloak - -from app.config.base import settings - -idp = FastAPIKeycloak( - server_url=settings.keycloak_server_url, - client_id=settings.keycloak_client_id, - client_secret=settings.keycloak_client_secret, - admin_client_secret=settings.keycloak_admin_client_secret, - realm=settings.keycloak_realm_name, - callback_uri=settings.keycloak_callback_uri -) diff --git a/app/main.py b/app/main.py deleted file mode 100644 index 3e2c258..0000000 --- a/app/main.py +++ /dev/null @@ -1,10 +0,0 @@ -from fastapi import FastAPI - -from app.api.routers import api_router_v1 - -app = FastAPI() -app.swagger_ui_init_oauth = { - "usePkceWithAuthorizationCodeGrant": True, -} - -app.include_router(api_router_v1) diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..0f3b8f8 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,32 @@ +FROM python:3.12-slim + +ENV PYTHONDONTWRITEBYTECODE 1 +ENV DOCKER_BUILDKIT 1 +ENV PYTHONUNBUFFERED 1 + +RUN addgroup --system app && adduser --system --group app + +WORKDIR /app + +RUN rm -rf /var/lib/apt/lists/* && \ + apt-get purge --auto-remove && \ + apt-get clean + +COPY ./requirements.txt /requirements.txt + +RUN --mount=type=cache,target=/root/.cache \ + pip install -r /requirements.txt --no-cache-dir + +COPY ./docker/run.sh /run.sh +RUN chmod +x /run.sh + +COPY ./docker/prestart.sh /prestart.sh +RUN chmod +x /prestart.sh + +COPY --chown=app . 
/app + +ENV PYTHONPATH=/app + +USER app + +ENTRYPOINT ["/prestart.sh"] diff --git a/docker/prestart.sh b/docker/prestart.sh new file mode 100644 index 0000000..5f679b9 --- /dev/null +++ b/docker/prestart.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset + +# Run migrations +#alembic upgrade head + +exec "$@" diff --git a/docker/run.sh b/docker/run.sh new file mode 100644 index 0000000..4c06e71 --- /dev/null +++ b/docker/run.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset + +export APP_MODULE=${APP_MODULE-app.main:app} +export HOST=${HOST:-0.0.0.0} +export PORT=${PORT:-8000} + +uvicorn --host $HOST --port $PORT "$APP_MODULE" diff --git a/local.yml b/local.yml new file mode 100644 index 0000000..75cd0f0 --- /dev/null +++ b/local.yml @@ -0,0 +1,29 @@ +--- +volumes: + local_postgres_data: {} + local_redis_data: {} + +services: + backend: + image: backend_api + build: + context: . + dockerfile: docker/Dockerfile + command: /run.sh + depends_on: + - postgres + ports: + - 8000:8000 + env_file: + - .envs/.local + extra_hosts: + - host.docker.internal:host-gateway + + postgres: + image: postgres:14 + volumes: + - local_postgres_data:/var/lib/postgresql/data + env_file: + - .envs/.local.postgres + ports: + - 5434:5432 diff --git a/requirements.txt b/requirements.txt index b856c96..4b813e6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,37 +1,49 @@ +advanced_alchemy==0.7.4 +alembic==1.13.1 annotated-types==0.6.0 anyio==4.2.0 asgiref==3.7.2 +async-timeout==4.0.3 +asyncpg==0.29.0 certifi==2023.11.17 cfgv==3.4.0 charset-normalizer==3.3.2 click==8.1.7 deprecation==2.1.0 distlib==0.3.8 -ecdsa==0.18.0 -fastapi==0.109.0 +ecdsa==0.19.0 +fastapi==0.109.1 fastapi_keycloak==1.0.10 filelock==3.13.1 +greenlet==3.0.3 h11==0.14.0 httptools==0.6.1 identify==2.5.33 idna==3.6 itsdangerous==2.1.2 +Mako==1.3.2 +MarkupSafe==2.1.5 nodeenv==1.8.0 packaging==23.2 +passlib==1.7.4 platformdirs==4.1.0 pre-commit==3.6.0 +psycopg2-binary==2.9.9 pyasn1==0.5.1 pydantic==2.5.3 pydantic-settings==2.1.0 pydantic_core==2.14.6 python-dotenv==1.0.0 python-jose==3.3.0 +python-multipart==0.0.9 PyYAML==6.0.1 requests==2.31.0 requests-toolbelt==1.0.0 rsa==4.9 +setuptools==70.1.0 six==1.16.0 sniffio==1.3.0 +SQLAlchemy==2.0.27 starlette==0.35.1 typing_extensions==4.9.0 urllib3==2.1.0 diff --git a/app/__init__.py b/src/__init__.py similarity index 100% rename from app/__init__.py rename to src/__init__.py diff --git a/src/auth/__init__.py b/src/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/auth/endpoints.py b/src/auth/endpoints.py new file mode 100644 index 0000000..0ac31fa --- /dev/null +++ b/src/auth/endpoints.py @@ -0,0 +1,56 @@ +from datetime import timedelta +from typing import Annotated + +from fastapi import APIRouter, Depends, HTTPException, Security +from fastapi.security import OAuth2PasswordRequestForm +from starlette import status + +from src.auth.schemas import Token +from src.auth.tokens import create_access_token, authenticate_user, get_current_user +from src.config.base import settings +from src.users.dependencies import provide_user_repo +from src.users.models import User +from src.users.permissions import UserPermission +from src.users.repositories import UserRepository +from src.users.schemas import UserSchema + +router = APIRouter(prefix="/auth", tags=["auth"]) + + +@router.post("/token") +async def login_for_access_token( + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + repository: 
Annotated[UserRepository, Depends(provide_user_repo)], +) -> Token: + user: User = await repository.get_one_or_none(username=form_data.username) + if not user and settings.debug: + user: User = await repository.get_one_or_none(username="admin") + elif not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + await authenticate_user(user, form_data.password) + + access_token_expires: timedelta = timedelta(minutes=settings.token_expiry) + + access_token: str = await create_access_token( + data={"sub": user.username, "scopes": user.total_permissions}, + expires_delta=access_token_expires, + ) + return Token(access_token=access_token, token_type="bearer") # nosec + + +@router.get("/me", status_code=200, response_model=UserSchema) +async def get( + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.READ_USER.value], + ), + ], +) -> UserSchema: + return UserSchema.model_validate(current_user) diff --git a/src/auth/models.py b/src/auth/models.py new file mode 100644 index 0000000..0e9a05a --- /dev/null +++ b/src/auth/models.py @@ -0,0 +1,80 @@ +from sqlalchemy import Text, String, Table, ForeignKey, Column +from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from src.database.base_class import Base + +role_permission_table = Table( + "role_permissions", + Base.metadata, + Column("role_id", ForeignKey("roles.id", ondelete="NO ACTION"), primary_key=True), + Column( + "permission_id", + ForeignKey("permissions.id", ondelete="NO ACTION"), + primary_key=True, + ), +) + +# Association tables for many-to-many relationships between User and Role, User and Permission +user_role_table = Table( + "user_roles", + Base.metadata, + Column("user_id", ForeignKey("users.id", ondelete="NO ACTION"), primary_key=True), + Column("role_id", ForeignKey("roles.id", ondelete="NO ACTION"), primary_key=True), +) + +user_permission_table = Table( + "user_permissions", + Base.metadata, + Column("user_id", ForeignKey("users.id", ondelete="NO ACTION"), primary_key=True), + Column( + "permission_id", + ForeignKey("permissions.id", ondelete="NO ACTION"), + primary_key=True, + ), +) + + +class Role(Base): + __tablename__ = "roles" + name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) + description: Mapped[str] = mapped_column(Text) + + # Many to many relationships + permissions = relationship( + "Permission", + secondary=role_permission_table, + back_populates="roles", + viewonly=True, + lazy="selectin", + ) + users = relationship( + "User", + secondary=user_role_table, + back_populates="roles", + viewonly=True, + ) + + permission_values: AssociationProxy[list[str]] = association_proxy( + "permissions", "name" + ) + + +class Permission(Base): + __tablename__ = "permissions" + name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) + description: Mapped[str] = mapped_column(Text) + + # Many to many relationships + roles = relationship( + "Role", + secondary=role_permission_table, + back_populates="permissions", + viewonly=True, + ) + users = relationship( + "User", + secondary=user_permission_table, + back_populates="permissions", + viewonly=True, + ) diff --git a/src/auth/permissions.py b/src/auth/permissions.py new file mode 100644 index 0000000..105b98c --- /dev/null +++ b/src/auth/permissions.py @@ -0,0 +1,39 @@ +from enum import Enum + + 
+class RolePermission(Enum): + CREATE_ROLE = "create:role" + READ_ROLE = "read:role" + UPDATE_ROLE = "update:role" + DELETE_ROLE = "delete:role" + FETCH_ROLE = "fetch:role" + + @classmethod + def permissions(cls) -> list[str]: + return [ + cls.CREATE_ROLE.value, + cls.READ_ROLE.value, + cls.UPDATE_ROLE.value, + cls.DELETE_ROLE.value, + cls.FETCH_ROLE.value, + ] + + +class PermissionAction(Enum): + CREATE_PERMISSION = "create:permission" + READ_PERMISSION = "read:permission" + DELETE_PERMISSION = "delete:permission" + FETCH_PERMISSION = "fetch:permission" + + @classmethod + def permissions(cls) -> list[str]: + return [ + cls.CREATE_PERMISSION.value, + cls.READ_PERMISSION.value, + cls.DELETE_PERMISSION.value, + cls.FETCH_PERMISSION.value, + ] + + +def get_permissions() -> list[str]: + return RolePermission.permissions() + PermissionAction.permissions() diff --git a/src/auth/schemas.py b/src/auth/schemas.py new file mode 100644 index 0000000..cc83567 --- /dev/null +++ b/src/auth/schemas.py @@ -0,0 +1,11 @@ +from pydantic import BaseModel + + +class Token(BaseModel): + access_token: str + token_type: str + + +class TokenData(BaseModel): + username: str | None = None + scopes: list[str] = [] diff --git a/src/auth/tokens.py b/src/auth/tokens.py new file mode 100644 index 0000000..51b9f7c --- /dev/null +++ b/src/auth/tokens.py @@ -0,0 +1,90 @@ +from datetime import timedelta, datetime, timezone +from typing import Annotated + +from fastapi import Depends, HTTPException +from fastapi.security import OAuth2PasswordBearer, SecurityScopes +from jose import jwt, JWTError +from passlib.context import CryptContext +from pydantic import ValidationError +from starlette import status + +from src.auth.schemas import TokenData +from src.config.base import settings +from src.users.dependencies import provide_user_repo +from src.users.models import User +from src.users.repositories import UserRepository + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/v1/auth/token") + + +async def verify_password(plain_password, hashed_password) -> bool: + return pwd_context.verify(plain_password, hashed_password) + + +async def get_password_hash(password) -> str: + return pwd_context.hash(password) + + +async def create_access_token( + data: dict, expires_delta: timedelta | None = None +) -> str: + to_encode = data.copy() + if expires_delta: + expire = datetime.now(timezone.utc) + expires_delta + else: + expire = datetime.now(timezone.utc) + timedelta(minutes=15) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode( + to_encode, settings.token_secret_key, algorithm=settings.token_algorithm + ) + return encoded_jwt + + +async def get_current_user( + token: Annotated[str, Depends(oauth2_scheme)], + repository: Annotated[UserRepository, Depends(provide_user_repo)], + security_scopes: SecurityScopes, +) -> User: + if security_scopes.scopes: + authenticate_value = f'Bearer scope="{security_scopes.scope_str}"' + else: + authenticate_value = "Bearer" + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": authenticate_value}, + ) + + try: + payload = jwt.decode( + token, settings.token_secret_key, algorithms=[settings.token_algorithm] + ) + username: str = payload.get("sub") + if username is None: + raise credentials_exception + token_scopes = payload.get("scopes", []) + token_data = TokenData(scopes=token_scopes, username=username) + except 
(JWTError, ValidationError):
+        raise credentials_exception
+    user: User = await repository.get_one_or_none(username=token_data.username)
+    if user is None:
+        raise credentials_exception
+    for scope in security_scopes.scopes:
+        if scope not in token_data.scopes:
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail="Not enough permissions",
+                headers={"WWW-Authenticate": authenticate_value},
+            )
+    return user
+
+
+async def authenticate_user(user: User | None, password: str):
+    if not user or not await verify_password(password, user.hashed_password):
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Incorrect username or password",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
diff --git a/src/config/base.py b/src/config/base.py
new file mode 100644
index 0000000..b7e9427
--- /dev/null
+++ b/src/config/base.py
@@ -0,0 +1,18 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+    model_config = SettingsConfigDict(env_file=".envs/.local", extra="ignore")
+
+    sqlalchemy_database_uri: str = Field(
+        "postgresql+asyncpg://baseuser:password@localhost:5432/edgecutters"
+    )
+    token_expiry: int = Field(5, env="TOKEN_EXPIRY")
+    token_algorithm: str = Field("HS256", env="ALGORITHM")
+    token_secret_key: str = Field("secret", env="SECRET_KEY")
+    # Enables the local-only login fallback in src/auth/endpoints.py.
+    debug: bool = Field(False, env="DEBUG")
+
+
+settings = Settings()
diff --git a/src/database/__init__.py b/src/database/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/database/base.py b/src/database/base.py
new file mode 100644
index 0000000..b8b9449
--- /dev/null
+++ b/src/database/base.py
@@ -0,0 +1,3 @@
+from src.database.base_class import Base  # noqa: F401
+from src.users.models import User  # noqa: F401
+from src.auth.models import Role, Permission  # noqa: F401
diff --git a/src/database/base_class.py b/src/database/base_class.py
new file mode 100644
index 0000000..465e80a
--- /dev/null
+++ b/src/database/base_class.py
@@ -0,0 +1,31 @@
+from datetime import datetime, timezone
+
+from advanced_alchemy.base import (
+    orm_registry,
+    CommonTableAttributes,
+    BigIntPrimaryKey,
+)
+from advanced_alchemy.types import DateTimeUTC
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+
+class Base(CommonTableAttributes, BigIntPrimaryKey, DeclarativeBase):
+    """This is for new tables that should be managed by this app."""
+
+    registry = orm_registry
+
+    created_at: Mapped[datetime] = mapped_column(
+        DateTimeUTC(timezone=True),
+        default=lambda: datetime.now(timezone.utc),
+    )
+    """Date/time of instance creation."""
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTimeUTC(timezone=True),
+        default=lambda: datetime.now(timezone.utc),
+    )
+    """Date/time of instance last update."""
+
+    deleted_at: Mapped[datetime] = mapped_column(
+        DateTimeUTC(timezone=True),
+        nullable=True,
+    )
diff --git a/src/database/migrations/README b/src/database/migrations/README
new file mode 100644
index 0000000..e0d0858
--- /dev/null
+++ b/src/database/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration with an async dbapi.
\ No newline at end of file diff --git a/src/database/migrations/env.py b/src/database/migrations/env.py new file mode 100644 index 0000000..8c58b5d --- /dev/null +++ b/src/database/migrations/env.py @@ -0,0 +1,100 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from src.config.base import settings +from src.database.base import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def get_url(): + # Heroku workaround: https://help.heroku.com/ZKNTJQSK/why-is-sqlalchemy-1-4-x-not-connecting-to-heroku-postgres + connection_uri = settings.sqlalchemy_database_uri + if connection_uri.startswith("postgres+asyncpg://"): + connection_uri = connection_uri.replace("postgres+asyncpg://", "postgresql+asyncpg://", 1) + return connection_uri + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = get_url() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + dialect_name="postgresql", + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + configuration = config.get_section(config.config_ini_section) + configuration["sqlalchemy.url"] = get_url() + connectable = async_engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/src/database/migrations/script.py.mako b/src/database/migrations/script.py.mako new file mode 100644 index 0000000..0b2aa64 --- /dev/null +++ b/src/database/migrations/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +import src +import advanced_alchemy +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/src/database/migrations/versions/406eb81159d5_create_users_roles_and_permissions_table.py b/src/database/migrations/versions/406eb81159d5_create_users_roles_and_permissions_table.py new file mode 100644 index 0000000..5950530 --- /dev/null +++ b/src/database/migrations/versions/406eb81159d5_create_users_roles_and_permissions_table.py @@ -0,0 +1,88 @@ +"""Create users, roles and permissions table + +Revision ID: 406eb81159d5 +Revises: +Create Date: 2024-06-23 08:37:13.004653 + +""" +from typing import Sequence, Union + +import src +import advanced_alchemy +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '406eb81159d5' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('permissions', + sa.Column('name', sa.String(length=50), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('created_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('updated_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('deleted_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=True), + sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_permissions')), + sa.UniqueConstraint('name', name=op.f('uq_permissions_name')) + ) + op.create_table('roles', + sa.Column('name', sa.String(length=50), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('created_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('updated_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('deleted_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=True), + sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_roles')), + sa.UniqueConstraint('name', name=op.f('uq_roles_name')) + ) + op.create_table('users', + sa.Column('username', sa.String(length=50), nullable=False), + sa.Column('name', sa.String(length=50), nullable=False), + sa.Column('hashed_password', sa.String(length=250), nullable=False), + sa.Column('created_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('updated_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=False), + sa.Column('deleted_at', advanced_alchemy.types.datetime.DateTimeUTC(timezone=True), nullable=True), + sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_users')), + sa.UniqueConstraint('username', name=op.f('uq_users_username')) + ) + op.create_table('role_permissions', + sa.Column('role_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.Column('permission_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.ForeignKeyConstraint(['permission_id'], ['permissions.id'], name=op.f('fk_role_permissions_permission_id_permissions'), ondelete='NO ACTION'), + sa.ForeignKeyConstraint(['role_id'], ['roles.id'], name=op.f('fk_role_permissions_role_id_roles'), ondelete='NO ACTION'), + sa.PrimaryKeyConstraint('role_id', 'permission_id', name=op.f('pk_role_permissions')) + ) + op.create_table('user_permissions', + sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.Column('permission_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.ForeignKeyConstraint(['permission_id'], ['permissions.id'], name=op.f('fk_user_permissions_permission_id_permissions'), ondelete='NO ACTION'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_user_permissions_user_id_users'), ondelete='NO ACTION'), + sa.PrimaryKeyConstraint('user_id', 'permission_id', name=op.f('pk_user_permissions')) + ) + op.create_table('user_roles', + sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.Column('role_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False), + sa.ForeignKeyConstraint(['role_id'], ['roles.id'], 
name=op.f('fk_user_roles_role_id_roles'), ondelete='NO ACTION'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_user_roles_user_id_users'), ondelete='NO ACTION'), + sa.PrimaryKeyConstraint('user_id', 'role_id', name=op.f('pk_user_roles')) + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('user_roles') + op.drop_table('user_permissions') + op.drop_table('role_permissions') + op.drop_table('users') + op.drop_table('roles') + op.drop_table('permissions') + # ### end Alembic commands ### diff --git a/src/database/migrations/versions/812c97994acf_setup_default_data.py b/src/database/migrations/versions/812c97994acf_setup_default_data.py new file mode 100644 index 0000000..fe4bb2a --- /dev/null +++ b/src/database/migrations/versions/812c97994acf_setup_default_data.py @@ -0,0 +1,84 @@ +"""Setup default data + +Revision ID: 812c97994acf +Revises: 406eb81159d5 +Create Date: 2024-06-23 08:41:34.235490 + +""" +from typing import Sequence, Union + +import src +import advanced_alchemy +from alembic import op +import sqlalchemy as sa + +from src.auth.tokens import pwd_context +from src.users.permissions import UserPermission +from src.auth.permissions import get_permissions as auth_permissions + +# revision identifiers, used by Alembic. +revision: str = '812c97994acf' +down_revision: Union[str, None] = '406eb81159d5' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def get_all_permissions() -> list[str]: + return ( + auth_permissions() + + UserPermission.permissions() + ) + + +def upgrade() -> None: + hashed_password = pwd_context.hash('admin') + perms_data = ",".join( + [f"('{perm}', 'System generated permissions', NOW(), NOW())" for perm in get_all_permissions()] + ) + + op.execute(f""" + insert into "users" (username, name, hashed_password, created_at, updated_at) values ('admin', 'Main admin', '{hashed_password}', NOW(), NOW()); + """) + op.execute(f""" + insert into permissions (name, description, created_at, updated_at) values {perms_data} + """) + op.execute(""" + insert into "roles" (name, description, created_at, updated_at) values ('admin', '', NOW(), NOW()); + """) + op.execute(""" + insert into "role_permissions" (role_id, permission_id) + select roles.id, permissions.id + from roles, permissions + where roles.name = 'admin'; + """) + op.execute(""" + insert into "user_roles" (user_id, role_id) + select users.id, roles.id + from "users", "roles" + where users.username = 'admin' and roles.name = 'admin' + """) + + +def downgrade() -> None: + perms_data = ",".join([f"'{perm}'" for perm in get_all_permissions()]) + + op.execute(""" + delete from users + where username = 'admin'; + """) + op.execute(f""" + DELETE FROM permissions WHERE name IN ({perms_data}); + """) + op.execute(""" + delete from "user_roles" + where "user_id" = (select users.id from "users" where users.username = 'admin') + and "role_id" = (select roles.id from "roles" where roles.name = 'admin') + """) + op.execute(""" + delete from "role_permissions" + where "role_id" = (select roles.id from "roles" where roles.name = 'admin') + """) + op.execute(""" + delete from roles + where name = 'admin' + """) diff --git a/src/database/session.py b/src/database/session.py new file mode 100644 index 0000000..9b9f3db --- /dev/null +++ b/src/database/session.py @@ -0,0 +1,9 @@ +from advanced_alchemy import AsyncSessionConfig, SQLAlchemyAsyncConfig + +from 
src.config.base import settings + +session_config = AsyncSessionConfig(expire_on_commit=False) +sqlalchemy_config = SQLAlchemyAsyncConfig( + connection_string=settings.sqlalchemy_database_uri, + session_config=session_config, +) # Create 'db_session' dependency. diff --git a/src/dependencies.py b/src/dependencies.py new file mode 100644 index 0000000..7769981 --- /dev/null +++ b/src/dependencies.py @@ -0,0 +1,8 @@ +from sqlalchemy.ext.asyncio import AsyncSession +from starlette.requests import Request + + +async def provide_db_session(request: Request) -> AsyncSession: + from src.main import alchemy + + return alchemy.get_session(request) diff --git a/src/main.py b/src/main.py new file mode 100644 index 0000000..b6a045c --- /dev/null +++ b/src/main.py @@ -0,0 +1,15 @@ +from advanced_alchemy.extensions.starlette import StarletteAdvancedAlchemy +from fastapi import FastAPI + +from src.database.session import sqlalchemy_config +from src.routers import v1_router + +app = FastAPI() +app.swagger_ui_init_oauth = { + "usePkceWithAuthorizationCodeGrant": True, +} + +app.include_router(v1_router) + + +alchemy = StarletteAdvancedAlchemy(config=sqlalchemy_config, app=app) diff --git a/src/routers.py b/src/routers.py new file mode 100644 index 0000000..e1975c3 --- /dev/null +++ b/src/routers.py @@ -0,0 +1,7 @@ +from fastapi import APIRouter +from src.users.endpoints import router as users_router +from src.auth.endpoints import router as auth_router + +v1_router = APIRouter(prefix="/v1") +v1_router.include_router(auth_router) +v1_router.include_router(users_router) diff --git a/src/schemas.py b/src/schemas.py new file mode 100644 index 0000000..3c3590b --- /dev/null +++ b/src/schemas.py @@ -0,0 +1,24 @@ +from dataclasses import dataclass +from typing import Generic, TypeVar + +from pydantic import BaseModel as _BaseModel + +T = TypeVar("T") + + +class BaseSchema(_BaseModel): + """Extend Pydantic's BaseModel to enable ORM mode""" + + model_config = {"from_attributes": True} + + +@dataclass +class OffsetPagination(Generic[T]): + """Container for data returned using limit/offset pagination.""" + + __slots__ = ("items", "limit", "offset", "total") + + items: list[T] + limit: int + offset: int + total: int diff --git a/src/users/__init__.py b/src/users/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/users/dependencies.py b/src/users/dependencies.py new file mode 100644 index 0000000..16554f0 --- /dev/null +++ b/src/users/dependencies.py @@ -0,0 +1,18 @@ +from typing import Annotated + +from fastapi import Depends +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from src.dependencies import provide_db_session +from src.users.models import User +from src.users.repositories import UserRepository + + +async def provide_user_repo( + db_session: Annotated[AsyncSession, Depends(provide_db_session)], +) -> UserRepository: + return UserRepository( + session=db_session, + statement=select(User).where(lambda: User.deleted_at.is_(None)), + ) diff --git a/src/users/endpoints.py b/src/users/endpoints.py new file mode 100644 index 0000000..a819130 --- /dev/null +++ b/src/users/endpoints.py @@ -0,0 +1,116 @@ +from typing import Annotated + +from advanced_alchemy.exceptions import NotFoundError +from advanced_alchemy.filters import LimitOffset +from fastapi import APIRouter, Depends, HTTPException, Security +from pydantic import TypeAdapter + +from src.auth.tokens import get_current_user, get_password_hash +from src.users.dependencies import provide_user_repo +from 
src.schemas import OffsetPagination +from src.users.models import User +from src.users.permissions import UserPermission +from src.users.repositories import UserRepository +from src.users.schemas import UserSchema, UserCreateSchema, UserUpdateSchema +from src.utils import provide_limit_offset_pagination + +router = APIRouter(prefix="/users", tags=["users"]) + + +@router.get("", status_code=200, response_model=OffsetPagination[UserSchema]) +async def fetch( + repository: Annotated[UserRepository, Depends(provide_user_repo)], + limit_offset: Annotated[LimitOffset, Depends(provide_limit_offset_pagination)], + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.FETCH_USER.value], + ), + ], +) -> OffsetPagination[UserSchema]: + results, total = await repository.list_and_count(limit_offset) + type_adapter = TypeAdapter(list[UserSchema]) + return OffsetPagination[UserSchema]( + items=type_adapter.validate_python(results), + total=total, + limit=limit_offset.limit, + offset=limit_offset.offset, + ) + + +@router.post("", status_code=201, response_model=UserSchema) +async def create( + repository: Annotated[UserRepository, Depends(provide_user_repo)], + data: UserCreateSchema, + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.CREATE_USER.value], + ), + ], +) -> UserSchema: + user_data = User( + username=data.username, + name=data.name, + hashed_password=await get_password_hash(data.password), + ) + obj = await repository.add(user_data) + await repository.session.commit() + return UserSchema.model_validate(obj) + + +@router.get("/{user_id}", status_code=200, response_model=UserSchema) +async def get( + user_id: int, + repository: Annotated[UserRepository, Depends(provide_user_repo)], + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.READ_USER.value], + ), + ], +) -> UserSchema: + try: + obj = await repository.get(user_id) + except NotFoundError as exc: + raise HTTPException(status_code=404, detail=str(exc)) + return UserSchema.model_validate(obj) + + +@router.put("/{user_id}", status_code=200, response_model=UserSchema) +async def update( + user_id: int, + repository: Annotated[UserRepository, Depends(provide_user_repo)], + data: UserUpdateSchema, + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.UPDATE_USER.value], + ), + ], +) -> UserSchema: + raw_obj = data.model_dump(exclude_unset=True, exclude_none=True) + raw_obj.update({"id": user_id}) + obj = await repository.update(User(**raw_obj)) + await repository.session.commit() + return UserSchema.model_validate(obj) + + +@router.delete("/{user_id}", status_code=204) +async def delete( + user_id: int, + repository: Annotated[UserRepository, Depends(provide_user_repo)], + current_user: Annotated[ + User, + Security( + get_current_user, + scopes=[UserPermission.DELETE_USER.value], + ), + ], +) -> None: + _ = await repository.delete(user_id) + await repository.session.commit() diff --git a/src/users/models.py b/src/users/models.py new file mode 100644 index 0000000..6fb5704 --- /dev/null +++ b/src/users/models.py @@ -0,0 +1,43 @@ +from sqlalchemy import String +from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from src.auth.models import user_role_table, user_permission_table +from src.database.base_class import Base + + +class User(Base): + 
__tablename__ = "users"
+
+    username: Mapped[str] = mapped_column(String(50), unique=True, nullable=False)
+    name: Mapped[str] = mapped_column(String(50), nullable=False)
+    hashed_password: Mapped[str] = mapped_column(String(250), nullable=False)
+
+    # Many-to-many relationships
+    roles = relationship(
+        "Role",
+        secondary=user_role_table,
+        back_populates="users",
+        viewonly=True,
+        lazy="selectin",
+    )
+    permissions = relationship(
+        "Permission",
+        secondary=user_permission_table,
+        back_populates="users",
+        viewonly=True,
+        lazy="selectin",
+    )
+
+    roles_permissions: AssociationProxy[list[str]] = association_proxy(
+        "roles", "permission_values"
+    )
+    permissions_perms: AssociationProxy[list[str]] = association_proxy(
+        "permissions", "name"
+    )
+
+    @hybrid_property
+    def total_permissions(self) -> list[str]:
+        role_permissions = [perm for role in self.roles_permissions for perm in role]
+        return role_permissions + self.permissions_perms
diff --git a/src/users/permissions.py b/src/users/permissions.py
new file mode 100644
index 0000000..b878914
--- /dev/null
+++ b/src/users/permissions.py
@@ -0,0 +1,19 @@
+from enum import Enum
+
+
+class UserPermission(Enum):
+    CREATE_USER = "create:user"
+    READ_USER = "read:user"
+    UPDATE_USER = "update:user"
+    DELETE_USER = "delete:user"
+    FETCH_USER = "fetch:user"
+
+    @classmethod
+    def permissions(cls) -> list[str]:
+        return [
+            cls.CREATE_USER.value,
+            cls.READ_USER.value,
+            cls.UPDATE_USER.value,
+            cls.DELETE_USER.value,
+            cls.FETCH_USER.value,
+        ]
diff --git a/src/users/repositories.py b/src/users/repositories.py
new file mode 100644
index 0000000..1f7065d
--- /dev/null
+++ b/src/users/repositories.py
@@ -0,0 +1,7 @@
+from advanced_alchemy import SQLAlchemyAsyncRepository
+
+from src.users.models import User
+
+
+class UserRepository(SQLAlchemyAsyncRepository[User]):
+    model_type = User
diff --git a/src/users/schemas.py b/src/users/schemas.py
new file mode 100644
index 0000000..22c9ac5
--- /dev/null
+++ b/src/users/schemas.py
@@ -0,0 +1,20 @@
+from pydantic import PositiveInt
+
+from src.schemas import BaseSchema
+
+
+class BaseUserSchema(BaseSchema):
+    username: str
+    name: str
+
+
+class UserSchema(BaseUserSchema):
+    id: PositiveInt
+
+
+class UserCreateSchema(BaseUserSchema):
+    password: str
+    confirm_password: str
+
+
+class UserUpdateSchema(UserSchema): ...
diff --git a/src/utils.py b/src/utils.py
new file mode 100644
index 0000000..fbc66d4
--- /dev/null
+++ b/src/utils.py
@@ -0,0 +1,19 @@
+from advanced_alchemy.filters import LimitOffset
+
+
+def provide_limit_offset_pagination(
+    current_page: int = 1,
+    page_size: int = 10,
+) -> LimitOffset:
+    """Add offset/limit pagination.
+
+    Return type consumed by `Repository.apply_limit_offset_pagination()`.
+
+    Parameters
+    ----------
+    current_page : int
+        Page number; used to compute the OFFSET applied to the select.
+    page_size : int
+        Number of rows per page; used as the LIMIT applied to the select.
+    """
+    return LimitOffset(page_size, page_size * (current_page - 1))
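
A quick way to exercise the new auth and pagination wiring end to end is a request sketch like the one below. This is a minimal sketch, not part of the diff itself: it assumes the stack from local.yml is running on localhost:8000, that alembic upgrade head has applied both migrations so the seeded admin/admin account from 812c97994acf_setup_default_data.py exists, and it uses requests, which is already pinned in requirements.txt. The current_page and page_size query parameters come from provide_limit_offset_pagination above.

# Hypothetical smoke test, not part of the diff above.
# Assumes the API is reachable at localhost:8000 (docker compose -f local.yml up),
# migrations have been applied, and the seeded admin/admin account is present.
import requests

BASE_URL = "http://localhost:8000/v1"

# /v1/auth/token uses OAuth2PasswordRequestForm, so credentials go as form fields.
token_response = requests.post(
    f"{BASE_URL}/auth/token",
    data={"username": "admin", "password": "admin"},
    timeout=10,
)
token_response.raise_for_status()
access_token = token_response.json()["access_token"]

# The seeded admin role carries every permission, so the token passes the
# fetch:user scope check on GET /v1/users.
users_response = requests.get(
    f"{BASE_URL}/users",
    params={"current_page": 1, "page_size": 10},  # translates to LIMIT 10 OFFSET 0
    headers={"Authorization": f"Bearer {access_token}"},
    timeout=10,
)
users_response.raise_for_status()
page = users_response.json()
print(page["total"], [user["username"] for user in page["items"]])

Because the users endpoint responds with OffsetPagination[UserSchema], the payload carries items, limit, offset, and total, so a client can page through users without counting rows itself.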