From cedd67887ed24200fb130851e5bffbedb242eebf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Sat, 8 Nov 2025 11:57:37 -0300
Subject: [PATCH 01/19] Uploading first draft of the new README.md. File:
NEW_README.md
---
new_readme.md | 138 ++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 138 insertions(+)
create mode 100644 new_readme.md
diff --git a/new_readme.md b/new_readme.md
new file mode 100644
index 0000000..86bb54f
--- /dev/null
+++ b/new_readme.md
@@ -0,0 +1,138 @@
+# Benav Labs FastAPI Boilerplate
+
+> **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
+
+
+ FastAPI
+ Pydantic v2
+ SQLAlchemy 2.0
+ PostgreSQL
+ Redis
+ ARQ
+
+
+**Docs:**
+
+* 📖 [https://benavlabs.github.io/FastAPI-boilerplate/](https://benavlabs.github.io/FastAPI-boilerplate/)
+* 🧠 DeepWiki: [https://deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)
+* 💬 Discord: [https://discord.com/invite/TEmPs22gqB](https://discord.com/invite/TEmPs22gqB)
+
+---
+
+## TL;DR - Quickstart
+
+Use the template on GitHub, create your repo, then:
+
+```bash
+# Clone your new repository
+git clone https://github.com/<your-username>/<your-repo>
+cd <your-repo>
+
+# NOTE (added by me):
+# Running locally with Uvicorn.
+# The .env and docker-compose.yml files were taken from this Gist:
+# https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a
+# I kept the local Dockerfile since it uses 'uv' instead of Poetry
+# (the Gist version relies on Poetry).
+
+# TODO: Decide where to put the example file, since it is currently
+# being copied from the Gist.
+
+# Copy and create your environment file
+cp src/.env.example src/.env
+# Fill in the minimal environment variables as described in the docs
+
+# Run everything using Docker
+docker compose up
+
+# Open the API documentation
+open http://127.0.0.1:8000/docs
+```
+
+> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
+
+---
+
+## Features
+
+* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
+* ๐งฑ Pydantic v2 models & validation
+* ๐ JWT auth (access + refresh), cookies for refresh
+* ๐ฎ Rate limiter + tiers (free/pro/etc.)
+* ๐งฐ FastCRUD for efficient CRUD & pagination
+* ๐งโ๐ผ **CRUDAdmin**: minimal admin panel (optional)
+* ๐ฆ ARQ background jobs (Redis)
+* ๐ง Redis caching (server + client-side headers)
+* ๐ณ One-command Docker Compose
+* ๐ NGINX & Gunicorn recipes for prod
+
+---
+
+## When to use it
+
+* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
+* You value **sensible defaults** with the freedom to opt-out of modules.
+* You prefer **docs over boilerplate** in README - depth lives in the site.
+
+Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
+
+---
+
+## What's inside (high-level)
+
+* **App**: FastAPI app factory, env-aware docs exposure
+* **Auth**: JWT access/refresh, logout via token blacklist
+* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
+* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
+* **Caching**: decorator-based endpoints cache; client cache headers
+* **Queues**: ARQ worker (async jobs), Redis connection helpers
+* **Rate limits**: per-tier + per-path rules
+* **Admin**: CRUDAdmin views for common models (optional)
+
+> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
+
+---
+
+## Configuration (minimal)
+
+Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
+
+* `ENVIRONMENT=local|staging|production` controls API docs exposure
+* Set `ADMIN_*` to enable the first admin user
+
+---
+
+## Common tasks
+
+```bash
+# run locally with reload (without Docker)
+uv sync && uv run uvicorn src.app.main:app --reload
+
+# run Alembic migrations
+cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head
+
+# enqueue a background job (example endpoint)
+curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
+```
+
+More examples (superuser creation, tiers, rate limits, admin usage) are available in the **docs**.
+
+---
+
+## Contributing
+
+Issues and PRs are welcome. Please read **CONTRIBUTING.md** and follow the style of existing modules (type hints, async/await, explicit None checks, and paginated responses).
+
+---
+
+## License
+
+MIT - see `LICENSE.md`.
+
+---
+
+
+
+
+
+
From ec4f2dd6148cb78dc7e77fc98f03f67c20e6414b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Mon, 10 Nov 2025 17:29:55 -0300
Subject: [PATCH 02/19] Rebasing.
---
README.md | 2267 ++-----------------------------------------------
new_readme.md | 138 ---
2 files changed, 90 insertions(+), 2315 deletions(-)
delete mode 100644 new_readme.md
diff --git a/README.md b/README.md
index a7e74ed..86bb54f 100644
--- a/README.md
+++ b/README.md
@@ -1,2225 +1,138 @@
- Benav Labs FastAPI boilerplate
-
- Yet another template to speed your FastAPI development up.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
----
-
-## ๐ Documentation
-
-๐ **[Visit our comprehensive documentation at benavlabs.github.io/FastAPI-boilerplate](https://benavlabs.github.io/FastAPI-boilerplate/)**
-
-๐ง **DeepWiki Docs: [deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)**
-
-> **โ ๏ธ Documentation Status**
->
-> This is our first version of the documentation. While functional, we acknowledge it's rough around the edges - there's a huge amount to document and we needed to start somewhere! We built this foundation (with a lot of AI assistance) so we can improve upon it.
->
-> Better documentation, examples, and guides are actively being developed. Contributions and feedback are greatly appreciated!
-
-This README provides a quick reference for LLMs and developers, but the full documentation contains detailed guides, examples, and best practices.
-
----
-
-## 0. About
-
-**FastAPI boilerplate** creates an extendable async API using FastAPI, Pydantic V2, SQLAlchemy 2.0 and PostgreSQL:
-
-- [`FastAPI`](https://fastapi.tiangolo.com): modern Python web framework for building APIs
-- [`Pydantic V2`](https://docs.pydantic.dev/2.4/): the most widely used data Python validation library, rewritten in Rust [`(5x-50x faster)`](https://docs.pydantic.dev/latest/blog/pydantic-v2-alpha/)
-- [`SQLAlchemy 2.0`](https://docs.sqlalchemy.org/en/20/changelog/whatsnew_20.html): Python SQL toolkit and Object Relational Mapper
-- [`PostgreSQL`](https://www.postgresql.org): The World's Most Advanced Open Source Relational Database
-- [`Redis`](https://redis.io): Open source, in-memory data store used by millions as a cache, message broker and more.
-- [`ARQ`](https://arq-docs.helpmanual.io) Job queues and RPC in python with asyncio and redis.
-- [`Docker Compose`](https://docs.docker.com/compose/) With a single command, create and start all the services from your configuration.
-- [`NGINX`](https://nginx.org/en/) High-performance low resource consumption web server used for Reverse Proxy and Load Balancing.
-
-
-
-
-
-
-
-## ๐ Join Our Community
-
-๐ฌ **[Join our Discord community](https://discord.com/invite/TEmPs22gqB)** - Connect with other developers using the FastAPI boilerplate!
-
-Our Discord server features:
-- **๐ค Networking** - Connect with fellow developers and share experiences
-- **๐ก Product Updates** - Stay updated with FastroAI and our other products
-- **๐ธ Showcase** - Share what you've built using our tools
-- **๐๏ธ Blog** - Latest blog posts and technical insights
-- **๐ฌ General Discussion** - Open space for questions and discussions
-- **๐ค Community Voice** - Join live talks and community events
-
-Whether you're just getting started or building production applications, our community is here to help you succeed!
-
-## 1. Features
-
-- โก๏ธ Fully async
-- ๐ Pydantic V2 and SQLAlchemy 2.0
-- ๐ User authentication with JWT
-- ๐ช Cookie based refresh token
-- ๐ฌ Easy redis caching
-- ๐ Easy client-side caching
-- ๐ฆ ARQ integration for task queue
-- โ๏ธ Efficient and robust queries with fastcrud
-- โ Out of the box offset and cursor pagination support with fastcrud
-- ๐ Rate Limiter dependency
-- ๐ฎ FastAPI docs behind authentication and hidden based on the environment
-- ๐ง Modern and light admin interface powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin)
-- ๐ Easy running with docker compose
-- โ๏ธ NGINX Reverse Proxy and Load Balancing
-
-## 2. Contents
-
-0. [About](#0-about)
-1. [Features](#1-features)
-1. [Contents](#2-contents)
-1. [Prerequisites](#3-prerequisites)
- 1. [Environment Variables (.env)](#31-environment-variables-env)
- 1. [Docker Compose](#32-docker-compose-preferred)
- 1. [From Scratch](#33-from-scratch)
-1. [Usage](#4-usage)
- 1. [Docker Compose](#41-docker-compose)
- 1. [From Scratch](#42-from-scratch)
- 1. [Packages](#421-packages)
- 1. [Running PostgreSQL With Docker](#422-running-postgresql-with-docker)
- 1. [Running Redis with Docker](#423-running-redis-with-docker)
- 1. [Running the API](#424-running-the-api)
- 1. [Creating the first superuser](#43-creating-the-first-superuser)
- 1. [Database Migrations](#44-database-migrations)
-1. [Extending](#5-extending)
- 1. [Project Structure](#51-project-structure)
- 1. [Database Model](#52-database-model)
- 1. [SQLAlchemy Models](#53-sqlalchemy-models)
- 1. [Pydantic Schemas](#54-pydantic-schemas)
- 1. [Alembic Migrations](#55-alembic-migrations)
- 1. [CRUD](#56-crud)
- 1. [Routes](#57-routes)
- 1. [Paginated Responses](#571-paginated-responses)
- 1. [HTTP Exceptions](#572-http-exceptions)
- 1. [Caching](#58-caching)
- 1. [More Advanced Caching](#59-more-advanced-caching)
- 1. [ARQ Job Queues](#510-arq-job-queues)
- 1. [Rate Limiting](#511-rate-limiting)
- 1. [JWT Authentication](#512-jwt-authentication)
- 1. [Admin Panel](#513-admin-panel)
- 1. [Running](#514-running)
- 1. [Create Application](#515-create-application)
- 2. [Opting Out of Services](#516-opting-out-of-services)
-1. [Running in Production](#6-running-in-production)
- 1. [Uvicorn Workers with Gunicorn](#61-uvicorn-workers-with-gunicorn)
- 1. [Running With NGINX](#62-running-with-nginx)
- 1. [One Server](#621-one-server)
- 1. [Multiple Servers](#622-multiple-servers)
-1. [Testing](#7-testing)
-1. [Contributing](#8-contributing)
-1. [References](#9-references)
-1. [License](#10-license)
-1. [Contact](#11-contact)
-
-______________________________________________________________________
-
-## 3. Prerequisites
-
-> ๐ **[See detailed installation guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/installation/)**
-
-### 3.0 Start
-
-Start by using the template, and naming the repository to what you want.
-
-
-
-
-
-Then clone your created repository (I'm using the base for the example)
-
-```sh
-git clone https://github.com/igormagalhaesr/FastAPI-boilerplate
-```
-
-> \[!TIP\]
-> If you are in a hurry, you may use one of the following templates (containing a `.env`, `docker-compose.yml` and `Dockerfile`):
-
-- [Running locally with uvicorn](https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a)
-- [Running in staging with gunicorn managing uvicorn workers](https://gist.github.com/igorbenav/d0518d4f6bdfb426d4036090f74905ee)
-- [Running in production with NGINX](https://gist.github.com/igorbenav/232c3b73339d6ca74e2bf179a5ef48a1)
-
-> \[!WARNING\]
-> Do not forget to place `docker-compose.yml` and `Dockerfile` in the `root` folder, while `.env` should be in the `src` folder.
-
-### 3.1 Environment Variables (.env)
-
-> ๐ **[See complete configuration guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/)**
-
-Then create a `.env` file inside `src` directory:
-
-```sh
-touch .env
-```
-
-Inside of `.env`, create the following app settings variables:
-
-```
-# ------------- app settings -------------
-APP_NAME="Your app name here"
-APP_DESCRIPTION="Your app description here"
-APP_VERSION="0.1"
-CONTACT_NAME="Your name"
-CONTACT_EMAIL="Your email"
-LICENSE_NAME="The license you picked"
-```
-
-For the database ([`if you don't have a database yet, click here`](#422-running-postgresql-with-docker)), create:
-
-```
-# ------------- database -------------
-POSTGRES_USER="your_postgres_user"
-POSTGRES_PASSWORD="your_password"
-POSTGRES_SERVER="your_server" # default "localhost", if using docker compose you should use "db"
-POSTGRES_PORT=5432 # default "5432", if using docker compose you should use "5432"
-POSTGRES_DB="your_db"
-```
-
-For database administration using PGAdmin create the following variables in the .env file
-
-```
-# ------------- pgadmin -------------
-PGADMIN_DEFAULT_EMAIL="your_email_address"
-PGADMIN_DEFAULT_PASSWORD="your_password"
-PGADMIN_LISTEN_PORT=80
-```
-
-To connect to the database, log into the PGAdmin console with the values specified in `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD`.
-
-Once in the main PGAdmin screen, click Add Server:
-
-
-
-1. Hostname/address is `db` (if using containers)
-1. Is the value you specified in `POSTGRES_PORT`
-1. Leave this value as `postgres`
-1. is the value you specified in `POSTGRES_USER`
-1. Is the value you specified in `POSTGRES_PASSWORD`
-
-For crypt:
-Start by running
-
-```sh
-openssl rand -hex 32
-```
-
-And then create in `.env`:
-
-```
-# ------------- crypt -------------
-SECRET_KEY= # result of openssl rand -hex 32
-ALGORITHM= # pick an algorithm, default HS256
-ACCESS_TOKEN_EXPIRE_MINUTES= # minutes until token expires, default 30
-REFRESH_TOKEN_EXPIRE_DAYS= # days until token expires, default 7
-```
-
-Then for the first admin user:
-
-```
-# ------------- admin -------------
-ADMIN_NAME="your_name"
-ADMIN_EMAIL="your_email"
-ADMIN_USERNAME="your_username"
-ADMIN_PASSWORD="your_password"
-```
-
-For the CRUDAdmin panel:
-
-```
-# ------------- crud admin -------------
-CRUD_ADMIN_ENABLED=true # default=true, set to false to disable admin panel
-CRUD_ADMIN_MOUNT_PATH="/admin" # default="/admin", path where admin panel will be mounted
-
-# ------------- crud admin security -------------
-CRUD_ADMIN_MAX_SESSIONS=10 # default=10, maximum concurrent sessions per user
-CRUD_ADMIN_SESSION_TIMEOUT=1440 # default=1440 (24 hours), session timeout in minutes
-SESSION_SECURE_COOKIES=true # default=true, use secure cookies
-
-# ------------- crud admin tracking -------------
-CRUD_ADMIN_TRACK_EVENTS=true # default=true, track admin events
-CRUD_ADMIN_TRACK_SESSIONS=true # default=true, track admin sessions in database
-
-# ------------- crud admin redis (optional for production) -------------
-CRUD_ADMIN_REDIS_ENABLED=false # default=false, use Redis for session storage
-CRUD_ADMIN_REDIS_HOST="localhost" # default="localhost", Redis host for admin sessions
-CRUD_ADMIN_REDIS_PORT=6379 # default=6379, Redis port for admin sessions
-CRUD_ADMIN_REDIS_DB=0 # default=0, Redis database for admin sessions
-CRUD_ADMIN_REDIS_PASSWORD="" # optional, Redis password for admin sessions
-CRUD_ADMIN_REDIS_SSL=false # default=false, use SSL for Redis connection
-```
-
-**Session Backend Options:**
-- **Memory** (default): Development-friendly, sessions reset on restart
-- **Redis** (production): High performance, scalable, persistent sessions
-- **Database**: Audit-friendly with admin visibility
-- **Hybrid**: Redis performance + database audit trail
-
-For redis caching:
-
-```
-# ------------- redis cache-------------
-REDIS_CACHE_HOST="your_host" # default "localhost", if using docker compose you should use "redis"
-REDIS_CACHE_PORT=6379 # default "6379", if using docker compose you should use "6379"
-```
-
-And for client-side caching:
-
-```
-# ------------- redis client-side cache -------------
-CLIENT_CACHE_MAX_AGE=30 # default "30"
-```
-
-For ARQ Job Queues:
-
-```
-# ------------- redis queue -------------
-REDIS_QUEUE_HOST="your_host" # default "localhost", if using docker compose you should use "redis"
-REDIS_QUEUE_PORT=6379 # default "6379", if using docker compose you should use "6379"
-```
-
-> \[!WARNING\]
-> You may use the same redis for both caching and queue while developing, but the recommendation is using two separate containers for production.
-
-To create the first tier:
-
-```
-# ------------- first tier -------------
-TIER_NAME="free"
-```
-
-For the rate limiter:
-
-```
-# ------------- redis rate limit -------------
-REDIS_RATE_LIMIT_HOST="localhost" # default="localhost", if using docker compose you should use "redis"
-REDIS_RATE_LIMIT_PORT=6379 # default=6379, if using docker compose you should use "6379"
-
-
-# ------------- default rate limit settings -------------
-DEFAULT_RATE_LIMIT_LIMIT=10 # default=10
-DEFAULT_RATE_LIMIT_PERIOD=3600 # default=3600
-```
-
-And Finally the environment:
-
-```
-# ------------- environment -------------
-ENVIRONMENT="local"
-```
-
-`ENVIRONMENT` can be one of `local`, `staging` and `production`, defaults to local, and changes the behavior of api `docs` endpoints:
-
-- **local:** `/docs`, `/redoc` and `/openapi.json` available
-- **staging:** `/docs`, `/redoc` and `/openapi.json` available for superusers
-- **production:** `/docs`, `/redoc` and `/openapi.json` not available
-
-### 3.2 Docker Compose (preferred)
-
-To do it using docker compose, ensure you have docker and docker compose installed, then:
-While in the base project directory (FastAPI-boilerplate here), run:
-
-```sh
-docker compose up
-```
-
-You should have a `web` container, `postgres` container, a `worker` container and a `redis` container running.
-Then head to `http://127.0.0.1:8000/docs`.
-
-### 3.3 From Scratch
-
-Install uv:
-
-```sh
-pip install uv
-```
-
-## 4. Usage
-
-> ๐ **[See complete first run guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/first-run/)**
-
-### 4.1 Docker Compose
-
-If you used docker compose, your setup is done. You just need to ensure that when you run (while in the base folder):
-
-```sh
-docker compose up
-```
-
-You get the following outputs (in addition to many other outputs):
-
-```sh
-fastapi-boilerplate-worker-1 | ... redis_version=x.x.x mem_usage=999K clients_connected=1 db_keys=0
-...
-fastapi-boilerplate-db-1 | ... [1] LOG: database system is ready to accept connections
-...
-fastapi-boilerplate-web-1 | INFO: Application startup complete.
-```
-
-So you may skip to [5. Extending](#5-extending).
-
-### 4.2 From Scratch
-
-#### 4.2.1. Packages
-
-In the `root` directory (`FastAPI-boilerplate` if you didn't change anything), run to install required packages:
-
-```sh
-uv sync
-```
-
-Ensuring it ran without any problem.
-
-#### 4.2.2. Running PostgreSQL With Docker
-
-> \[!NOTE\]
-> If you already have a PostgreSQL running, you may skip this step.
-
-Install docker if you don't have it yet, then run:
-
-```sh
-docker pull postgres
-```
-
-And pick the port, name, user and password, replacing the fields:
-
-```sh
-docker run -d \
- -p {PORT}:{PORT} \
- --name {NAME} \
- -e POSTGRES_PASSWORD={PASSWORD} \
- -e POSTGRES_USER={USER} \
- postgres
-```
-
-Such as:
-
-```sh
-docker run -d \
- -p 5432:5432 \
- --name postgres \
- -e POSTGRES_PASSWORD=1234 \
- -e POSTGRES_USER=postgres \
- postgres
-```
-
-#### 4.2.3. Running redis With Docker
-
-> \[!NOTE\]
-> If you already have a redis running, you may skip this step.
-
-Install docker if you don't have it yet, then run:
-
-```sh
-docker pull redis:alpine
-```
-
-And pick the name and port, replacing the fields:
-
-```sh
-docker run -d \
- --name {NAME} \
- -p {PORT}:{PORT} \
-redis:alpine
-```
-
-Such as
-
-```sh
-docker run -d \
- --name redis \
- -p 6379:6379 \
-redis:alpine
-```
-
-#### 4.2.4. Running the API
-
-While in the `root` folder, run to start the application with uvicorn server:
-
-```sh
-uv run uvicorn src.app.main:app --reload
-```
-
-> \[!TIP\]
-> The --reload flag enables auto-reload once you change (and save) something in the project
-
-### 4.3 Creating the first superuser
-
-#### 4.3.1 Docker Compose
-
-> \[!WARNING\]
-> Make sure DB and tables are created before running create_superuser (db should be running and the api should run at least once before)
-
-If you are using docker compose, you should uncomment this part of the docker-compose.yml:
-
-```
- #-------- uncomment to create first superuser --------
- # create_superuser:
- # build:
- # context: .
- # dockerfile: Dockerfile
- # env_file:
- # - ./src/.env
- # depends_on:
- # - db
- # command: python -m src.scripts.create_first_superuser
- # volumes:
- # - ./src:/code/src
-```
-
-Getting:
-
-```
- #-------- uncomment to create first superuser --------
- create_superuser:
- build:
- context: .
- dockerfile: Dockerfile
- env_file:
- - ./src/.env
- depends_on:
- - db
- command: python -m src.scripts.create_first_superuser
- volumes:
- - ./src:/code/src
-```
-
-While in the base project folder run to start the services:
-
-```sh
-docker-compose up -d
-```
-
-It will automatically run the create_superuser script as well, but if you want to rerun eventually:
-
-```sh
-docker-compose run --rm create_superuser
-```
-
-to stop the create_superuser service:
-
-```sh
-docker-compose stop create_superuser
-```
-
-#### 4.3.2 From Scratch
-
-While in the `root` folder, run (after you started the application at least once to create the tables):
-
-```sh
-uv run python -m src.scripts.create_first_superuser
-```
-
-### 4.3.3 Creating the first tier
-
-> \[!WARNING\]
-> Make sure DB and tables are created before running create_tier (db should be running and the api should run at least once before)
-
-To create the first tier it's similar, you just replace `create_superuser` for `create_tier` service or `create_first_superuser` to `create_first_tier` for scripts. If using `docker compose`, do not forget to uncomment the `create_tier` service in `docker-compose.yml`.
-
-### 4.4 Database Migrations
-
-> \[!WARNING\]
-> To create the tables if you did not create the endpoints, ensure that you import the models in src/app/models/__init__.py. This step is crucial to create the new tables.
-
-If you are using the db in docker, you need to change this in `docker-compose.yml` to run migrations:
-
-```sh
- db:
- image: postgres:13
- env_file:
- - ./src/.env
- volumes:
- - postgres-data:/var/lib/postgresql/data
- # -------- replace with comment to run migrations with docker --------
- expose:
- - "5432"
- # ports:
- # - 5432:5432
-```
-
-Getting:
-
-```sh
- db:
- ...
- # expose:
- # - "5432"
- ports:
- - 5432:5432
-```
-
-While in the `src` folder, run Alembic migrations:
-
-```sh
-uv run alembic revision --autogenerate
-```
-
-And to apply the migration
-
-```sh
-uv run alembic upgrade head
-```
-
-> [!NOTE]
-> If you do not have uv, you may run it without uv after running `pip install alembic`
-
-## 5. Extending
-
-> ๐ **[See comprehensive development guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/development/)**
-
-### 5.1 Project Structure
-
-> ๐ **[See detailed project structure guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/project-structure/)**
-
-First, you may want to take a look at the project structure and understand what each file is doing.
-
-```sh
-.
-โโโ Dockerfile # Dockerfile for building the application container.
-โโโ docker-compose.yml # Docker Compose file for defining multi-container applications.
-โโโ pyproject.toml # Project configuration file with metadata and dependencies (PEP 621).
-โโโ uv.lock # uv lock file specifying exact versions of dependencies.
-โโโ README.md # Project README providing information and instructions.
-โโโ LICENSE.md # License file for the project.
-โ
-โโโ tests # Unit tests for the application.
-โ โโโhelpers # Helper functions for tests.
-โ โ โโโ generators.py # Helper functions for generating test data.
-โ โ โโโ mocks.py # Mock functions for testing.
-โ โโโ __init__.py
-โ โโโ conftest.py # Configuration and fixtures for pytest.
-โ โโโ test_user_unit.py # Unit test cases for user-related functionality.
-โ
-โโโ src # Source code directory.
- โโโ __init__.py # Initialization file for the src package.
- โโโ alembic.ini # Configuration file for Alembic (database migration tool).
- โ
- โโโ app # Main application directory.
- โ โโโ __init__.py # Initialization file for the app package.
- โ โโโ main.py # Main entry point of the FastAPI application.
- โ โ
- โ โ
- โ โโโ api # Folder containing API-related logic.
- โ โ โโโ __init__.py
- โ โ โโโ dependencies.py # Defines dependencies for use across API endpoints.
- โ โ โ
- โ โ โโโ v1 # Version 1 of the API.
- โ โ โโโ __init__.py
- โ โ โโโ login.py # API route for user login.
- โ โ โโโ logout.py # API route for user logout.
- โ โ โโโ posts.py # API routes for post operations.
- โ โ โโโ rate_limits.py # API routes for rate limiting functionalities.
- โ โ โโโ tasks.py # API routes for task management.
- โ โ โโโ tiers.py # API routes for user tier functionalities.
- โ โ โโโ users.py # API routes for user management.
- โ โ
- โ โโโ core # Core utilities and configurations for the application.
- โ โ โโโ __init__.py
- โ โ โโโ config.py # Configuration settings for the application.
- โ โ โโโ logger.py # Configuration for application logging.
- โ โ โโโ schemas.py # Pydantic schemas for data validation.
- โ โ โโโ security.py # Security utilities, such as password hashing.
- โ โ โโโ setup.py # Setup file for the FastAPI app instance.
- โ โ โ
- โ โ โโโ db # Core Database related modules.
- โ โ โ โโโ __init__.py
- โ โ โ โโโ crud_token_blacklist.py # CRUD operations for token blacklist.
- โ โ โ โโโ database.py # Database connectivity and session management.
- โ โ โ โโโ models.py # Core Database models.
- โ โ โ โโโ token_blacklist.py # Model for token blacklist functionality.
- โ โ โ
- โ โ โโโ exceptions # Custom exception classes.
- โ โ โ โโโ __init__.py
- โ โ โ โโโ cache_exceptions.py # Exceptions related to cache operations.
- โ โ โ โโโ http_exceptions.py # HTTP-related exceptions.
- โ โ โ
- โ โ โโโ utils # Utility functions and helpers.
- โ โ โ โโโ __init__.py
- โ โ โ โโโ cache.py # Cache-related utilities.
- โ โ โ โโโ queue.py # Utilities for task queue management.
- โ โ โ โโโ rate_limit.py # Rate limiting utilities.
- โ โ โ
- โ โ โโโ worker # Worker script for background tasks.
- โ โ โโโ __init__.py
- โ โ โโโ settings.py # Worker configuration and settings.
- โ โ โโโ functions.py # Async task definitions and management.
- โ โ
- โ โโโ crud # CRUD operations for the application.
- โ โ โโโ __init__.py
- โ โ โโโ crud_base.py # Base class for CRUD operations.
- โ โ โโโ crud_posts.py # CRUD operations for posts.
- โ โ โโโ crud_rate_limit.py # CRUD operations for rate limiting.
- โ โ โโโ crud_tier.py # CRUD operations for user tiers.
- โ โ โโโ crud_users.py # CRUD operations for users.
- โ โ โโโ helper.py # Helper functions for CRUD operations.
- โ โ
- โ โโโ logs # Directory for log files.
- โ โ โโโ app.log # Log file for the application.
- โ โ
- โ โโโ middleware # Middleware components for the application.
- โ โ โโโ client_cache_middleware.py # Middleware for client-side caching.
- โ โ
- โ โโโ models # ORM models for the application.
- โ โ โโโ __init__.py
- โ โ โโโ post.py # ORM model for posts.
- โ โ โโโ rate_limit.py # ORM model for rate limiting.
- โ โ โโโ tier.py # ORM model for user tiers.
- โ โ โโโ user.py # ORM model for users.
- โ โ
- โ โโโ schemas # Pydantic schemas for data validation.
- โ โโโ __init__.py
- โ โโโ job.py # Schema for background jobs.
- โ โโโ post.py # Schema for post data.
- โ โโโ rate_limit.py # Schema for rate limiting data.
- โ โโโ tier.py # Schema for user tier data.
- โ โโโ user.py # Schema for user data.
- โ
- โโโ migrations # Alembic migration scripts for database changes.
- โ โโโ README
- โ โโโ env.py # Environment configuration for Alembic.
- โ โโโ script.py.mako # Template script for Alembic migrations.
- โ โ
- โ โโโ versions # Individual migration scripts.
- โ โโโ README.MD
- โ
- โโโ scripts # Utility scripts for the application.
- โโโ __init__.py
- โโโ create_first_superuser.py # Script to create the first superuser.
- โโโ create_first_tier.py # Script to create the first user tier.
-```
-
-### 5.2 Database Model
-
-Create the new entities and relationships and add them to the model
-
-
-#### 5.2.1 Token Blacklist
-
-Note that this table is used to blacklist the `JWT` tokens (it's how you log a user out)
-
-
-### 5.3 SQLAlchemy Models
-
-> ๐ **[See database models guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/models/)**
-
-Inside `app/models`, create a new `entity.py` for each new entity (replacing entity with the name) and define the attributes according to [SQLAlchemy 2.0 standards](https://docs.sqlalchemy.org/en/20/orm/mapping_styles.html#orm-mapping-styles):
-
-> \[!WARNING\]
-> Note that since it inherits from `Base`, the new model is mapped as a python `dataclass`, so optional attributes (arguments with a default value) should be defined after required attributes.
-
-```python
-from sqlalchemy import String, DateTime
-from sqlalchemy.orm import Mapped, mapped_column, relationship
-
-from app.core.db.database import Base
-
-
-class Entity(Base):
- __tablename__ = "entity"
-
- id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False)
- name: Mapped[str] = mapped_column(String(30))
- ...
-```
-
-### 5.4 Pydantic Schemas
-
-> ๐ **[See database schemas guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/schemas/)**
-
-Inside `app/schemas`, create a new `entity.py` for each new entity (replacing entity with the name) and create the schemas according to [Pydantic V2](https://docs.pydantic.dev/latest/#pydantic-examples) standards:
-
-```python
-from typing import Annotated
-
-from pydantic import BaseModel, EmailStr, Field, HttpUrl, ConfigDict
-
-
-class EntityBase(BaseModel):
- name: Annotated[
- str,
- Field(min_length=2, max_length=30, examples=["Entity Name"]),
- ]
-
-
-class Entity(EntityBase):
- ...
-
-
-class EntityRead(EntityBase):
- ...
-
-
-class EntityCreate(EntityBase):
- ...
-
-
-class EntityCreateInternal(EntityCreate):
- ...
-
-
-class EntityUpdate(BaseModel):
- ...
-
-
-class EntityUpdateInternal(BaseModel):
- ...
-
-
-class EntityDelete(BaseModel):
- model_config = ConfigDict(extra="forbid")
-
- is_deleted: bool
- deleted_at: datetime
-```
-
-### 5.5 Alembic Migrations
-
-> ๐ **[See database migrations guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/migrations/)**
-
-> \[!WARNING\]
-> To create the tables if you did not create the endpoints, ensure that you import the models in src/app/models/__init__.py. This step is crucial to create the new models.
-
-Then, while in the `src` folder, run Alembic migrations:
-
-```sh
-uv run alembic revision --autogenerate
-```
-
-And to apply the migration
-
-```sh
-uv run alembic upgrade head
-```
-
-### 5.6 CRUD
-
-> ๐ **[See CRUD operations guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/crud/)**
-
-Inside `app/crud`, create a new `crud_entity.py` inheriting from `FastCRUD` for each new entity:
-
-```python
-from fastcrud import FastCRUD
-
-from app.models.entity import Entity
-from app.schemas.entity import EntityCreateInternal, EntityUpdate, EntityUpdateInternal, EntityDelete
-
-CRUDEntity = FastCRUD[Entity, EntityCreateInternal, EntityUpdate, EntityUpdateInternal, EntityDelete]
-crud_entity = CRUDEntity(Entity)
-```
-
-So, for users:
-
-```python
-# crud_users.py
-from app.model.user import User
-from app.schemas.user import UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete
-
-CRUDUser = FastCRUD[User, UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete]
-crud_users = CRUDUser(User)
-```
-
-#### 5.6.1 Get
-
-When actually using the crud in an endpoint, to get data you just pass the database connection and the attributes as kwargs:
-
-```python
-# Here I'm getting the first user with email == user.email (email is unique in this case)
-user = await crud_users.get(db=db, email=user.email)
-```
-
-#### 5.6.2 Get Multi
-
-To get a list of objects with the attributes, you should use the get_multi:
-
-```python
-# Here I'm getting at most 10 users with the name 'User Userson' except for the first 3
-user = await crud_users.get_multi(db=db, offset=3, limit=10, name="User Userson")
-```
-
-> \[!WARNING\]
-> Note that get_multi returns a python `dict`.
-
-Which will return a python dict with the following structure:
-
-```javascript
-{
- "data": [
- {
- "id": 4,
- "name": "User Userson",
- "username": "userson4",
- "email": "user.userson4@example.com",
- "profile_image_url": "https://profileimageurl.com"
- },
- {
- "id": 5,
- "name": "User Userson",
- "username": "userson5",
- "email": "user.userson5@example.com",
- "profile_image_url": "https://profileimageurl.com"
- }
- ],
- "total_count": 2,
- "has_more": false,
- "page": 1,
- "items_per_page": 10
-}
-```
-
-#### 5.6.3 Create
-
-To create, you pass a `CreateSchemaType` object with the attributes, such as a `UserCreate` pydantic schema:
-
-```python
-from app.schemas.user import UserCreate
-
-# Creating the object
-user_internal = UserCreate(name="user", username="myusername", email="user@example.com")
-
-# Passing the object to be created
-await crud_users.create(db=db, object=user_internal)
-```
-
-#### 5.6.4 Exists
-
-To just check if there is at least one row that matches a certain set of attributes, you should use `exists`
-
-```python
-# This queries only the email variable
-# It returns True if there's at least one or False if there is none
-await crud_users.exists(db=db, email=user.email)
-```
-
-#### 5.6.5 Count
-
-You can also get the count of a certain object with the specified filter:
-
-```python
-# Here I'm getting the count of users with the name 'User Userson'
-count = await crud_users.count(db=db, name="User Userson")
-```
-
-#### 5.6.6 Update
-
-To update you pass an `object` which may be a `pydantic schema` or just a regular `dict`, and the kwargs.
-You will update with `objects` the rows that match your `kwargs`.
-
-```python
-# Here I'm updating the user with username == "myusername".
-# I'll change his name to "Updated Name"
-await crud_users.update(db=db, object={"name": "Updated Name"}, username="myusername")
-```
-
-#### 5.6.7 Delete
-
-To delete we have two options:
-
-- db_delete: actually deletes the row from the database
-- delete:
- - adds `"is_deleted": True` and `deleted_at: datetime.now(UTC)` if the model inherits from `PersistentDeletion` (performs a soft delete), but keeps the object in the database.
- - actually deletes the row from the database if the model does not inherit from `PersistentDeletion`
-
-```python
-# Here I'll just change is_deleted to True
-await crud_users.delete(db=db, username="myusername")
-
-# Here I actually delete it from the database
-await crud_users.db_delete(db=db, username="myusername")
-```
-
-#### 5.6.8 Get Joined
-
-To retrieve data with a join operation, you can use the get_joined method from your CRUD module. Here's how to do it:
-
-```python
-# Fetch a single record with a join on another model (e.g., User and Tier).
-result = await crud_users.get_joined(
- db=db, # The SQLAlchemy async session.
- join_model=Tier, # The model to join with (e.g., Tier).
- schema_to_select=UserSchema, # Pydantic schema for selecting User model columns (optional).
- join_schema_to_select=TierSchema, # Pydantic schema for selecting Tier model columns (optional).
-)
-```
-
-**Relevant Parameters:**
-
-- `join_model`: The model you want to join with (e.g., Tier).
-- `join_prefix`: Optional prefix to be added to all columns of the joined model. If None, no prefix is added.
-- `join_on`: SQLAlchemy Join object for specifying the ON clause of the join. If None, the join condition is auto-detected based on foreign keys.
-- `schema_to_select`: A Pydantic schema to select specific columns from the primary model (e.g., UserSchema).
-- `join_schema_to_select`: A Pydantic schema to select specific columns from the joined model (e.g., TierSchema).
-- `join_type`: Specifies the type of join operation to perform. Can be "left" for a left outer join or "inner" for an inner join. Default "left".
-- `kwargs`: Filters to apply to the primary query.
-
-This method allows you to perform a join operation, selecting columns from both models, and retrieve a single record.
-
-#### 5.6.9 Get Multi Joined
-
-Similarly, to retrieve multiple records with a join operation, you can use the get_multi_joined method. Here's how:
-
-```python
-# Retrieve a list of objects with a join on another model (e.g., User and Tier).
-result = await crud_users.get_multi_joined(
- db=db, # The SQLAlchemy async session.
- join_model=Tier, # The model to join with (e.g., Tier).
- join_prefix="tier_", # Optional prefix for joined model columns.
- join_on=and_(User.tier_id == Tier.id, User.is_superuser == True), # Custom join condition.
- schema_to_select=UserSchema, # Pydantic schema for selecting User model columns.
- join_schema_to_select=TierSchema, # Pydantic schema for selecting Tier model columns.
- username="john_doe", # Additional filter parameters.
-)
-```
-
-**Relevant Parameters:**
-
-- `join_model`: The model you want to join with (e.g., Tier).
-- `join_prefix`: Optional prefix to be added to all columns of the joined model. If None, no prefix is added.
-- `join_on`: SQLAlchemy Join object for specifying the ON clause of the join. If None, the join condition is auto-detected based on foreign keys.
-- `schema_to_select`: A Pydantic schema to select specific columns from the primary model (e.g., UserSchema).
-- `join_schema_to_select`: A Pydantic schema to select specific columns from the joined model (e.g., TierSchema).
-- `join_type`: Specifies the type of join operation to perform. Can be "left" for a left outer join or "inner" for an inner join. Default "left".
-- `offset`: The offset (number of records to skip) for pagination. Default 0.
-- `limit`: The limit (maximum number of records to return) for pagination. Default 100.
-- `kwargs`: Filters to apply to the primary query.
-
-#### More Efficient Selecting
-
-For the `get` and `get_multi` methods we have the option to define a `schema_to_select` attribute, which is what actually makes the queries more efficient. When you pass a `pydantic schema` (preferred) or a list of the names of the attributes in `schema_to_select` to the `get` or `get_multi` methods, only the attributes in the schema will be selected.
-
-```python
-from app.schemas.user import UserRead
-
-# Here it's selecting all of the user's data
-await crud_users.get(db=db, username="myusername")
-
-# Now it's only selecting the data that is in UserRead.
-# Since that's my response_model, it's all I need
-await crud_users.get(db=db, username="myusername", schema_to_select=UserRead)
-```
-
-### 5.7 Routes
-
-> ๐ **[See API endpoints guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/api/endpoints/)**
-
-Inside `app/api/v1`, create a new `entities.py` file and create the desired routes with proper dependency injection:
-
-```python
-from typing import Annotated, List
-from fastapi import Depends, Request, APIRouter
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.schemas.entity import EntityRead
-from app.core.db.database import async_get_db
-from app.crud.crud_entity import crud_entity
-
-router = APIRouter(tags=["entities"])
-
-
-@router.get("/entities/{id}", response_model=EntityRead)
-async def read_entity(
- request: Request,
- id: int,
- db: Annotated[AsyncSession, Depends(async_get_db)]
-):
- entity = await crud_entity.get(db=db, id=id)
-
- if entity is None: # Explicit None check
- raise NotFoundException("Entity not found")
-
- return entity
-
-
-@router.get("/entities", response_model=List[EntityRead])
-async def read_entities(
- request: Request,
- db: Annotated[AsyncSession, Depends(async_get_db)]
-):
- entities = await crud_entity.get_multi(db=db, is_deleted=False)
- return entities
-```
-
-Then in `app/api/v1/__init__.py` add the router:
-
-```python
-from fastapi import APIRouter
-from app.api.v1.entities import router as entity_router
-from app.api.v1.users import router as user_router
-from app.api.v1.posts import router as post_router
-
-router = APIRouter(prefix="/v1")
-
-router.include_router(user_router)
-router.include_router(post_router)
-router.include_router(entity_router) # Add your new router
-```
-
-#### 5.7.1 Paginated Responses
-
-> ๐ **[See API pagination guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/api/pagination/)**
-
-With the `get_multi` method we get a python `dict` with full support for pagination:
-
-```javascript
-{
- "data": [
- {
- "id": 4,
- "name": "User Userson",
- "username": "userson4",
- "email": "user.userson4@example.com",
- "profile_image_url": "https://profileimageurl.com"
- },
- {
- "id": 5,
- "name": "User Userson",
- "username": "userson5",
- "email": "user.userson5@example.com",
- "profile_image_url": "https://profileimageurl.com"
- }
- ],
- "total_count": 2,
- "has_more": false,
- "page": 1,
- "items_per_page": 10
-}
-```
-
-And in the endpoint, we can import from `fastcrud` the following functions and Pydantic Schema:
-
-```python
-from typing import Annotated
-from fastapi import Depends, Request
-from sqlalchemy.ext.asyncio import AsyncSession
-from fastcrud import (
- PaginatedListResponse, # What you'll use as a response_model to validate
- paginated_response, # Creates a paginated response based on the parameters
- compute_offset, # Calculate the offset for pagination ((page - 1) * items_per_page)
-)
-```
-
-Then let's create the endpoint:
-
-```python
-import fastapi
-
-from app.schemas.entity import EntityRead
-
-...
-
-
-@router.get("/entities", response_model=PaginatedListResponse[EntityRead])
-async def read_entities(
- request: Request,
- db: Annotated[AsyncSession, Depends(async_get_db)],
- page: int = 1,
- items_per_page: int = 10
-):
- entities_data = await crud_entity.get_multi(
- db=db,
- offset=compute_offset(page, items_per_page),
- limit=items_per_page,
- schema_to_select=EntityRead,
- is_deleted=False,
- )
-
- return paginated_response(crud_data=entities_data, page=page, items_per_page=items_per_page)
-```
-
-#### 5.7.2 HTTP Exceptions
-
-> ๐ **[See API exceptions guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/api/exceptions/)**
-
-To add exceptions you may just import from `app/core/exceptions/http_exceptions` and optionally add a detail:
-
-```python
-from app.core.exceptions.http_exceptions import (
-    CustomException,
-    NotFoundException,
-    ForbiddenException,
-    DuplicateValueException,
-)
-
-@router.post("/entities", response_model=EntityRead, status_code=201)
-async def create_entity(
- request: Request,
- entity_data: EntityCreate,
- db: Annotated[AsyncSession, Depends(async_get_db)],
- current_user: Annotated[UserRead, Depends(get_current_user)]
-):
- # Check if entity already exists
- if await crud_entity.exists(db=db, name=entity_data.name) is True:
- raise DuplicateValueException("Entity with this name already exists")
-
- # Check user permissions
- if current_user.is_active is False: # Explicit boolean check
- raise ForbiddenException("User account is disabled")
-
- # Create the entity
- entity = await crud_entity.create(db=db, object=entity_data)
-
- if entity is None: # Explicit None check
- raise CustomException("Failed to create entity")
-
- return entity
-
-
-@router.get("/entities/{id}", response_model=EntityRead)
-async def read_entity(
- request: Request,
- id: int,
- db: Annotated[AsyncSession, Depends(async_get_db)]
-):
- entity = await crud_entity.get(db=db, id=id)
-
- if entity is None: # Explicit None check
- raise NotFoundException("Entity not found")
-
- return entity
-```
-
-**The predefined possibilities in http_exceptions are the following:**
-
-- `CustomException`: 500 internal error
-- `BadRequestException`: 400 bad request
-- `NotFoundException`: 404 not found
-- `ForbiddenException`: 403 forbidden
-- `UnauthorizedException`: 401 unauthorized
-- `UnprocessableEntityException`: 422 unprocessable entity
-- `DuplicateValueException`: 422 unprocessable entity
-- `RateLimitException`: 429 too many requests
-
-### 5.8 Caching
-
-> ๐ **[See comprehensive caching guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/caching/)**
-
-The `cache` decorator allows you to cache the results of FastAPI endpoint functions, enhancing response times and reducing the load on your application by storing and retrieving data in a cache.
-
-Caching the response of an endpoint is really simple, just apply the `cache` decorator to the endpoint function.
-
-> \[!WARNING\]
-> Note that you should always pass request as a variable to your endpoint function if you plan to use the cache decorator.
-
-```python
-...
-from app.core.utils.cache import cache
-
-
-@app.get("/sample/{my_id}")
-@cache(key_prefix="sample_data", expiration=3600, resource_id_name="my_id")
-async def sample_endpoint(request: Request, my_id: int):
- # Endpoint logic here
- return {"data": "my_data"}
-```
-
-The way it works is:
-
-- the data is saved in redis with the following cache key: `sample_data:{my_id}`
-- then the time to expire is set as 3600 seconds (that's the default)
-
-Another option is not passing the `resource_id_name`, but passing the `resource_id_type` (default int):
-
-```python
-...
-from app.core.utils.cache import cache
-
-
-@app.get("/sample/{my_id}")
-@cache(key_prefix="sample_data", resource_id_type=int)
-async def sample_endpoint(request: Request, my_id: int):
- # Endpoint logic here
- return {"data": "my_data"}
-```
-
-In this case, what will happen is:
-
-- the `resource_id` will be inferred from the keyword arguments (`my_id` in this case)
-- the data is saved in redis with the following cache key: `sample_data:{my_id}`
-- then the time to expire is set as 3600 seconds (that's the default)
-
-Passing resource_id_name is usually preferred.
-
-### 5.9 More Advanced Caching
-
-The behaviour of the `cache` decorator changes based on the request method of your endpoint.
-It caches the result if you are passing it to a **GET** endpoint, and it invalidates the cache with this key_prefix and id if passed to other endpoints (**PATCH**, **DELETE**).
-
-#### Invalidating Extra Keys
-
-If you also want to invalidate cache with a different key, you can use the decorator with the `to_invalidate_extra` variable.
-
-In the following example, I want to invalidate the cache for a certain `user_id`, since I'm deleting it, but I also want to invalidate the cache for the list of users, so it will not be out of sync.
-
-```python
-# The cache here will be saved as "{username}_posts:{username}":
-@router.get("/{username}/posts", response_model=List[PostRead])
-@cache(key_prefix="{username}_posts", resource_id_name="username")
-async def read_posts(request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)]):
- ...
-
-
-...
-
-# Invalidating cache for the former endpoint by just passing the key_prefix and id as a dictionary:
-@router.delete("/{username}/post/{id}")
-@cache(
- "{username}_post_cache",
- resource_id_name="id",
- to_invalidate_extra={"{username}_posts": "{username}"}, # also invalidate "{username}_posts:{username}" cache
-)
-async def erase_post(
- request: Request,
- username: str,
- id: int,
- current_user: Annotated[UserRead, Depends(get_current_user)],
- db: Annotated[AsyncSession, Depends(async_get_db)],
-):
- ...
-
-
-# And now I'll also invalidate when I update the user:
-@router.patch("/{username}/post/{id}", response_model=PostRead)
-@cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"})
-async def patch_post(
- request: Request,
- username: str,
- id: int,
- values: PostUpdate,
- current_user: Annotated[UserRead, Depends(get_current_user)],
- db: Annotated[AsyncSession, Depends(async_get_db)],
-):
- ...
-```
-
-> \[!WARNING\]
-> Note that adding `to_invalidate_extra` will not work for **GET** requests.
-
-#### Invalidate Extra By Pattern
-
-Let's assume we have an endpoint with a paginated response, such as:
-
-```python
-@router.get("/{username}/posts", response_model=PaginatedListResponse[PostRead])
-@cache(
- key_prefix="{username}_posts:page_{page}:items_per_page:{items_per_page}",
- resource_id_name="username",
- expiration=60,
-)
-async def read_posts(
- request: Request,
- username: str,
- db: Annotated[AsyncSession, Depends(async_get_db)],
- page: int = 1,
- items_per_page: int = 10,
-):
- db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False)
- if not db_user:
- raise HTTPException(status_code=404, detail="User not found")
-
- posts_data = await crud_posts.get_multi(
- db=db,
- offset=compute_offset(page, items_per_page),
- limit=items_per_page,
- schema_to_select=PostRead,
- created_by_user_id=db_user["id"],
- is_deleted=False,
- )
-
- return paginated_response(crud_data=posts_data, page=page, items_per_page=items_per_page)
-```
-
-Just passing `to_invalidate_extra` will not work to invalidate this cache, since the key will change based on the `page` and `items_per_page` values.
-To overcome this we may use the `pattern_to_invalidate_extra` parameter:
-
-```python
-@router.patch("/{username}/post/{id}")
-@cache("{username}_post_cache", resource_id_name="id", pattern_to_invalidate_extra=["{username}_posts:*"])
-async def patch_post(
- request: Request,
- username: str,
- id: int,
- values: PostUpdate,
- current_user: Annotated[UserRead, Depends(get_current_user)],
- db: Annotated[AsyncSession, Depends(async_get_db)],
-):
- ...
-```
-
-Now it will invalidate all caches with a key that matches the pattern `"{username}_posts:*"`, which will work for the paginated responses.
-
-> \[!CAUTION\]
-> Using `pattern_to_invalidate_extra` can be resource-intensive on large datasets. Use it judiciously and consider the potential impact on Redis performance. Be cautious with patterns that could match a large number of keys, as deleting many keys simultaneously may impact the performance of the Redis server.
-
-#### Client-side Caching
-
-For `client-side caching`, all you have to do is let the `Settings` class defined in `app/core/config.py` inherit from the `ClientSideCacheSettings` class. You can set the `CLIENT_CACHE_MAX_AGE` value in `.env,` it defaults to 60 (seconds).
-
-### 5.10 ARQ Job Queues
-
-> ๐ **[See background tasks guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/background-tasks/)**
-
-Depending on the problem your API is solving, you might want to implement a job queue. A job queue allows you to run tasks in the background, and is usually aimed at functions that require longer run times and don't directly impact user response in your frontend. As a rule of thumb, if a task takes more than 2 seconds to run, can be executed asynchronously, and its result is not needed for the next step of the user's interaction, then it is a good candidate for the job queue.
-
-> [!TIP]
-> Very common candidates for background functions are calls to and from LLM endpoints (e.g. OpenAI or Openrouter). This is because they span tens of seconds and often need to be further parsed and saved.
-
-#### Background task creation
-
-For simple background tasks, you can just create a function in the `app/core/worker/functions.py` file. For more complex tasks, we recommend you to create a new file in the `app/core/worker` directory.
-
-```python
-async def sample_background_task(ctx, name: str) -> str:
- await asyncio.sleep(5)
- return f"Task {name} is complete!"
-```
-
-Then add the function to the `WorkerSettings` class `functions` variable in `app/core/worker/settings.py` to make it available to the worker. If you created a new file in the `app/core/worker` directory, then simply import this function in the `app/core/worker/settings.py` file:
-
-```python
-from .functions import sample_background_task
-from .your_module import sample_complex_background_task
-
-class WorkerSettings:
- functions = [sample_background_task, sample_complex_background_task]
- ...
-```
-
-#### Add the task to an endpoint
-
-Once you have created the background task, you can add it to any endpoint of your choice to be enqueued. The best practice is to enqueue the task in a **POST** endpoint, while having a **GET** endpoint to get more information on the task. For more details on how job results are handled, check the [ARQ docs](https://arq-docs.helpmanual.io/#job-results).
-
-```python
-@router.post("/task", response_model=Job, status_code=201)
-async def create_task(message: str):
- job = await queue.pool.enqueue_job("sample_background_task", message)
- return {"id": job.job_id}
-
-
-@router.get("/task/{task_id}")
-async def get_task(task_id: str):
- job = ArqJob(task_id, queue.pool)
- return await job.info()
-```
-
-And finally run the worker in parallel to your fastapi application.
-
-> [!IMPORTANT]
-> For any change to the `sample_background_task` to be reflected in the worker, you need to restart the worker (e.g. the docker container).
-
-If you are using `docker compose`, the worker is already running.
-If you are doing it from scratch, run while in the `root` folder:
-
-```sh
-uv run arq src.app.core.worker.settings.WorkerSettings
-```
-
-#### Database session with background tasks
-
-With time your background functions will become 'workflows' increasing in complexity and requirements. Probably, you will need to use a database session to get, create, update, or delete data as part of this workflow.
-
-To do this, you can add the database session to the `ctx` object in the `startup` and `shutdown` functions in `app/core/worker/functions.py`, like in the example below:
-
-```python
-from arq.worker import Worker
-from ...core.db.database import async_get_db
-
-async def startup(ctx: Worker) -> None:
- ctx["db"] = await anext(async_get_db())
- logging.info("Worker Started")
-
-
-async def shutdown(ctx: Worker) -> None:
- await ctx["db"].close()
- logging.info("Worker end")
-```
-
-This will allow you to have the async database session always available in any background function and automatically close it on worker shutdown. Once you have this database session, you can use it as follows:
-
-```python
-from arq.worker import Worker
-
-async def your_background_function(
- ctx: Worker,
- post_id: int,
- ...
-) -> Any:
- db = ctx["db"]
-    post = await crud_posts.get(db=db, schema_to_select=PostRead, id=post_id)
- ...
-```
-
-> [!WARNING]
-> When using database sessions, you will want to use Pydantic objects. However, these objects don't mingle well with the serialization required by ARQ tasks and will be retrieved as a dictionary.
-
-### 5.11 Rate Limiting
-
-> ๐ **[See rate limiting guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/rate-limiting/)**
-
-To limit how many times a user can make a request in a certain interval of time (very useful to create subscription plans or just to protect your API against DDOS), you may just use the `rate_limiter_dependency` dependency:
-
-```python
-from fastapi import Depends
-
-from app.api.dependencies import rate_limiter_dependency
-from app.core.utils import queue
-from app.schemas.job import Job
-
-
-@router.post("/task", response_model=Job, status_code=201, dependencies=[Depends(rate_limiter_dependency)])
-async def create_task(message: str):
- job = await queue.pool.enqueue_job("sample_background_task", message)
- return {"id": job.job_id}
-```
-
-By default, if no token is passed in the header (that is - the user is not authenticated), the user will be limited by his IP address with the default `limit` (how many times the user can make this request every period) and `period` (time in seconds) defined in `.env`.
-
-Even though this is useful, real power comes from creating `tiers` (categories of users) and standard `rate_limits` (`limits` and `periods` defined for specific `paths` - that is - endpoints) for these tiers.
-
-All of the `tier` and `rate_limit` models, schemas, and endpoints are already created in the respective folders (and usable only by superusers). You may use the `create_tier` script to create the first tier (it uses the `.env` variable `TIER_NAME`, which is all you need to create a tier) or just use the api:
-
-Here I'll create a `free` tier:
-
-
-
-
-
-And a `pro` tier:
-
-
-
-
-
-Then I'll associate a `rate_limit` for the path `api/v1/tasks/task` for each of them.
-
-> \[!WARNING\]
-> Do not forget to add `api/v1/...` or any other prefix to the beginning of your path. For the structure of the boilerplate, use the `api/v1/` prefix.
+# Benav Labs FastAPI Boilerplate
-1 request every hour (3600 seconds) for the free tier:
+> **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
-
+ FastAPI
+ Pydantic v2
+ SQLAlchemy 2.0
+ PostgreSQL
+ Redis
+ ARQ
-10 requests every hour for the pro tier:
-
-
-
-
-
-Now let's read all the tiers available (`GET api/v1/tiers`):
-
-```javascript
-{
- "data": [
- {
- "name": "free",
- "id": 1,
- "created_at": "2023-11-11T05:57:25.420360"
- },
- {
- "name": "pro",
- "id": 2,
- "created_at": "2023-11-12T00:40:00.759847"
- }
- ],
- "total_count": 2,
- "has_more": false,
- "page": 1,
- "items_per_page": 10
-}
-```
-
-And read the `rate_limits` for the `pro` tier to ensure it's working (`GET api/v1/tier/pro/rate_limits`):
-
-```javascript
-{
- "data": [
- {
- "path": "api_v1_tasks_task",
- "limit": 10,
- "period": 3600,
- "id": 1,
- "tier_id": 2,
- "name": "api_v1_tasks:10:3600"
- }
- ],
- "total_count": 1,
- "has_more": false,
- "page": 1,
- "items_per_page": 10
-}
-```
-
-Now, whenever an authenticated user makes a `POST` request to the `api/v1/tasks/task`, they'll use the quota that is defined by their tier.
-You may check this getting the token from the `api/v1/login` endpoint, then passing it in the request header:
-
-```sh
-curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=test' \
--H 'Authorization: Bearer '
-```
-
-> \[!TIP\]
-> Since the `rate_limiter_dependency` dependency uses the `get_optional_user` dependency instead of `get_current_user`, it will not require authentication to be used, but will behave accordingly if the user is authenticated (and token is passed in header). If you want to ensure authentication, also use `get_current_user` if you need.
-
-To change a user's tier, you may just use the `PATCH api/v1/user/{username}/tier` endpoint.
-Note that for flexibility (since this is a boilerplate), it's not necessary to previously inform a tier_id to create a user, but you probably should set every user to a certain tier (let's say `free`) once they are created.
-
-> \[!WARNING\]
-> If a user does not have a `tier` or the tier does not have a defined `rate limit` for the path and the token is still passed to the request, the default `limit` and `period` will be used, this will be saved in `app/logs`.
-
-### 5.12 JWT Authentication
-
-> ๐ **[See authentication guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/)**
-
-#### 5.12.1 Details
-
-The JWT in this boilerplate is created in the following way:
-
-1. **JWT Access Tokens:** how you actually access protected resources is passing this token in the request header.
-1. **Refresh Tokens:** you use this type of token to get an `access token`, which you'll use to access protected resources.
-
-The `access token` is short lived (default 30 minutes) to reduce the damage of a potential leak. The `refresh token`, on the other hand, is long lived (default 7 days), and you use it to renew your `access token` without the need to provide username and password every time it expires.
-
-Since the `refresh token` lasts for a longer time, it's stored as a cookie in a secure way:
-
-```python
-# app/api/v1/login
-
-...
-response.set_cookie(
- key="refresh_token",
- value=refresh_token,
- httponly=True, # Prevent access through JavaScript
- secure=True, # Ensure cookie is sent over HTTPS only
- samesite="Lax", # Default to Lax for reasonable balance between security and usability
- max_age=number_of_seconds, # Set a max age for the cookie
-)
-...
-```
-
-You may change it to suit your needs. The possible options for `samesite` are:
-
-- `Lax`: Cookies will be sent in top-level navigations (like clicking on a link to go to another site), but not in API requests or images loaded from other sites.
-- `Strict`: Cookies are sent only on top-level navigations from the same site that set the cookie, enhancing privacy but potentially disrupting user sessions.
-- `None`: Cookies will be sent with both same-site and cross-site requests.
-
-#### 5.12.2 Usage
-
-What you should do with the client is:
-
-- `Login`: Send credentials to `/api/v1/login`. Store the returned access token in memory for subsequent requests.
-- `Accessing Protected Routes`: Include the access token in the Authorization header.
-- `Token Renewal`: On access token expiry, the front end should automatically call `/api/v1/refresh` for a new token.
-- `Login Again`: If refresh token is expired, credentials should be sent to `/api/v1/login` again, storing the new access token in memory.
-- `Logout`: Call /api/v1/logout to end the session securely.
-
-This authentication setup provides a robust, secure, and user-friendly way to handle user sessions in your API applications.
-
-### 5.13 Admin Panel
-
-> ๐ **[See admin panel guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/admin-panel/)**
-
-The boilerplate includes a powerful web-based admin interface built with [CRUDAdmin](https://github.com/benavlabs/crudadmin) that provides a comprehensive database management system.
-
-> **About CRUDAdmin**: CRUDAdmin is a modern admin interface generator for FastAPI applications. Learn more at:
-> - **๐ Documentation**: [benavlabs.github.io/crudadmin](https://benavlabs.github.io/crudadmin/)
-> - **๐ป GitHub**: [github.com/benavlabs/crudadmin](https://github.com/benavlabs/crudadmin)
-
-#### 5.13.1 Features
-
-The admin panel includes:
-
-- **User Management**: Create, view, update users with password hashing
-- **Tier Management**: Manage user tiers and permissions
-- **Post Management**: Full CRUD operations for posts
-- **Authentication**: Secure login system with session management
-- **Security**: IP restrictions, session timeouts, and secure cookies
-- **Redis Integration**: Optional Redis support for session storage
-- **Event Tracking**: Track admin actions and sessions
-
-#### 5.13.2 Access
-
-Once your application is running, you can access the admin panel at:
-
-```
-http://localhost:8000/admin
-```
-
-Use the admin credentials you defined in your `.env` file:
-- Username: `ADMIN_USERNAME`
-- Password: `ADMIN_PASSWORD`
-
-#### 5.13.3 Configuration
-
-The admin panel is highly configurable through environment variables:
-
-- **Basic Settings**: Enable/disable, mount path
-- **Security**: Session limits, timeouts, IP restrictions
-- **Tracking**: Event and session tracking
-- **Redis**: Optional Redis session storage
-
-See the [environment variables section](#31-environment-variables-env) for complete configuration options.
-
-#### 5.13.4 Customization
-
-**Adding New Models**
-
-To add new models to the admin panel, edit `src/app/admin/views.py`:
-
-```python
-from your_app.models import YourModel
-from your_app.schemas import YourCreateSchema, YourUpdateSchema
-
-def register_admin_views(admin: CRUDAdmin) -> None:
- # ... existing models ...
-
- admin.add_view(
- model=YourModel,
- create_schema=YourCreateSchema,
- update_schema=YourUpdateSchema,
- allowed_actions={"view", "create", "update", "delete"}
- )
-```
-
-**Advanced Configuration**
+**Docs:**
-For more complex model configurations:
+* 📖 [https://benavlabs.github.io/FastAPI-boilerplate/](https://benavlabs.github.io/FastAPI-boilerplate/)
+* 🧠 DeepWiki: [https://deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)
+* 💬 Discord: [https://discord.com/invite/TEmPs22gqB](https://discord.com/invite/TEmPs22gqB)
-```python
-# Handle models with problematic fields (e.g., TSVector)
-admin.add_view(
- model=Article,
- create_schema=ArticleCreate,
- update_schema=ArticleUpdate,
- select_schema=ArticleSelect, # Exclude problematic fields from read operations
- allowed_actions={"view", "create", "update", "delete"}
-)
-
-# Password field handling
-admin.add_view(
- model=User,
- create_schema=UserCreateWithPassword,
- update_schema=UserUpdateWithPassword,
- password_transformer=password_transformer, # Handles password hashing
- allowed_actions={"view", "create", "update"}
-)
+---
-# Read-only models
-admin.add_view(
- model=AuditLog,
- create_schema=AuditLogSchema,
- update_schema=AuditLogSchema,
- allowed_actions={"view"} # Only viewing allowed
-)
-```
+## TL;DR - Quickstart
-**Session Backend Configuration**
+Use the template on GitHub, create your repo, then:
-For production environments, consider using Redis for better performance:
+```bash
+# Clone your new repository
+git clone https://github.com//
+cd
-```python
-# Enable Redis sessions in your environment
-CRUD_ADMIN_REDIS_ENABLED=true
-CRUD_ADMIN_REDIS_HOST=localhost
-CRUD_ADMIN_REDIS_PORT=6379
-```
+# NOTE (added by me):
+# Running locally with Uvicorn.
+# The .env and docker-compose.yml files were taken from this Gist:
+# https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a
+# I kept the local Dockerfile since it uses 'uv' instead of Poetry
+# (the Gist version relies on Poetry).
-### 5.14 Running
+# TODO: Decide where to put the example file, since it is currently
+# being copied from the Gist.
-If you are using docker compose, just running the following command should ensure everything is working:
+# Copy and create your environment file
+cp src/.env.example src/.env
+# Fill in the minimal environment variables as described in the docs
-```sh
+# Run everything using Docker
docker compose up
-```
-
-If you are doing it from scratch, ensure your postgres and your redis are running, then
-while in the `root` folder, run to start the application with uvicorn server:
-
-```sh
-uv run uvicorn src.app.main:app --reload
-```
-
-And for the worker:
-
-```sh
-uv run arq src.app.core.worker.settings.WorkerSettings
-```
-### 5.15 Create Application
-
-If you want to stop tables from being created every time you run the api, you should disable this here:
-
-```python
-# app/main.py
-
-from .api import router
-from .core.config import settings
-from .core.setup import create_application
-
-# create_tables_on_start defaults to True
-app = create_application(router=router, settings=settings, create_tables_on_start=False)
-```
-
-This `create_application` function is defined in `app/core/setup.py`, and it's a flexible way to configure the behavior of your application.
-
-A few examples:
-
-- Deactivate or password protect /docs
-- Add client-side cache middleware
-- Add Startup and Shutdown event handlers for cache, queue and rate limit
-
-### 5.16 Opting Out of Services
-
-To opt out of services (like `Redis`, `Queue`, `Rate Limiter`), head to the `Settings` class in `src/app/core/config`:
-
-```python
-# src/app/core/config
-import os
-from enum import Enum
-
-from pydantic_settings import BaseSettings
-from starlette.config import Config
-
-current_file_dir = os.path.dirname(os.path.realpath(__file__))
-env_path = os.path.join(current_file_dir, "..", "..", ".env")
-config = Config(env_path)
-...
-
-class Settings(
- AppSettings,
- PostgresSettings,
- CryptSettings,
- FirstUserSettings,
- TestSettings,
- RedisCacheSettings,
- ClientSideCacheSettings,
- RedisQueueSettings,
- RedisRateLimiterSettings,
- DefaultRateLimitSettings,
- CRUDAdminSettings,
- EnvironmentSettings,
-):
- pass
-
-
-settings = Settings()
-```
-
-And remove the Settings of the services you do not need. For example, without using redis (removed `Cache`, `Queue` and `Rate limit`):
-
-```python
-class Settings(
- AppSettings,
- PostgresSettings,
- CryptSettings,
- FirstUserSettings,
- TestSettings,
- ClientSideCacheSettings,
- DefaultRateLimitSettings,
- EnvironmentSettings,
-):
- pass
-```
-
-Then comment or remove the services you do not want from `docker-compose.yml`. Here, I removed `redis` and `worker` services:
-
-```yml
-version: '3.8'
-
-services:
- web:
- build:
- context: .
- dockerfile: Dockerfile
- # -------- replace with comment to run with gunicorn --------
- command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
- # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
- env_file:
- - ./src/.env
- # -------- replace with comment if you are using nginx --------
- ports:
- - "8000:8000"
- # expose:
- # - "8000"
- depends_on:
- - db
- - redis
- volumes:
- - ./src/app:/code/app
- - ./src/.env:/code/.env
- db:
- image: postgres:13
- env_file:
- - ./src/.env
- volumes:
- - postgres-data:/var/lib/postgresql/data
- # -------- replace with comment to run migrations with docker --------
- expose:
- - "5432"
- # ports:
- # - 5432:5432
-
-volumes:
- postgres-data:
- redis-data:
- #pgadmin-data:
-```
-
-## 6. Running in Production
-
-> ๐ **[See production deployment guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/production/)**
-
-### 6.1 Uvicorn Workers with Gunicorn
-
-In production you may want to run using gunicorn to manage uvicorn workers:
-
-```sh
-command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
-```
-
-Here it's running with 4 workers, but you should test it depending on how many cores your machine has.
-
-To do this if you are using docker compose, just replace the comment:
-This part in `docker-compose.yml`:
-
-```YAML
-# docker-compose.yml
-
-# -------- replace with comment to run with gunicorn --------
-command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
-# command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
-```
-
-Should be changed to:
-
-```YAML
-# docker-compose.yml
-
-# -------- replace with comment to run with uvicorn --------
-# command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
-command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
-```
-
-And the same in `Dockerfile`:
-This part:
-
-```Dockerfile
-# Dockerfile
-
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
-```
-
-Should be changed to:
-
-```Dockerfile
-# Dockerfile
-
-# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
-```
-
-> \[!CAUTION\]
-> Do not forget to set the `ENVIRONMENT` in `.env` to `production` unless you want the API docs to be public.
-
-### 6.2 Running with NGINX
-
-NGINX is a high-performance web server, known for its stability, rich feature set, simple configuration, and low resource consumption. NGINX acts as a reverse proxy, that is, it receives client requests, forwards them to the FastAPI server (running via Uvicorn or Gunicorn), and then passes the responses back to the clients.
-
-To run with NGINX, you start by uncommenting the following part in your `docker-compose.yml`:
-
-```python
-# docker-compose.yml
-...
-# -------- uncomment to run with nginx --------
-# nginx:
-# image: nginx:latest
-# ports:
-# - "80:80"
-# volumes:
-# - ./default.conf:/etc/nginx/conf.d/default.conf
-# depends_on:
-# - web
-...
+# Open the API documentation
+open http://127.0.0.1:8000/docs
```
-Which should be changed to:
+> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
-```YAML
-# docker-compose.yml
-
-...
- #-------- uncomment to run with nginx --------
- nginx:
- image: nginx:latest
- ports:
- - "80:80"
- volumes:
- - ./default.conf:/etc/nginx/conf.d/default.conf
- depends_on:
- - web
-...
-```
-
-Then comment the following part:
-
-```YAML
-# docker-compose.yml
-
-services:
- web:
- ...
- # -------- Both of the following should be commented to run with nginx --------
- command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
- # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
-```
-
-Which becomes:
-
-```YAML
-# docker-compose.yml
-
-services:
- web:
- ...
- # -------- Both of the following should be commented to run with nginx --------
- # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
- # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
-```
-
-Then pick the way you want to run (uvicorn or gunicorn managing uvicorn workers) in `Dockerfile`.
-The one you want should be uncommented, comment the other one.
-
-```Dockerfile
-# Dockerfile
-
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
-```
-
-And finally head to `http://localhost/docs`.
-
-#### 6.2.1 One Server
-
-If you want to run with one server only, your setup should be ready. Just make sure the only part that is not a comment in `default.conf` is:
-
-```conf
-# default.conf
-
-# ---------------- Running With One Server ----------------
-server {
- listen 80;
-
- location / {
- proxy_pass http://web:8000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
-}
-```
-
-So just type on your browser: `http://localhost/docs`.
-
-#### 6.2.2 Multiple Servers
-
-NGINX can distribute incoming network traffic across multiple servers, improving the efficiency and capacity utilization of your application.
-
-To run with multiple servers, just comment the `Running With One Server` part in `default.conf` and Uncomment the other one:
-
-```conf
-# default.conf
-
-# ---------------- Running With One Server ----------------
-...
-
-# ---------------- To Run with Multiple Servers, Uncomment below ----------------
-upstream fastapi_app {
- server fastapi1:8000; # Replace with actual server names or IP addresses
- server fastapi2:8000;
- # Add more servers as needed
-}
-
-server {
- listen 80;
-
- location / {
- proxy_pass http://fastapi_app;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
-}
-```
-
-And finally, on your browser: `http://localhost/docs`.
-
-> \[!WARNING\]
-> Note that we are using `fastapi1:8000` and `fastapi2:8000` as examples, you should replace it with the actual name of your service and the port it's running on.
-
-## 7. Testing
-
-> ๐ **[See comprehensive testing guide in our docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/testing/)**
-
-This project uses **fast unit tests** that don't require external services like databases or Redis. Tests are isolated using mocks and run in milliseconds.
-
-### 7.1 Writing Tests
-
-Create test files with the name `test_{entity}.py` in the `tests/` folder, replacing `{entity}` with what you're testing:
-
-```sh
-touch tests/test_items.py
-```
-
-Follow the structure in `tests/test_user.py` for examples. Our tests use:
-
-- **pytest** with **pytest-asyncio** for async support
-- **unittest.mock** for mocking dependencies
-- **AsyncMock** for async function mocking
-- **Faker** for generating test data
+---
-Example test structure:
+## Features
-```python
-import pytest
-from unittest.mock import AsyncMock, patch
-from src.app.api.v1.users import write_user
+* ⚡️ Fully async FastAPI + SQLAlchemy 2.0
+* 🧱 Pydantic v2 models & validation
+* 🔐 JWT auth (access + refresh), cookies for refresh
+* 🎮 Rate limiter + tiers (free/pro/etc.)
+* 🧰 FastCRUD for efficient CRUD & pagination
+* 🧑‍💼 **CRUDAdmin**: minimal admin panel (optional)
+* 📦 ARQ background jobs (Redis)
+* 🧠 Redis caching (server + client-side headers)
+* 🐳 One-command Docker Compose
+* 🌐 NGINX & Gunicorn recipes for prod
-class TestWriteUser:
- @pytest.mark.asyncio
- async def test_create_user_success(self, mock_db, sample_user_data):
- """Test successful user creation."""
- with patch("src.app.api.v1.users.crud_users") as mock_crud:
- mock_crud.exists = AsyncMock(return_value=False)
- mock_crud.create = AsyncMock(return_value=Mock(id=1))
+---
- result = await write_user(Mock(), sample_user_data, mock_db)
+## When to use it
- assert result.id == 1
- mock_crud.create.assert_called_once()
-```
+* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
+* You value **sensible defaults** with the freedom to opt-out of modules.
+* You prefer **docs over boilerplate** in README - depth lives in the site.
-### 7.2 Running Tests
+Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
-Run all unit tests:
+---
-```sh
-uv run pytest
-```
+## What's inside (high-level)
-Run specific test file:
+* **App**: FastAPI app factory, env-aware docs exposure
+* **Auth**: JWT access/refresh, logout via token blacklist
+* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
+* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
+* **Caching**: decorator-based endpoints cache; client cache headers
+* **Queues**: ARQ worker (async jobs), Redis connection helpers
+* **Rate limits**: per-tier + per-path rules
+* **Admin**: CRUDAdmin views for common models (optional)
-```sh
-uv run pytest tests/test_user_unit.py
-```
+> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
-Run specific test file:
+---
-```sh
-uv run pytest tests/test_user_unit.py
-```
+## Configuration (minimal)
-Run with verbose output:
+Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
-```sh
-uv run pytest -v
-```
+* `ENVIRONMENT=local|staging|production` controls API docs exposure
+* Set `ADMIN_*` to enable the first admin user
-Run specific test:
+---
-```sh
-uv run pytest tests/test_user_unit.py::TestWriteUser::test_create_user_success
-```
+## Common tasks
-### 7.3 Test Configuration
+```bash
+# run locally with reload (without Docker)
+uv sync && uv run uvicorn src.app.main:app --reload
-Tests are configured in `pyproject.toml`:
+# run Alembic migrations
+cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head
-```toml
-[tool.pytest.ini_options]
-filterwarnings = [
- "ignore::PendingDeprecationWarning:starlette.formparsers",
-]
+# enqueue a background job (example endpoint)
+curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
```
-### 7.4 Test Structure
-
-- **Unit Tests** (`test_*_unit.py`): Fast, isolated tests with mocked dependencies
-- **Fixtures** (`conftest.py`): Shared test fixtures and mock setups
-- **Helpers** (`tests/helpers/`): Utilities for generating test data and mocks
-
-### 7.5 Benefits of Our Approach
-
-โ
**Fast**: Tests run in ~0.04 seconds
-โ
**Reliable**: No external dependencies required
-โ
**Isolated**: Each test focuses on one piece of functionality
-โ
**Maintainable**: Easy to understand and modify
-โ
**CI/CD Ready**: Run anywhere without infrastructure setup
+More examples (superuser creation, tiers, rate limits, admin usage) - **docs**.
-## 8. Contributing
-
-Read [contributing](CONTRIBUTING.md).
-
-## 9. References
+---
-This project was inspired by a few projects, it's based on them with things changed to the way I like (and pydantic, sqlalchemy updated)
+## Contributing
-- [`Full Stack FastAPI and PostgreSQL`](https://github.com/tiangolo/full-stack-fastapi-postgresql) by @tiangolo himself
-- [`FastAPI Microservices`](https://github.com/Kludex/fastapi-microservices) by @kludex which heavily inspired this boilerplate
-- [`Async Web API with FastAPI + SQLAlchemy 2.0`](https://github.com/rhoboro/async-fastapi-sqlalchemy) for sqlalchemy 2.0 ORM examples
-- [`FastaAPI Rocket Boilerplate`](https://github.com/asacristani/fastapi-rocket-boilerplate/tree/main) for docker compose
+Issues and PRs are welcome. Please read **CONTRIBUTING.md** and follow the style of existing modules (type hints, async/await, explicit None checks, and paginated responses).
-## 10. License
+---
-[`MIT`](LICENSE.md)
+## License
-## 11. Contact
+MIT - see `LICENSE.md`.
-Benav Labs โ [benav.io](https://benav.io)
-[github.com/benavlabs](https://github.com/benavlabs/)
+---
-
-
-
-
+
+
+
+
+
diff --git a/new_readme.md b/new_readme.md
deleted file mode 100644
index 86bb54f..0000000
--- a/new_readme.md
+++ /dev/null
@@ -1,138 +0,0 @@
-# Benav Labs FastAPI Boilerplate
-
-> **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
-
-
- FastAPI
- Pydantic v2
- SQLAlchemy 2.0
- PostgreSQL
- Redis
- ARQ
-
-
-**Docs:**
-
-* ๐ [https://benavlabs.github.io/FastAPI-boilerplate/](https://benavlabs.github.io/FastAPI-boilerplate/)
-* ๐ง DeepWiki: [https://deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)
-* ๐ฌ Discord: [https://discord.com/invite/TEmPs22gqB](https://discord.com/invite/TEmPs22gqB)
-
----
-
-## TL;DR - Quickstart
-
-Use the template on GitHub, create your repo, then:
-
-```bash
-# Clone your new repository
-git clone https://github.com//
-cd
-
-# NOTE (added by me):
-# Running locally with Uvicorn.
-# The .env and docker-compose.yml files were taken from this Gist:
-# https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a
-# I kept the local Dockerfile since it uses 'uv' instead of Poetry
-# (the Gist version relies on Poetry).
-
-# TODO: Decide where to put the example file, since it is currently
-# being copied from the Gist.
-
-# Copy and create your environment file
-cp src/.env.example src/.env
-# Fill in the minimal environment variables as described in the docs
-
-# Run everything using Docker
-docker compose up
-
-# Open the API documentation
-open http://127.0.0.1:8000/docs
-```
-
-> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
-
----
-
-## Features
-
-* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
-* ๐งฑ Pydantic v2 models & validation
-* ๐ JWT auth (access + refresh), cookies for refresh
-* ๐ฎ Rate limiter + tiers (free/pro/etc.)
-* ๐งฐ FastCRUD for efficient CRUD & pagination
-* ๐งโ๐ผ **CRUDAdmin**: minimal admin panel (optional)
-* ๐ฆ ARQ background jobs (Redis)
-* ๐ง Redis caching (server + client-side headers)
-* ๐ณ One-command Docker Compose
-* ๐ NGINX & Gunicorn recipes for prod
-
----
-
-## When to use it
-
-* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
-* You value **sensible defaults** with the freedom to opt-out of modules.
-* You prefer **docs over boilerplate** in README - depth lives in the site.
-
-Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
-
----
-
-## What's inside (high-level)
-
-* **App**: FastAPI app factory, env-aware docs exposure
-* **Auth**: JWT access/refresh, logout via token blacklist
-* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
-* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
-* **Caching**: decorator-based endpoints cache; client cache headers
-* **Queues**: ARQ worker (async jobs), Redis connection helpers
-* **Rate limits**: per-tier + per-path rules
-* **Admin**: CRUDAdmin views for common models (optional)
-
-> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
-
----
-
-## Configuration (minimal)
-
-Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
-
-* `ENVIRONMENT=local|staging|production` controls API docs exposure
-* Set `ADMIN_*` to enable the first admin user
-
----
-
-## Common tasks
-
-```bash
-# run locally with reload (without Docker)
-uv sync && uv run uvicorn src.app.main:app --reload
-
-# run Alembic migrations
-cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head
-
-# enqueue a background job (example endpoint)
-curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
-```
-
-More examples (superuser creation, tiers, rate limits, admin usage) - **docs**.
-
----
-
-## Contributing
-
-Issues and PRs are welcome. Please read **CONTRIBUTING.md** and follow the style of existing modules (type hints, async/await, explicit None checks, and paginated responses).
-
----
-
-## License
-
-MIT - see `LICENSE.md`.
-
----
-
-
-
-
-
-
From be4aea215024178ed4c1b8bfc9ea0061c9b7b09e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Sat, 8 Nov 2025 16:44:22 -0300
Subject: [PATCH 03/19] Adding files for Running locally with Uvicorn,
previously living in a Gist.
---
.gitignore | 5 ++
Dockerfile | 44 -------------
README.md | 18 +++---
docker-compose.test.yml | 8 ---
docker-compose.yml | 111 ---------------------------------
scripts/local_with_uvicorn/env | 53 ++++++++++++++++
6 files changed, 65 insertions(+), 174 deletions(-)
delete mode 100644 Dockerfile
delete mode 100644 docker-compose.test.yml
delete mode 100644 docker-compose.yml
create mode 100644 scripts/local_with_uvicorn/env
diff --git a/.gitignore b/.gitignore
index ab9ad70..00f65d8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -135,3 +135,8 @@ cython_debug/
.idea
.vscode/
+
+# Config files:
+src/.env
+docker-compose.yml
+Dockerfile
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 2c3795a..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,44 +0,0 @@
-# --------- Builder Stage ---------
-FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS builder
-
-# Set environment variables for uv
-ENV UV_COMPILE_BYTECODE=1
-ENV UV_LINK_MODE=copy
-
-WORKDIR /app
-
-# Install dependencies first (for better layer caching)
-RUN --mount=type=cache,target=/root/.cache/uv \
- --mount=type=bind,source=uv.lock,target=uv.lock \
- --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
- uv sync --locked --no-install-project
-
-# Copy the project source code
-COPY . /app
-
-# Install the project in non-editable mode
-RUN --mount=type=cache,target=/root/.cache/uv \
- uv sync --locked --no-editable
-
-# --------- Final Stage ---------
-FROM python:3.11-slim-bookworm
-
-# Create a non-root user for security
-RUN groupadd --gid 1000 app \
- && useradd --uid 1000 --gid app --shell /bin/bash --create-home app
-
-# Copy the virtual environment from the builder stage
-COPY --from=builder --chown=app:app /app/.venv /app/.venv
-
-# Ensure the virtual environment is in the PATH
-ENV PATH="/app/.venv/bin:$PATH"
-
-# Switch to the non-root user
-USER app
-
-# Set the working directory
-WORKDIR /code
-
-# -------- replace with comment to run with gunicorn --------
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
diff --git a/README.md b/README.md
index 86bb54f..fba6d84 100644
--- a/README.md
+++ b/README.md
@@ -28,21 +28,17 @@ Use the template on GitHub, create your repo, then:
git clone https://github.com//
cd
-# NOTE (added by me):
-# Running locally with Uvicorn.
-# The .env and docker-compose.yml files were taken from this Gist:
-# https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a
-# I kept the local Dockerfile since it uses 'uv' instead of Poetry
-# (the Gist version relies on Poetry).
+# Running locally with Uvicorn:
-# TODO: Decide where to put the example file, since it is currently
-# being copied from the Gist.
+# Copy Dockerfile and Docker Compose files:
+cp scripts/local_with_uvicorn/Dockerfile Dockerfile
+cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
# Copy and create your environment file
-cp src/.env.example src/.env
-# Fill in the minimal environment variables as described in the docs
+cp scripts/local_with_uvicorn/env src/.env
+# If you want, modify the minimal environment variables as described in the docs.
-# Run everything using Docker
+# Run everything using Docker:
docker compose up
# Open the API documentation
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
deleted file mode 100644
index ad0be15..0000000
--- a/docker-compose.test.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-services:
- web:
- user: root # Run as root for tests to allow global package installation
- environment:
- - PYTHONPATH=/usr/local/lib/python3.11/site-packages
- command: bash -c "pip install faker pytest-asyncio pytest-mock && pytest tests/ -v"
- volumes:
- - ./tests:/code/tests
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index b6daecc..0000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,111 +0,0 @@
-services:
- web:
- build:
- context: .
- dockerfile: Dockerfile
- # -------- replace with comment to run with gunicorn --------
- command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
- # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
- env_file:
- - ./src/.env
- # -------- replace with comment if you are using nginx --------
- ports:
- - "8000:8000"
- # expose:
- # - "8000"
- depends_on:
- - db
- - redis
- volumes:
- - ./src/app:/code/app
- - ./src/.env:/code/.env
-
- worker:
- build:
- context: .
- dockerfile: Dockerfile
- command: arq app.core.worker.settings.WorkerSettings
- env_file:
- - ./src/.env
- depends_on:
- - db
- - redis
- volumes:
- - ./src/app:/code/app
- - ./src/.env:/code/.env
-
- db:
- image: postgres:13
- env_file:
- - ./src/.env
- volumes:
- - postgres-data:/var/lib/postgresql/data
- # -------- replace with comment to run migrations with docker --------
- expose:
- - "5432"
- # ports:
- # - 5432:5432
-
- redis:
- image: redis:alpine
- volumes:
- - redis-data:/data
- expose:
- - "6379"
-
- #-------- uncomment to run with pgadmin --------
- # pgadmin:
- # container_name: pgadmin4
- # image: dpage/pgadmin4:latest
- # restart: always
- # ports:
- # - "5050:80"
- # volumes:
- # - pgadmin-data:/var/lib/pgadmin
- # env_file:
- # - ./src/.env
- # depends_on:
- # - db
-
- #-------- uncomment to run with nginx --------
- # nginx:
- # image: nginx:latest
- # ports:
- # - "80:80"
- # volumes:
- # - ./default.conf:/etc/nginx/conf.d/default.conf
- # depends_on:
- # - web
-
- #-------- uncomment to create first superuser --------
- # create_superuser:
- # build:
- # context: .
- # dockerfile: Dockerfile
- # env_file:
- # - ./src/.env
- # depends_on:
- # - db
- # - web
- # command: python -m src.scripts.create_first_superuser
- # volumes:
- # - ./src:/code/src
-
- #-------- uncomment to create first tier --------
- # create_tier:
- # build:
- # context: .
- # dockerfile: Dockerfile
- # env_file:
- # - ./src/.env
- # depends_on:
- # - db
- # - web
- # command: python -m src.scripts.create_first_tier
- # volumes:
- # - ./src:/code/src
-
-volumes:
- postgres-data:
- redis-data:
- #pgadmin-data:
diff --git a/scripts/local_with_uvicorn/env b/scripts/local_with_uvicorn/env
new file mode 100644
index 0000000..d312bed
--- /dev/null
+++ b/scripts/local_with_uvicorn/env
@@ -0,0 +1,53 @@
+# ------------- app settings -------------
+APP_NAME="My Project"
+APP_DESCRIPTION="My Project Description"
+APP_VERSION="0.1"
+CONTACT_NAME="Me"
+CONTACT_EMAIL="my.email@example.com"
+LICENSE_NAME="MIT"
+
+# ------------- database -------------
+POSTGRES_USER="postgres"
+POSTGRES_PASSWORD=1234
+POSTGRES_SERVER="db"
+POSTGRES_PORT=5432
+POSTGRES_DB="postgres"
+POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://"
+
+# ------------- crypt -------------
+SECRET_KEY=de2132a4a3a029d6a93a2aefcb519f0219990f92ca258a7c5ed938a444dbe1c8
+ALGORITHM=HS256
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+
+# ------------- admin -------------
+ADMIN_NAME="admin"
+ADMIN_EMAIL="admin@example.com"
+ADMIN_USERNAME="admin"
+ADMIN_PASSWORD="Str1ngst!"
+
+# ------------- redis cache -------------
+REDIS_CACHE_HOST="redis"
+REDIS_CACHE_PORT=6379
+
+# ------------- redis queue -------------
+REDIS_QUEUE_HOST="redis"
+REDIS_QUEUE_PORT=6379
+
+# ------------- redis rate limit -------------
+REDIS_RATE_LIMIT_HOST="redis"
+REDIS_RATE_LIMIT_PORT=6379
+
+# ------------- client side cache -------------
+CLIENT_CACHE_MAX_AGE=60
+
+# ------------- test -------------
+TEST_NAME="Tester User"
+TEST_EMAIL="test@tester.com"
+TEST_USERNAME="testeruser"
+TEST_PASSWORD="Str1ng$t"
+
+# ------------- environment -------------
+ENVIRONMENT="local"
+
+# ------------- first tier -------------
+TIER_NAME="free"
From 19945ee3469f02f5b79af004e56f753d5e69f74d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Sat, 8 Nov 2025 16:47:31 -0300
Subject: [PATCH 04/19] Adding example files for running locally with Uvicorn.
---
.gitignore | 10 +-
scripts/local_with_uvicorn/Dockerfile | 44 +++++++
scripts/local_with_uvicorn/docker-compose.yml | 112 ++++++++++++++++++
3 files changed, 164 insertions(+), 2 deletions(-)
create mode 100644 scripts/local_with_uvicorn/Dockerfile
create mode 100644 scripts/local_with_uvicorn/docker-compose.yml
diff --git a/.gitignore b/.gitignore
index 00f65d8..9dec447 100644
--- a/.gitignore
+++ b/.gitignore
@@ -138,5 +138,11 @@ cython_debug/
# Config files:
src/.env
-docker-compose.yml
-Dockerfile
+
+# Ignore root files:
+/Dockerfile
+/docker-compose.yml
+
+# Don't ignore files inside the scripts folder:
+!scripts/*
+
diff --git a/scripts/local_with_uvicorn/Dockerfile b/scripts/local_with_uvicorn/Dockerfile
new file mode 100644
index 0000000..2c3795a
--- /dev/null
+++ b/scripts/local_with_uvicorn/Dockerfile
@@ -0,0 +1,44 @@
+# --------- Builder Stage ---------
+FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS builder
+
+# Set environment variables for uv
+ENV UV_COMPILE_BYTECODE=1
+ENV UV_LINK_MODE=copy
+
+WORKDIR /app
+
+# Install dependencies first (for better layer caching)
+RUN --mount=type=cache,target=/root/.cache/uv \
+ --mount=type=bind,source=uv.lock,target=uv.lock \
+ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+ uv sync --locked --no-install-project
+
+# Copy the project source code
+COPY . /app
+
+# Install the project in non-editable mode
+RUN --mount=type=cache,target=/root/.cache/uv \
+ uv sync --locked --no-editable
+
+# --------- Final Stage ---------
+FROM python:3.11-slim-bookworm
+
+# Create a non-root user for security
+RUN groupadd --gid 1000 app \
+ && useradd --uid 1000 --gid app --shell /bin/bash --create-home app
+
+# Copy the virtual environment from the builder stage
+COPY --from=builder --chown=app:app /app/.venv /app/.venv
+
+# Ensure the virtual environment is in the PATH
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Switch to the non-root user
+USER app
+
+# Set the working directory
+WORKDIR /code
+
+# -------- replace with comment to run with gunicorn --------
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
+# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
diff --git a/scripts/local_with_uvicorn/docker-compose.yml b/scripts/local_with_uvicorn/docker-compose.yml
new file mode 100644
index 0000000..14cf968
--- /dev/null
+++ b/scripts/local_with_uvicorn/docker-compose.yml
@@ -0,0 +1,112 @@
+services:
+ web:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ # -------- Both of the following commands should be commented to run with nginx --------
+
+ # -------- replace with comment to run with gunicorn --------
+ command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
+ # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
+ env_file:
+ - ./src/.env
+ # -------- replace with expose if you are using nginx --------
+ ports:
+ - "8000:8000"
+ # expose:
+ # - "8000"
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ worker:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ command: arq app.core.worker.settings.WorkerSettings
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ db:
+ image: postgres:13
+ env_file:
+ - ./src/.env
+ volumes:
+ - postgres-data:/var/lib/postgresql/data
+ expose:
+ - "5432"
+
+ redis:
+ image: redis:alpine
+ volumes:
+ - redis-data:/data
+ expose:
+ - "6379"
+
+ #-------- uncomment to run with nginx --------
+ # nginx:
+ # image: nginx:latest
+ # ports:
+ # - "80:80"
+ # volumes:
+ # - ./default.conf:/etc/nginx/conf.d/default.conf
+ # depends_on:
+ # - web
+
+ #-------- create first superuser (enabled; comment out to disable) --------
+ create_superuser:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - web
+ command: python -m src.scripts.create_first_superuser
+ volumes:
+ - ./src:/code/src
+
+ #-------- run tests (enabled; comment out to disable) --------
+ pytest:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - create_superuser
+ - redis
+ command: python -m pytest ./tests
+ volumes:
+ - .:/code
+
+ #-------- uncomment to create first tier --------
+ # create_tier:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - create_superuser
+ # - db
+ # - web
+ # command: python -m src.scripts.create_first_tier
+ # volumes:
+ # - ./src:/code/src
+
+volumes:
+ postgres-data:
+ redis-data:
+
From d8196a7e19ef9ba2eb14e73a0f9a377d11c2fb42 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Sat, 8 Nov 2025 16:54:46 -0300
Subject: [PATCH 05/19] Improving README.md
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index fba6d84..afdc0db 100644
--- a/README.md
+++ b/README.md
@@ -25,8 +25,8 @@ Use the template on GitHub, create your repo, then:
```bash
# Clone your new repository
-git clone https://github.com//
-cd
+git clone https://github.com//FastAPI-boilerplate
+cd FastAPI-boilerplate
# Running locally with Uvicorn:
From 67284ff85346cd56e52cdcd3a235b3729383f5dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Sat, 8 Nov 2025 17:01:01 -0300
Subject: [PATCH 06/19] Adding the rest of the scripts for running the
boilerplate with Gunicorn and in prod settings. The ones that were on the
Gist.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Carlos Andrés Planchón Prestes
---
README.md | 5 +
.../Dockerfile | 27 +++++
.../docker-compose.yml | 114 ++++++++++++++++++
scripts/gunicorn_managing_uvicorn_workers/env | 53 ++++++++
scripts/production_with_nginx/Dockerfile | 27 +++++
.../production_with_nginx/docker-compose.yml | 112 +++++++++++++++++
scripts/production_with_nginx/env | 53 ++++++++
7 files changed, 391 insertions(+)
create mode 100644 scripts/gunicorn_managing_uvicorn_workers/Dockerfile
create mode 100644 scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
create mode 100644 scripts/gunicorn_managing_uvicorn_workers/env
create mode 100644 scripts/production_with_nginx/Dockerfile
create mode 100644 scripts/production_with_nginx/docker-compose.yml
create mode 100644 scripts/production_with_nginx/env
diff --git a/README.md b/README.md
index afdc0db..bee84e6 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,9 @@ Use the template on GitHub, create your repo, then:
git clone https://github.com//FastAPI-boilerplate
cd FastAPI-boilerplate
+# In the scripts/ folder, you can find scripts to run FastAPI-Boilerplate locally, with uvicorn workers, and in production with nginx.
+# NOTE: For now, only local scripts are updated.
+
# Running locally with Uvicorn:
# Copy Dockerfile and Docker Compose files:
@@ -93,6 +96,8 @@ Not a fit if you need a monorepo microservices scaffold - see the docs for point
Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
+[https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/)
+
* `ENVIRONMENT=local|staging|production` controls API docs exposure
* Set `ADMIN_*` to enable the first admin user
diff --git a/scripts/gunicorn_managing_uvicorn_workers/Dockerfile b/scripts/gunicorn_managing_uvicorn_workers/Dockerfile
new file mode 100644
index 0000000..33c6419
--- /dev/null
+++ b/scripts/gunicorn_managing_uvicorn_workers/Dockerfile
@@ -0,0 +1,27 @@
+# --------- requirements ---------
+
+FROM python:3.11 as requirements-stage
+
+WORKDIR /tmp
+
+RUN pip install poetry
+
+COPY ./pyproject.toml ./poetry.lock* /tmp/
+
+RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
+
+
+# --------- final image build ---------
+FROM python:3.11
+
+WORKDIR /code
+
+COPY --from=requirements-stage /tmp/requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./src/app /code/app
+
+# -------- replace with comment to run with gunicorn --------
+# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
+CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
diff --git a/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml b/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
new file mode 100644
index 0000000..f20c9e8
--- /dev/null
+++ b/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
@@ -0,0 +1,114 @@
+version: '3.8'
+
+services:
+ web:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ # -------- Both of the following commands should be commented to run with nginx --------
+
+ # -------- replace with comment to run with gunicorn or just uvicorn --------
+ # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
+ command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
+ env_file:
+ - ./src/.env
+ # -------- replace with expose if you are using nginx --------
+ ports:
+ - "8000:8000"
+ # expose:
+ # - "8000"
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ worker:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ command: arq app.core.worker.settings.WorkerSettings
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ db:
+ image: postgres:13
+ env_file:
+ - ./src/.env
+ volumes:
+ - postgres-data:/var/lib/postgresql/data
+ expose:
+ - "5432"
+
+ redis:
+ image: redis:alpine
+ volumes:
+ - redis-data:/data
+ expose:
+ - "6379"
+
+ #-------- uncomment to run with nginx --------
+ # nginx:
+ # image: nginx:latest
+ # ports:
+ # - "80:80"
+ # volumes:
+ # - ./default.conf:/etc/nginx/conf.d/default.conf
+ # depends_on:
+ # - web
+
+ #-------- create first superuser (enabled; comment out to disable) --------
+ create_superuser:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - web
+ command: python -m src.scripts.create_first_superuser
+ volumes:
+ - ./src:/code/src
+
+ #-------- uncomment to run tests --------
+ # pytest:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - db
+ # - create_superuser
+ # - redis
+ # command: python -m pytest ./tests
+ # volumes:
+ # - .:/code
+
+ #-------- uncomment to create first tier --------
+ # create_tier:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - create_superuser
+ # - db
+ # - web
+ # command: python -m src.scripts.create_first_tier
+ # volumes:
+ # - ./src:/code/src
+
+volumes:
+ postgres-data:
+ redis-data:
+
\ No newline at end of file
diff --git a/scripts/gunicorn_managing_uvicorn_workers/env b/scripts/gunicorn_managing_uvicorn_workers/env
new file mode 100644
index 0000000..a438ca5
--- /dev/null
+++ b/scripts/gunicorn_managing_uvicorn_workers/env
@@ -0,0 +1,53 @@
+# ------------- app settings -------------
+APP_NAME="My Project"
+APP_DESCRIPTION="My Project Description"
+APP_VERSION="0.1"
+CONTACT_NAME="Me"
+CONTACT_EMAIL="my.email@example.com"
+LICENSE_NAME="MIT"
+
+# ------------- database -------------
+POSTGRES_USER="postgres"
+POSTGRES_PASSWORD=1234
+POSTGRES_SERVER="db"
+POSTGRES_PORT=5432
+POSTGRES_DB="postgres"
+POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://"
+
+# ------------- crypt -------------
+SECRET_KEY=953843cd400d99a039698e7feb46ca1b3e33c44fee2c24c6d88cf0f0b290fb61
+ALGORITHM=HS256
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+
+# ------------- admin -------------
+ADMIN_NAME="admin"
+ADMIN_EMAIL="admin@example.com"
+ADMIN_USERNAME="admin"
+ADMIN_PASSWORD="Str1ngst!"
+
+# ------------- redis cache -------------
+REDIS_CACHE_HOST="redis"
+REDIS_CACHE_PORT=6379
+
+# ------------- redis queue -------------
+REDIS_QUEUE_HOST="redis"
+REDIS_QUEUE_PORT=6379
+
+# ------------- redis rate limit -------------
+REDIS_RATE_LIMIT_HOST="redis"
+REDIS_RATE_LIMIT_PORT=6379
+
+# ------------- client side cache -------------
+CLIENT_CACHE_MAX_AGE=60
+
+# ------------- test -------------
+TEST_NAME="Tester User"
+TEST_EMAIL="test@tester.com"
+TEST_USERNAME="testeruser"
+TEST_PASSWORD="Str1ng$t"
+
+# ------------- environment -------------
+ENVIRONMENT="staging"
+
+# ------------- first tier -------------
+TIER_NAME="free"
diff --git a/scripts/production_with_nginx/Dockerfile b/scripts/production_with_nginx/Dockerfile
new file mode 100644
index 0000000..49538b8
--- /dev/null
+++ b/scripts/production_with_nginx/Dockerfile
@@ -0,0 +1,27 @@
+# --------- requirements ---------
+
+FROM python:3.11 as requirements-stage
+
+WORKDIR /tmp
+
+RUN pip install poetry
+
+COPY ./pyproject.toml ./poetry.lock* /tmp/
+
+RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
+
+
+# --------- final image build ---------
+FROM python:3.11
+
+WORKDIR /code
+
+COPY --from=requirements-stage /tmp/requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./src/app /code/app
+
+# -------- replace with comment to run with gunicorn --------
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
+# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
diff --git a/scripts/production_with_nginx/docker-compose.yml b/scripts/production_with_nginx/docker-compose.yml
new file mode 100644
index 0000000..e7ca163
--- /dev/null
+++ b/scripts/production_with_nginx/docker-compose.yml
@@ -0,0 +1,112 @@
+version: '3.8'
+
+services:
+ web:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ # -------- Both of the following commands should be commented to run with nginx --------
+
+ # -------- replace with comment to run with gunicorn --------
+ # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
+ # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
+ env_file:
+ - ./src/.env
+ # -------- replace ports with expose if you are using nginx --------
+ # ports:
+ # - "8000:8000"
+ expose:
+ - "8000"
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ worker:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ command: arq app.core.worker.settings.WorkerSettings
+ env_file:
+ - ./src/.env
+ depends_on:
+ - db
+ - redis
+ volumes:
+ - ./src/app:/code/app
+ - ./src/.env:/code/.env
+
+ db:
+ image: postgres:13
+ env_file:
+ - ./src/.env
+ volumes:
+ - postgres-data:/var/lib/postgresql/data
+ expose:
+ - "5432"
+
+ redis:
+ image: redis:alpine
+ volumes:
+ - redis-data:/data
+ expose:
+ - "6379"
+
+ #-------- nginx reverse proxy (enabled; comment out to disable) --------
+ nginx:
+ image: nginx:latest
+ ports:
+ - "80:80"
+ volumes:
+ - ./default.conf:/etc/nginx/conf.d/default.conf
+ depends_on:
+ - web
+
+ #-------- uncomment to create first superuser --------
+ # create_superuser:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - db
+ # - web
+ # command: python -m src.scripts.create_first_superuser
+ # volumes:
+ # - ./src:/code/src
+
+ #-------- uncomment to run tests --------
+ # pytest:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - web
+ # - redis
+ # command: python -m pytest ./tests
+ # volumes:
+ # - .:/code
+
+ #-------- uncomment to create first tier --------
+ # create_tier:
+ # build:
+ # context: .
+ # dockerfile: Dockerfile
+ # env_file:
+ # - ./src/.env
+ # depends_on:
+ # - create_superuser
+ # - db
+ # - web
+ # command: python -m src.scripts.create_first_tier
+ # volumes:
+ # - ./src:/code/src
+
+volumes:
+ postgres-data:
+ redis-data:
diff --git a/scripts/production_with_nginx/env b/scripts/production_with_nginx/env
new file mode 100644
index 0000000..107cfe4
--- /dev/null
+++ b/scripts/production_with_nginx/env
@@ -0,0 +1,53 @@
+# ------------- app settings -------------
+APP_NAME="My Project"
+APP_DESCRIPTION="My Project Description"
+APP_VERSION="0.1"
+CONTACT_NAME="Me"
+CONTACT_EMAIL="my.email@example.com"
+LICENSE_NAME="MIT"
+
+# ------------- database -------------
+POSTGRES_USER="postgres"
+POSTGRES_PASSWORD=1234
+POSTGRES_SERVER="db"
+POSTGRES_PORT=5432
+POSTGRES_DB="postgres"
+POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://"
+
+# ------------- crypt -------------
+SECRET_KEY=db210482bea9aae930b00b17f3449a21340c281ac7e1f2a4e33e2c5cd77f291e
+ALGORITHM=HS256
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+
+# ------------- admin -------------
+ADMIN_NAME="admin"
+ADMIN_EMAIL="admin@example.com"
+ADMIN_USERNAME="admin"
+ADMIN_PASSWORD="Str1ngst!"
+
+# ------------- redis cache -------------
+REDIS_CACHE_HOST="redis"
+REDIS_CACHE_PORT=6379
+
+# ------------- redis queue -------------
+REDIS_QUEUE_HOST="redis"
+REDIS_QUEUE_PORT=6379
+
+# ------------- redis rate limit -------------
+REDIS_RATE_LIMIT_HOST="redis"
+REDIS_RATE_LIMIT_PORT=6379
+
+# ------------- client side cache -------------
+CLIENT_CACHE_MAX_AGE=60
+
+# ------------- test -------------
+TEST_NAME="Tester User"
+TEST_EMAIL="test@tester.com"
+TEST_USERNAME="testeruser"
+TEST_PASSWORD="Str1ng$t"
+
+# ------------- environment -------------
+ENVIRONMENT="production"
+
+# ------------- first tier -------------
+TIER_NAME="free"
From 584d14fd03ad6485f4a54abbfd3ded51366c7cf0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Mon, 10 Nov 2025 17:23:48 -0300
Subject: [PATCH 07/19] Adding Contributing, References, License and Contact
section from the original README.md.
---
README.md | 27 ++++++++++++++++++---------
1 file changed, 18 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index bee84e6..2fe9b0b 100644
--- a/README.md
+++ b/README.md
@@ -122,18 +122,27 @@ More examples (superuser creation, tiers, rate limits, admin usage) - **docs**.
## Contributing
-Issues and PRs are welcome. Please read **CONTRIBUTING.md** and follow the style of existing modules (type hints, async/await, explicit None checks, and paginated responses).
+Read [contributing](CONTRIBUTING.md).
----
+## References
+
+This project was inspired by a few projects; it's based on them, with things changed to the way I like (and Pydantic and SQLAlchemy updated).
+
+- [`Full Stack FastAPI and PostgreSQL`](https://github.com/tiangolo/full-stack-fastapi-postgresql) by @tiangolo himself
+- [`FastAPI Microservices`](https://github.com/Kludex/fastapi-microservices) by @kludex which heavily inspired this boilerplate
+- [`Async Web API with FastAPI + SQLAlchemy 2.0`](https://github.com/rhoboro/async-fastapi-sqlalchemy) for sqlalchemy 2.0 ORM examples
+- [`FastAPI Rocket Boilerplate`](https://github.com/asacristani/fastapi-rocket-boilerplate/tree/main) for docker compose
## License
-MIT - see `LICENSE.md`.
+[`MIT`](LICENSE.md)
----
+## Contact
-
-
-
-
-
+Benav Labs — [benav.io](https://benav.io)
+[github.com/benavlabs](https://github.com/benavlabs/)
+
+
+
+
+
From ee2e7f0f6ca3e2c27136ac7565b2bca12d5f64f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Wed, 12 Nov 2025 22:32:34 -0300
Subject: [PATCH 08/19] Removing deprecated version keyword from
docker-compose.yml files.
---
scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml | 2 --
scripts/production_with_nginx/docker-compose.yml | 2 --
2 files changed, 4 deletions(-)
diff --git a/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml b/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
index f20c9e8..8b4cefd 100644
--- a/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
+++ b/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
web:
build:
diff --git a/scripts/production_with_nginx/docker-compose.yml b/scripts/production_with_nginx/docker-compose.yml
index e7ca163..c32b252 100644
--- a/scripts/production_with_nginx/docker-compose.yml
+++ b/scripts/production_with_nginx/docker-compose.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
web:
build:
From 5ffc12d7e7a8a1996844541d61d19435c93751e2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Wed, 12 Nov 2025 23:01:21 -0300
Subject: [PATCH 09/19] With Claude Code: Addressing production nginx
docker-compose missing web service command directive.
---
scripts/production_with_nginx/docker-compose.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/scripts/production_with_nginx/docker-compose.yml b/scripts/production_with_nginx/docker-compose.yml
index c32b252..77c6296 100644
--- a/scripts/production_with_nginx/docker-compose.yml
+++ b/scripts/production_with_nginx/docker-compose.yml
@@ -7,7 +7,7 @@ services:
# -------- replace with comment to run with gunicorn --------
# command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
- # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
+ command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
env_file:
- ./src/.env
# -------- replace ports with expose if you are using nginx --------
From b8bb72ce686381695182967c3db63ad596e71012 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Wed, 12 Nov 2025 23:08:46 -0300
Subject: [PATCH 10/19] With Claude Code: Addressing Instructions don't mention
copying default.conf for nginx setup.
---
README.md | 32 +++++++++++++++++++++++++++++---
1 file changed, 29 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index 2fe9b0b..a865914 100644
--- a/README.md
+++ b/README.md
@@ -29,9 +29,8 @@ git clone https://github.com//FastAPI-boilerplate
cd FastAPI-boilerplate
# In the scripts/ folder, you can find scripts to run FastAPI-Boilerplate locally, with uvicorn workers, and in production with nginx.
-# NOTE: For now, only local scripts are updated.
-# Running locally with Uvicorn:
+# Option 1: Running locally with Uvicorn
# Copy Dockerfile and Docker Compose files:
cp scripts/local_with_uvicorn/Dockerfile Dockerfile
@@ -39,13 +38,40 @@ cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
# Copy and create your environment file
cp scripts/local_with_uvicorn/env src/.env
-# If you want, modify in the minimal environment variables as described in the docs.
+# If you want, modify the minimal environment variables as described in the docs.
# Run everything using Docker:
docker compose up
# Open the API documentation
open http://127.0.0.1:8000/docs
+
+# Option 2: Running with Gunicorn managing Uvicorn workers
+
+# Copy Dockerfile and Docker Compose files:
+cp scripts/gunicorn_managing_uvicorn_workers/Dockerfile Dockerfile
+cp scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml docker-compose.yml
+
+# Copy and create your environment file
+cp scripts/gunicorn_managing_uvicorn_workers/env src/.env
+
+# Run everything using Docker:
+docker compose up
+
+# Option 3: Production with NGINX
+
+# Copy Dockerfile, Docker Compose, and nginx config:
+cp scripts/production_with_nginx/Dockerfile Dockerfile
+cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
+cp default.conf default.conf # nginx configuration (already in root)
+
+# Copy and create your environment file
+cp scripts/production_with_nginx/env src/.env
+
+# Run everything using Docker:
+docker compose up
+
+# Access via http://localhost (nginx proxies to the app)
```
> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
From 01ca271a68f5a6443986ca7d6a82d511679ac623 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Wed, 12 Nov 2025 23:16:28 -0300
Subject: [PATCH 11/19] With Claude Code: Files named env should be renamed to
.env.example to follow standard conventions. Also add warning headers
indicating these are example values that must be changed (current hardcoded
values like SECRET_KEY and passwords could be accidentally used in
production).
---
README.md | 10 ++++++----
.../{env => .env.example} | 16 +++++++++++++++-
scripts/local_with_uvicorn/{env => .env.example} | 16 +++++++++++++++-
.../production_with_nginx/{env => .env.example} | 16 +++++++++++++++-
4 files changed, 51 insertions(+), 7 deletions(-)
rename scripts/gunicorn_managing_uvicorn_workers/{env => .env.example} (62%)
rename scripts/local_with_uvicorn/{env => .env.example} (62%)
rename scripts/production_with_nginx/{env => .env.example} (62%)
diff --git a/README.md b/README.md
index a865914..18a8dc4 100644
--- a/README.md
+++ b/README.md
@@ -37,8 +37,8 @@ cp scripts/local_with_uvicorn/Dockerfile Dockerfile
cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
# Copy and create your environment file
-cp scripts/local_with_uvicorn/env src/.env
-# If you want, modify the minimal environment variables as described in the docs.
+cp scripts/local_with_uvicorn/.env.example src/.env
+# For local development, the example values work fine. Modify if needed.
# Run everything using Docker:
docker compose up
@@ -53,7 +53,8 @@ cp scripts/gunicorn_managing_uvicorn_workers/Dockerfile Dockerfile
cp scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml docker-compose.yml
# Copy and create your environment file
-cp scripts/gunicorn_managing_uvicorn_workers/env src/.env
+cp scripts/gunicorn_managing_uvicorn_workers/.env.example src/.env
+# Recommended: Change SECRET_KEY and passwords for staging/testing environments.
# Run everything using Docker:
docker compose up
@@ -66,7 +67,8 @@ cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
cp default.conf default.conf # nginx configuration (already in root)
# Copy and create your environment file
-cp scripts/production_with_nginx/env src/.env
+cp scripts/production_with_nginx/.env.example src/.env
+# CRITICAL: You MUST change SECRET_KEY, all passwords, and sensitive values before deploying!
# Run everything using Docker:
docker compose up
diff --git a/scripts/gunicorn_managing_uvicorn_workers/env b/scripts/gunicorn_managing_uvicorn_workers/.env.example
similarity index 62%
rename from scripts/gunicorn_managing_uvicorn_workers/env
rename to scripts/gunicorn_managing_uvicorn_workers/.env.example
index a438ca5..60747f9 100644
--- a/scripts/gunicorn_managing_uvicorn_workers/env
+++ b/scripts/gunicorn_managing_uvicorn_workers/.env.example
@@ -1,4 +1,18 @@
-# ------------- app settings -------------
+# ============================================================================
+# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS
+# ============================================================================
+# This file contains example values for development/testing purposes only.
+#
+# SECURITY CRITICAL: Before deploying to production, you MUST:
+# 1. Copy this file to src/.env
+# 2. Generate a new SECRET_KEY using: openssl rand -hex 32
+# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.)
+# 4. Update all sensitive configuration values
+#
+# Using these example values in production is a SECURITY RISK.
+# ============================================================================
+
+# ------------- app settings -------------
APP_NAME="My Project"
APP_DESCRIPTION="My Project Description"
APP_VERSION="0.1"
diff --git a/scripts/local_with_uvicorn/env b/scripts/local_with_uvicorn/.env.example
similarity index 62%
rename from scripts/local_with_uvicorn/env
rename to scripts/local_with_uvicorn/.env.example
index d312bed..87604a6 100644
--- a/scripts/local_with_uvicorn/env
+++ b/scripts/local_with_uvicorn/.env.example
@@ -1,4 +1,18 @@
-# ------------- app settings -------------
+# ============================================================================
+# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS
+# ============================================================================
+# This file contains example values for development/testing purposes only.
+#
+# SECURITY CRITICAL: Before deploying to production, you MUST:
+# 1. Copy this file to src/.env
+# 2. Generate a new SECRET_KEY using: openssl rand -hex 32
+# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.)
+# 4. Update all sensitive configuration values
+#
+# Using these example values in production is a SECURITY RISK.
+# ============================================================================
+
+# ------------- app settings -------------
APP_NAME="My Project"
APP_DESCRIPTION="My Project Description"
APP_VERSION="0.1"
diff --git a/scripts/production_with_nginx/env b/scripts/production_with_nginx/.env.example
similarity index 62%
rename from scripts/production_with_nginx/env
rename to scripts/production_with_nginx/.env.example
index 107cfe4..4863897 100644
--- a/scripts/production_with_nginx/env
+++ b/scripts/production_with_nginx/.env.example
@@ -1,4 +1,18 @@
-# ------------- app settings -------------
+# ============================================================================
+# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS
+# ============================================================================
+# This file contains example values for development/testing purposes only.
+#
+# SECURITY CRITICAL: Before deploying to production, you MUST:
+# 1. Copy this file to src/.env
+# 2. Generate a new SECRET_KEY using: openssl rand -hex 32
+# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.)
+# 4. Update all sensitive configuration values
+#
+# Using these example values in production is a SECURITY RISK.
+# ============================================================================
+
+# ------------- app settings -------------
APP_NAME="My Project"
APP_DESCRIPTION="My Project Description"
APP_VERSION="0.1"
From 69a7fc0ed4484273a377be0c95022b2348e5c3d4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Planch=C3=B3n=20Prestes?=
Date: Wed, 12 Nov 2025 23:23:09 -0300
Subject: [PATCH 12/19] With Claude Code: The copy-paste commands in the README
haven't been validated to actually work end-to-end.
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 18a8dc4..4b0c28b 100644
--- a/README.md
+++ b/README.md
@@ -61,10 +61,10 @@ docker compose up
# Option 3: Production with NGINX
-# Copy Dockerfile, Docker Compose, and nginx config:
+# Copy Dockerfile and Docker Compose:
cp scripts/production_with_nginx/Dockerfile Dockerfile
cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
-cp default.conf default.conf # nginx configuration (already in root)
+# Note: default.conf for nginx is already in the root directory
# Copy and create your environment file
cp scripts/production_with_nginx/.env.example src/.env
From 662894bd8432021b79215b1181b328012ca01132 Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sat, 15 Nov 2025 17:38:31 -0300
Subject: [PATCH 13/19] fix syntax, change password so warning is gone
---
scripts/gunicorn_managing_uvicorn_workers/.env.example | 2 +-
scripts/gunicorn_managing_uvicorn_workers/Dockerfile | 2 +-
scripts/local_with_uvicorn/.env.example | 2 +-
scripts/production_with_nginx/.env.example | 2 +-
scripts/production_with_nginx/Dockerfile | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/scripts/gunicorn_managing_uvicorn_workers/.env.example b/scripts/gunicorn_managing_uvicorn_workers/.env.example
index 60747f9..1c1e585 100644
--- a/scripts/gunicorn_managing_uvicorn_workers/.env.example
+++ b/scripts/gunicorn_managing_uvicorn_workers/.env.example
@@ -58,7 +58,7 @@ CLIENT_CACHE_MAX_AGE=60
TEST_NAME="Tester User"
TEST_EMAIL="test@tester.com"
TEST_USERNAME="testeruser"
-TEST_PASSWORD="Str1ng$t"
+TEST_PASSWORD="Str1ngT3st!"
# ------------- environment -------------
ENVIRONMENT="staging"
diff --git a/scripts/gunicorn_managing_uvicorn_workers/Dockerfile b/scripts/gunicorn_managing_uvicorn_workers/Dockerfile
index 33c6419..98d55fc 100644
--- a/scripts/gunicorn_managing_uvicorn_workers/Dockerfile
+++ b/scripts/gunicorn_managing_uvicorn_workers/Dockerfile
@@ -24,4 +24,4 @@ COPY ./src/app /code/app
# -------- replace with comment to run with gunicorn --------
# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
+CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
diff --git a/scripts/local_with_uvicorn/.env.example b/scripts/local_with_uvicorn/.env.example
index 87604a6..10f0ba3 100644
--- a/scripts/local_with_uvicorn/.env.example
+++ b/scripts/local_with_uvicorn/.env.example
@@ -58,7 +58,7 @@ CLIENT_CACHE_MAX_AGE=60
TEST_NAME="Tester User"
TEST_EMAIL="test@tester.com"
TEST_USERNAME="testeruser"
-TEST_PASSWORD="Str1ng$t"
+TEST_PASSWORD="Str1ngT3st!"
# ------------- environment -------------
ENVIRONMENT="local"
diff --git a/scripts/production_with_nginx/.env.example b/scripts/production_with_nginx/.env.example
index 4863897..6f9c5d6 100644
--- a/scripts/production_with_nginx/.env.example
+++ b/scripts/production_with_nginx/.env.example
@@ -58,7 +58,7 @@ CLIENT_CACHE_MAX_AGE=60
TEST_NAME="Tester User"
TEST_EMAIL="test@tester.com"
TEST_USERNAME="testeruser"
-TEST_PASSWORD="Str1ng$t"
+TEST_PASSWORD="Str1ngT3st!"
# ------------- environment -------------
ENVIRONMENT="production"
diff --git a/scripts/production_with_nginx/Dockerfile b/scripts/production_with_nginx/Dockerfile
index 49538b8..8b8ccfe 100644
--- a/scripts/production_with_nginx/Dockerfile
+++ b/scripts/production_with_nginx/Dockerfile
@@ -24,4 +24,4 @@ COPY ./src/app /code/app
# -------- replace with comment to run with gunicorn --------
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
-# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker". "-b", "0.0.0.0:8000"]
+# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
From 0f723e61f8c3fe7043913bf37630adbd7a498278 Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sat, 15 Nov 2025 17:46:24 -0300
Subject: [PATCH 14/19] add some other stuff to readme
---
README.md | 110 ++++++++++++++++++++++++++++++++----------------------
1 file changed, 65 insertions(+), 45 deletions(-)
diff --git a/README.md b/README.md
index 4b0c28b..21b606a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,28 @@
-# Benav Labs FastAPI Boilerplate
+ Benav Labs FastAPI boilerplate
+
+ **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
+
+
+
+
+
+
+
-> **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
FastAPI
@@ -19,6 +41,44 @@
---
+## Features
+
+* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
+* ๐งฑ Pydantic v2 models & validation
+* ๐ JWT auth (access + refresh), cookies for refresh
+* ๐ฎ Rate limiter + tiers (free/pro/etc.)
+* ๐งฐ FastCRUD for efficient CRUD & pagination
+* ๐งโ๐ผ **CRUDAdmin**: minimal admin panel (optional)
+* ๐ฆ ARQ background jobs (Redis)
+* ๐ง Redis caching (server + client-side headers)
+* ๐ณ One-command Docker Compose
+* ๐ NGINX & Gunicorn recipes for prod
+
+---
+
+## When to use it
+
+* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
+* You value **sensible defaults** with the freedom to opt-out of modules.
+* You prefer **docs over boilerplate** in README - depth lives in the site.
+
+Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
+
+---
+
+## What's inside (high-level)
+
+* **App**: FastAPI app factory, env-aware docs exposure
+* **Auth**: JWT access/refresh, logout via token blacklist
+* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
+* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
+* **Caching**: decorator-based endpoints cache; client cache headers
+* **Queues**: ARQ worker (async jobs), Redis connection helpers
+* **Rate limits**: per-tier + per-path rules
+* **Admin**: CRUDAdmin views for common models (optional)
+
+> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
+
## TL;DR - Quickstart
Use the template on GitHub, create your repo, then:
@@ -28,7 +88,8 @@ Use the template on GitHub, create your repo, then:
git clone https://github.com//FastAPI-boilerplate
cd FastAPI-boilerplate
-# In the scripts/ folder, you can find scripts to run FastAPI-Boilerplate locally, with uvicorn workers, and in production with nginx.
+# In the scripts/ folder, you can find scripts to run FastAPI-Boilerplate locally,
+# with uvicorn workers, and in production with nginx.
# Option 1: Running locally with Uvicorn
@@ -80,46 +141,6 @@ docker compose up
---
-## Features
-
-* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
-* ๐งฑ Pydantic v2 models & validation
-* ๐ JWT auth (access + refresh), cookies for refresh
-* ๐ฎ Rate limiter + tiers (free/pro/etc.)
-* ๐งฐ FastCRUD for efficient CRUD & pagination
-* ๐งโ๐ผ **CRUDAdmin**: minimal admin panel (optional)
-* ๐ฆ ARQ background jobs (Redis)
-* ๐ง Redis caching (server + client-side headers)
-* ๐ณ One-command Docker Compose
-* ๐ NGINX & Gunicorn recipes for prod
-
----
-
-## When to use it
-
-* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
-* You value **sensible defaults** with the freedom to opt-out of modules.
-* You prefer **docs over boilerplate** in README - depth lives in the site.
-
-Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
-
----
-
-## What's inside (high-level)
-
-* **App**: FastAPI app factory, env-aware docs exposure
-* **Auth**: JWT access/refresh, logout via token blacklist
-* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
-* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
-* **Caching**: decorator-based endpoints cache; client cache headers
-* **Queues**: ARQ worker (async jobs), Redis connection helpers
-* **Rate limits**: per-tier + per-path rules
-* **Admin**: CRUDAdmin views for common models (optional)
-
-> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
-
----
-
## Configuration (minimal)
Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
@@ -167,8 +188,7 @@ This project was inspired by a few projects, it's based on them with things chan
## Contact
-Benav Labs โ [benav.io](https://benav.io)
-[github.com/benavlabs](https://github.com/benavlabs/)
+Benav Labs โ [benav.io](https://benav.io), [discord server](https://discord.com/invite/TEmPs22gqB)
From 59f1e2887714541e09ceac830cf2ccaa78c0d57f Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sat, 15 Nov 2025 17:47:44 -0300
Subject: [PATCH 15/19] fix some things in the readme
---
README.md | 11 +----------
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/README.md b/README.md
index 21b606a..911dca2 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
Benav Labs FastAPI boilerplate
- **Batteries-included FastAPI starter** with Pydantic v2, SQLAlchemy 2.0, PostgreSQL, Redis, ARQ jobs, rate-limiting and a minimal admin. Production-ready defaults, optional modules, and clear docs.
+ Batteries-included FastAPI starter with production-ready defaults, optional modules, and clear docs.
@@ -24,15 +24,6 @@
-
- FastAPI
- Pydantic v2
- SQLAlchemy 2.0
- PostgreSQL
- Redis
- ARQ
-
-
**Docs:**
* ๐ [https://benavlabs.github.io/FastAPI-boilerplate/](https://benavlabs.github.io/FastAPI-boilerplate/)
From 59e51ea15a18b1eb47eeb5ee3fd790876919155d Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sat, 15 Nov 2025 17:52:42 -0300
Subject: [PATCH 16/19] some more fixes
---
README.md | 69 +++++++++++++++++++++++++------------------------------
1 file changed, 31 insertions(+), 38 deletions(-)
diff --git a/README.md b/README.md
index 911dca2..5fea2a7 100644
--- a/README.md
+++ b/README.md
@@ -30,8 +30,6 @@
* ๐ง DeepWiki: [https://deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)
* ๐ฌ Discord: [https://discord.com/invite/TEmPs22gqB](https://discord.com/invite/TEmPs22gqB)
----
-
## Features
* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
@@ -45,8 +43,6 @@
* ๐ณ One-command Docker Compose
* ๐ NGINX & Gunicorn recipes for prod
----
-
## When to use it
* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
@@ -55,8 +51,6 @@
Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
----
-
## What's inside (high-level)
* **App**: FastAPI app factory, env-aware docs exposure
@@ -75,62 +69,65 @@ Not a fit if you need a monorepo microservices scaffold - see the docs for point
Use the template on GitHub, create your repo, then:
```bash
-# Clone your new repository
git clone https://github.com//FastAPI-boilerplate
cd FastAPI-boilerplate
+```
-# In the scripts/ folder, you can find scripts to run FastAPI-Boilerplate locally,
-# with uvicorn workers, and in production with nginx.
+The `scripts/` folder contains ready-to-use configurations for different deployment scenarios. Pick your path:
-# Option 1: Running locally with Uvicorn
+### Option 1: Local development with Uvicorn
-# Copy Dockerfile and Docker Compose files:
+Best for: **Development and testing**
+
+```bash
cp scripts/local_with_uvicorn/Dockerfile Dockerfile
cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
-
-# Copy and create your environment file
cp scripts/local_with_uvicorn/.env.example src/.env
-# For local development, the example values work fine. Modify if needed.
+```
-# Run everything using Docker:
+For local development, the example environment values work fine. You can modify them later if needed.
+
+```bash
docker compose up
+```
+
+Your API will be running at http://127.0.0.1:8000 with auto-reload enabled. Open http://127.0.0.1:8000/docs to see the interactive documentation.
-# Open the API documentation
-open http://127.0.0.1:8000/docs
+### Option 2: Staging with Gunicorn managing Uvicorn workers
-# Option 2: Running with Gunicorn managing Uvicorn workers
+Best for: **Staging environments and load testing**
-# Copy Dockerfile and Docker Compose files:
+```bash
cp scripts/gunicorn_managing_uvicorn_workers/Dockerfile Dockerfile
cp scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml docker-compose.yml
-
-# Copy and create your environment file
cp scripts/gunicorn_managing_uvicorn_workers/.env.example src/.env
-# Recommended: Change SECRET_KEY and passwords for staging/testing environments.
+```
-# Run everything using Docker:
+โ ๏ธ **Recommended**: Change `SECRET_KEY` and passwords in the `.env` file for staging/testing environments.
+
+```bash
docker compose up
+```
+
+### Option 3: Production with NGINX
-# Option 3: Production with NGINX
+Best for: **Production deployments**
-# Copy Dockerfile and Docker Compose:
+```bash
cp scripts/production_with_nginx/Dockerfile Dockerfile
cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
-# Note: default.conf for nginx is already in the root directory
-
-# Copy and create your environment file
cp scripts/production_with_nginx/.env.example src/.env
-# CRITICAL: You MUST change SECRET_KEY, all passwords, and sensitive values before deploying!
+```
-# Run everything using Docker:
-docker compose up
+๐จ **CRITICAL**: You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying!
-# Access via http://localhost (nginx proxies to the app)
+```bash
+docker compose up
```
-> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
+Access your application via http://localhost (NGINX proxies to the FastAPI app).
----
+> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
## Configuration (minimal)
@@ -141,8 +138,6 @@ Create `src/.env` and set **app**, **database**, **JWT**, and **environment** se
* `ENVIRONMENT=local|staging|production` controls API docs exposure
* Set `ADMIN_*` to enable the first admin user
----
-
## Common tasks
```bash
@@ -158,8 +153,6 @@ curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
More examples (superuser creation, tiers, rate limits, admin usage) - **docs**.
----
-
## Contributing
Read [contributing](CONTRIBUTING.md).
From f4e8740e0e1d1041e0e2d52141cb8583dcb5e395 Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sat, 15 Nov 2025 18:19:09 -0300
Subject: [PATCH 17/19] some other improvements
---
README.md | 68 +++++++++++++++++++++++++++++++------------------------
1 file changed, 38 insertions(+), 30 deletions(-)
diff --git a/README.md b/README.md
index 5fea2a7..4904e90 100644
--- a/README.md
+++ b/README.md
@@ -9,6 +9,10 @@
+
+๐ Docs ยท ๐ง DeepWiki ยท ๐ฌ Discord
+
+
@@ -24,12 +28,6 @@
-**Docs:**
-
-* ๐ [https://benavlabs.github.io/FastAPI-boilerplate/](https://benavlabs.github.io/FastAPI-boilerplate/)
-* ๐ง DeepWiki: [https://deepwiki.com/benavlabs/FastAPI-boilerplate](https://deepwiki.com/benavlabs/FastAPI-boilerplate)
-* ๐ฌ Discord: [https://discord.com/invite/TEmPs22gqB](https://discord.com/invite/TEmPs22gqB)
-
## Features
* โก๏ธ Fully async FastAPI + SQLAlchemy 2.0
@@ -43,26 +41,30 @@
* ๐ณ One-command Docker Compose
* ๐ NGINX & Gunicorn recipes for prod
-## When to use it
+## Why and When to use it
-* You want a pragmatic starter with auth, CRUD, jobs, caching and rate-limits.
-* You value **sensible defaults** with the freedom to opt-out of modules.
-* You prefer **docs over boilerplate** in README - depth lives in the site.
+**Perfect if you want:**
-Not a fit if you need a monorepo microservices scaffold - see the docs for pointers.
+* A pragmatic starter with auth, CRUD, jobs, caching and rate-limits
+* **Sensible defaults** with the freedom to opt-out of modules
+* **Docs over boilerplate** in README - depth lives in the site
-## What's inside (high-level)
+> **Not a fit** if you need a monorepo microservices scaffold - [see the docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/project-structure/) for pointers.
-* **App**: FastAPI app factory, env-aware docs exposure
-* **Auth**: JWT access/refresh, logout via token blacklist
-* **DB**: Postgres + SQLAlchemy 2.0, Alembic migrations
-* **CRUD**: FastCRUD generics (get, get_multi, create, update, delete, joins)
-* **Caching**: decorator-based endpoints cache; client cache headers
-* **Queues**: ARQ worker (async jobs), Redis connection helpers
-* **Rate limits**: per-tier + per-path rules
-* **Admin**: CRUDAdmin views for common models (optional)
+**What you get:**
-> The full tree and deep dives are in **Project Structure**, **Database**, **CRUD Operations**, **API**, **Caching**, **Background Tasks**, **Rate Limiting**, and **Production** sections of the docs.
+* **App**: FastAPI app factory, [env-aware docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/development/) exposure
+* **Auth**: [JWT access/refresh](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/), logout via token blacklist
+* **DB**: Postgres + SQLAlchemy 2.0, [Alembic migrations](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/)
+* **CRUD**: [FastCRUD generics](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/crud/) (get, get_multi, create, update, delete, joins)
+* **Caching**: [decorator-based endpoints cache](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/caching/); client cache headers
+* **Queues**: [ARQ worker](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/background-tasks/) (async jobs), Redis connection helpers
+* **Rate limits**: [per-tier + per-path rules](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/rate-limiting/)
+* **Admin**: [CRUDAdmin views](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/admin-panel/) for common models (optional)
+
+This is what we've been using in production apps. Several applications running in production started from this boilerplate as their foundation - from SaaS platforms to internal tools. It's proven, stable technology that works together reliably. Use this as the foundation for whatever you want to build on top.
+
+> **Building an AI SaaS?** Skip even more setup with [**FastroAI**](https://fastro.ai) - our production-ready template with AI integration, payments, and frontend included.
## TL;DR - Quickstart
@@ -77,7 +79,7 @@ The `scripts/` folder contains ready-to-use configurations for different deploym
### Option 1: Local development with Uvicorn
-Best for: **Development and testing**
+Best for: **Development and testing**. Simply run:
```bash
cp scripts/local_with_uvicorn/Dockerfile Dockerfile
@@ -85,7 +87,7 @@ cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
cp scripts/local_with_uvicorn/.env.example src/.env
```
-For local development, the example environment values work fine. You can modify them later if needed.
+For local development, the example environment values work fine. You can modify them later if needed. Then you just need to run:
```bash
docker compose up
@@ -95,7 +97,7 @@ Your API will be running at http://127.0.0.1:8000 with auto-reload enabled. Open
### Option 2: Staging with Gunicorn managing Uvicorn workers
-Best for: **Staging environments and load testing**
+Best for: **Staging environments and load testing**. Run:
```bash
cp scripts/gunicorn_managing_uvicorn_workers/Dockerfile Dockerfile
@@ -103,7 +105,10 @@ cp scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml docker-compose.y
cp scripts/gunicorn_managing_uvicorn_workers/.env.example src/.env
```
-โ ๏ธ **Recommended**: Change `SECRET_KEY` and passwords in the `.env` file for staging/testing environments.
+> [!WARNING]
+> Change `SECRET_KEY` and passwords in the `.env` file for staging/testing environments.
+
+And start with:
```bash
docker compose up
@@ -111,7 +116,7 @@ docker compose up
### Option 3: Production with NGINX
-Best for: **Production deployments**
+Best for: **Production deployments**. Just run these commands:
```bash
cp scripts/production_with_nginx/Dockerfile Dockerfile
@@ -119,7 +124,10 @@ cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
cp scripts/production_with_nginx/.env.example src/.env
```
-๐จ **CRITICAL**: You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying!
+> [!CAUTION]
+> You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying!
+
+And then, to sart:
```bash
docker compose up
@@ -127,11 +135,11 @@ docker compose up
Access your application via http://localhost (NGINX proxies to the FastAPI app).
-> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the docs.
+> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/installation/).
## Configuration (minimal)
-Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the docs for a copy-pasteable example and production guidance.
+Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/) for a copy-pasteable example and production guidance.
[https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/)
@@ -151,7 +159,7 @@ cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head
curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
```
-More examples (superuser creation, tiers, rate limits, admin usage) - **docs**.
+More examples (superuser creation, tiers, rate limits, admin usage) in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/first-run/).
## Contributing
From ccda306dadf072af0063e0d24ccbb49c72992776 Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sun, 16 Nov 2025 16:22:52 -0300
Subject: [PATCH 18/19] setup script added for convenience
---
README.md | 95 ++++++----
docs/getting-started/installation.md | 22 ++-
docs/user-guide/configuration/docker-setup.md | 16 ++
setup.py | 172 ++++++++++++++++++
4 files changed, 271 insertions(+), 34 deletions(-)
create mode 100755 setup.py
diff --git a/README.md b/README.md
index 4904e90..68ac4f1 100644
--- a/README.md
+++ b/README.md
@@ -75,65 +75,98 @@ git clone https://github.com//FastAPI-boilerplate
cd FastAPI-boilerplate
```
-The `scripts/` folder contains ready-to-use configurations for different deployment scenarios. Pick your path:
+**Quick setup:** Run the interactive setup script to choose your deployment configuration:
+
+```bash
+./setup.py
+```
+
+Or directly specify the deployment type: `./setup.py local`, `./setup.py staging`, or `./setup.py production`.
+
+The script copies the right files for your deployment scenario. Here's what each option sets up:
### Option 1: Local development with Uvicorn
-Best for: **Development and testing**. Simply run:
+Best for: **Development and testing**
-```bash
-cp scripts/local_with_uvicorn/Dockerfile Dockerfile
-cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
-cp scripts/local_with_uvicorn/.env.example src/.env
-```
+**Copies:**
-For local development, the example environment values work fine. You can modify them later if needed. Then you just need to run:
+- `scripts/local_with_uvicorn/Dockerfile` โ `Dockerfile`
+- `scripts/local_with_uvicorn/docker-compose.yml` โ `docker-compose.yml`
+- `scripts/local_with_uvicorn/.env.example` โ `src/.env`
-```bash
-docker compose up
-```
+Sets up Uvicorn with auto-reload enabled. The example environment values work fine for development.
-Your API will be running at http://127.0.0.1:8000 with auto-reload enabled. Open http://127.0.0.1:8000/docs to see the interactive documentation.
+**Manual setup:** `./setup.py local` or copy the files above manually.
### Option 2: Staging with Gunicorn managing Uvicorn workers
-Best for: **Staging environments and load testing**. Run:
+Best for: **Staging environments and load testing**
-```bash
-cp scripts/gunicorn_managing_uvicorn_workers/Dockerfile Dockerfile
-cp scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml docker-compose.yml
-cp scripts/gunicorn_managing_uvicorn_workers/.env.example src/.env
-```
+**Copies:**
+
+- `scripts/gunicorn_managing_uvicorn_workers/Dockerfile` โ `Dockerfile`
+- `scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml` โ `docker-compose.yml`
+- `scripts/gunicorn_managing_uvicorn_workers/.env.example` โ `src/.env`
+
+Sets up Gunicorn managing multiple Uvicorn workers for production-like performance testing.
> [!WARNING]
-> Change `SECRET_KEY` and passwords in the `.env` file for staging/testing environments.
+> Change `SECRET_KEY` and passwords in the `.env` file for staging environments.
+
+**Manual setup:** `./setup.py staging` or copy the files above manually.
+
+### Option 3: Production with NGINX
+
+Best for: **Production deployments**
+
+**Copies:**
-And start with:
+- `scripts/production_with_nginx/Dockerfile` โ `Dockerfile`
+- `scripts/production_with_nginx/docker-compose.yml` โ `docker-compose.yml`
+- `scripts/production_with_nginx/.env.example` โ `src/.env`
+
+Sets up NGINX as reverse proxy with Gunicorn + Uvicorn workers for production.
+
+> [!CAUTION]
+> You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying!
+
+**Manual setup:** `./setup.py production` or copy the files above manually.
+
+---
+
+**Start your application:**
```bash
docker compose up
```
-### Option 3: Production with NGINX
+**Access your app:**
+- **Local**: http://127.0.0.1:8000 (auto-reload enabled) โ [API docs](http://127.0.0.1:8000/docs)
+- **Staging**: http://127.0.0.1:8000 (production-like performance)
+- **Production**: http://localhost (NGINX reverse proxy)
-Best for: **Production deployments**. Just run these commands:
+### Next steps
+**Create your first admin user:**
```bash
-cp scripts/production_with_nginx/Dockerfile Dockerfile
-cp scripts/production_with_nginx/docker-compose.yml docker-compose.yml
-cp scripts/production_with_nginx/.env.example src/.env
+docker compose run --rm create_superuser
```
-> [!CAUTION]
-> You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying!
-
-And then, to sart:
+**Run database migrations** (if you add models):
+```bash
+cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head
+```
+**Test background jobs:**
```bash
-docker compose up
+curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello'
```
-Access your application via http://localhost (NGINX proxies to the FastAPI app).
+**Or run locally without Docker:**
+```bash
+uv sync && uv run uvicorn src.app.main:app --reload
+```
> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/installation/).
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 2f7bbbe..f040f3d 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -31,19 +31,35 @@ Install these tools on your system:
cd fastapi-boilerplate
```
-1. **Set up environment**:
+1. **Quick setup** (recommended):
```bash
- cp src/.env.example src/.env
- # Edit src/.env with your configuration
+ # Interactive setup - choose your deployment type
+ ./setup.py
+
+ # Or specify directly: ./setup.py local, ./setup.py staging, ./setup.py production
```
+ This automatically copies the correct `Dockerfile`, `docker-compose.yml`, and `.env` files for your chosen deployment scenario.
+
1. **Start services**:
```bash
docker compose up -d
```
+#### Manual Setup Alternative
+
+If you prefer to set up manually:
+
+```bash
+# Copy configuration files for local development
+cp scripts/local_with_uvicorn/Dockerfile Dockerfile
+cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml
+cp scripts/local_with_uvicorn/.env.example src/.env
+# Edit src/.env with your configuration if needed
+```
+
1. **Verify installation**:
```bash
diff --git a/docs/user-guide/configuration/docker-setup.md b/docs/user-guide/configuration/docker-setup.md
index db4d1bb..fed2dac 100644
--- a/docs/user-guide/configuration/docker-setup.md
+++ b/docs/user-guide/configuration/docker-setup.md
@@ -2,6 +2,22 @@
Learn how to configure and run the FastAPI Boilerplate using Docker Compose. The project includes a complete containerized setup with PostgreSQL, Redis, background workers, and optional services.
+## Quick Start
+
+The fastest way to get started is with the setup script:
+
+```bash
+./setup.py
+```
+
+This script helps you choose between three deployment configurations:
+
+- **Local development** (`./setup.py local`) - Uvicorn with auto-reload
+- **Staging** (`./setup.py staging`) - Gunicorn with workers
+- **Production** (`./setup.py production`) - NGINX + Gunicorn
+
+Each option copies the appropriate `Dockerfile`, `docker-compose.yml`, and `.env.example` files from the `scripts/` folder.
+
## Docker Compose Architecture
The boilerplate includes these core services:
diff --git a/setup.py b/setup.py
new file mode 100755
index 0000000..e16f8aa
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+"""
+FastAPI Boilerplate Setup Script
+
+Automates copying the correct configuration files for different deployment scenarios.
+"""
+
+import sys
+import shutil
+from pathlib import Path
+
+DEPLOYMENTS = {
+ "local": {
+ "name": "Local development with Uvicorn",
+ "description": "Auto-reload enabled, development-friendly",
+ "path": "scripts/local_with_uvicorn"
+ },
+ "staging": {
+ "name": "Staging with Gunicorn managing Uvicorn workers",
+ "description": "Production-like setup for testing",
+ "path": "scripts/gunicorn_managing_uvicorn_workers"
+ },
+ "production": {
+ "name": "Production with NGINX",
+ "description": "Full production setup with reverse proxy",
+ "path": "scripts/production_with_nginx"
+ }
+}
+
+def show_help():
+ """Display help information"""
+ print("FastAPI Boilerplate Setup")
+ print("=" * 25)
+ print()
+    print("Usage: python setup.py <deployment_type>")
+ print()
+ print("Available deployment types:")
+ for key, config in DEPLOYMENTS.items():
+ print(f" {key:12} - {config['name']}")
+ print(f" {' ' * 12} {config['description']}")
+ print()
+ print("Examples:")
+ print(" python setup.py local # Set up for local development")
+ print(" python setup.py staging # Set up for staging environment")
+ print(" python setup.py production # Set up for production deployment")
+
+def copy_files(deployment_type: str):
+ """Copy configuration files for the specified deployment type"""
+ if deployment_type not in DEPLOYMENTS:
+ print(f"โ Unknown deployment type: {deployment_type}")
+ print()
+ show_help()
+ return False
+
+ config = DEPLOYMENTS[deployment_type]
+ source_path = Path(config["path"])
+
+ if not source_path.exists():
+ print(f"โ Configuration path not found: {source_path}")
+ return False
+
+ print(f"๐ Setting up {config['name']}...")
+ print(f" {config['description']}")
+ print()
+
+ files_to_copy = [
+ ("Dockerfile", "Dockerfile"),
+ ("docker-compose.yml", "docker-compose.yml"),
+ (".env.example", "src/.env")
+ ]
+
+ success = True
+ for source_file, dest_file in files_to_copy:
+ source = source_path / source_file
+ dest = Path(dest_file)
+
+ if not source.exists():
+ print(f"โ ๏ธ Warning: {source} not found, skipping...")
+ continue
+
+ try:
+ dest.parent.mkdir(parents=True, exist_ok=True)
+
+ shutil.copy2(source, dest)
+            print(f"โ Copied {source} โ {dest}")
+
+ except Exception as e:
+ print(f"โ Failed to copy {source} โ {dest}: {e}")
+ success = False
+
+ if success:
+ print()
+ print("๐ Setup complete!")
+ print()
+
+ if deployment_type in ["staging", "production"]:
+ print("โ ๏ธ IMPORTANT: Update the .env file with your production values:")
+ print(" - Generate a new SECRET_KEY: openssl rand -hex 32")
+ print(" - Change all passwords and sensitive values")
+ print()
+
+ print("Next steps:")
+ print(" docker compose up")
+
+ if deployment_type == "local":
+ print(" open http://127.0.0.1:8000/docs")
+ elif deployment_type == "production":
+ print(" open http://localhost")
+
+ return True
+
+ return False
+
+def interactive_setup():
+ """Interactive setup when no arguments provided"""
+ print("FastAPI Boilerplate Setup")
+ print("=" * 25)
+ print()
+ print("Choose your deployment type:")
+ print()
+
+ options = list(DEPLOYMENTS.keys())
+ for i, key in enumerate(options, 1):
+ config = DEPLOYMENTS[key]
+ print(f" {i}. {config['name']}")
+ print(f" {config['description']}")
+ print()
+
+ while True:
+ try:
+ choice = input(f"Enter your choice (1-{len(options)}): ").strip()
+
+ if choice.isdigit():
+ choice_num = int(choice)
+ if 1 <= choice_num <= len(options):
+ return options[choice_num - 1]
+
+ if choice.lower() in DEPLOYMENTS:
+ return choice.lower()
+
+ print(f"โ Invalid choice. Please enter 1-{len(options)} or the deployment name.")
+
+ except KeyboardInterrupt:
+ print("\n\n๐ Setup cancelled.")
+ return None
+ except EOFError:
+ print("\n\n๐ Setup cancelled.")
+ return None
+
+def main():
+ """Main entry point"""
+ if len(sys.argv) > 1 and sys.argv[1] in ["-h", "--help", "help"]:
+ show_help()
+ return
+
+ if len(sys.argv) == 2:
+ deployment_type = sys.argv[1].lower()
+ elif len(sys.argv) == 1:
+ deployment_type = interactive_setup()
+ if deployment_type is None:
+ return
+ else:
+ show_help()
+ return
+
+ success = copy_files(deployment_type)
+
+ if not success:
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main()
From f6ec92998d080181d2303fcd96abfc8c53a56d2e Mon Sep 17 00:00:00 2001
From: Igor Benav
Date: Sun, 16 Nov 2025 16:23:39 -0300
Subject: [PATCH 19/19] script linting
---
setup.py | 17 +++++++++++------
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/setup.py b/setup.py
index e16f8aa..ea0c717 100755
--- a/setup.py
+++ b/setup.py
@@ -5,28 +5,29 @@
Automates copying the correct configuration files for different deployment scenarios.
"""
-import sys
import shutil
+import sys
from pathlib import Path
DEPLOYMENTS = {
"local": {
"name": "Local development with Uvicorn",
"description": "Auto-reload enabled, development-friendly",
- "path": "scripts/local_with_uvicorn"
+ "path": "scripts/local_with_uvicorn",
},
"staging": {
"name": "Staging with Gunicorn managing Uvicorn workers",
"description": "Production-like setup for testing",
- "path": "scripts/gunicorn_managing_uvicorn_workers"
+ "path": "scripts/gunicorn_managing_uvicorn_workers",
},
"production": {
"name": "Production with NGINX",
"description": "Full production setup with reverse proxy",
- "path": "scripts/production_with_nginx"
- }
+ "path": "scripts/production_with_nginx",
+ },
}
+
def show_help():
"""Display help information"""
print("FastAPI Boilerplate Setup")
@@ -44,6 +45,7 @@ def show_help():
print(" python setup.py staging # Set up for staging environment")
print(" python setup.py production # Set up for production deployment")
+
def copy_files(deployment_type: str):
"""Copy configuration files for the specified deployment type"""
if deployment_type not in DEPLOYMENTS:
@@ -66,7 +68,7 @@ def copy_files(deployment_type: str):
files_to_copy = [
("Dockerfile", "Dockerfile"),
("docker-compose.yml", "docker-compose.yml"),
- (".env.example", "src/.env")
+ (".env.example", "src/.env"),
]
success = True
@@ -111,6 +113,7 @@ def copy_files(deployment_type: str):
return False
+
def interactive_setup():
"""Interactive setup when no arguments provided"""
print("FastAPI Boilerplate Setup")
@@ -147,6 +150,7 @@ def interactive_setup():
print("\n\n๐ Setup cancelled.")
return None
+
def main():
"""Main entry point"""
if len(sys.argv) > 1 and sys.argv[1] in ["-h", "--help", "help"]:
@@ -168,5 +172,6 @@ def main():
if not success:
sys.exit(1)
+
if __name__ == "__main__":
main()