diff --git a/fastapi_template/__main__.py b/fastapi_template/__main__.py index e4a34d40..78f869be 100644 --- a/fastapi_template/__main__.py +++ b/fastapi_template/__main__.py @@ -1,7 +1,6 @@ from pathlib import Path -from cookiecutter.exceptions import (FailedHookException, - OutputDirExistsException) +from cookiecutter.exceptions import FailedHookException, OutputDirExistsException from cookiecutter.main import cookiecutter from termcolor import cprint @@ -21,7 +20,7 @@ def generate_project(context: BuilderContext) -> None: cookiecutter( template=f"{script_dir}/template", extra_context=context.dict(), - default_config=BuilderContext().dict(), + default_config=True, no_input=True, overwrite_if_exists=context.force, ) diff --git a/fastapi_template/template/hooks/post_gen_project.py b/fastapi_template/template/hooks/post_gen_project.py index 84042aae..ce2f6451 100644 --- a/fastapi_template/template/hooks/post_gen_project.py +++ b/fastapi_template/template/hooks/post_gen_project.py @@ -1,35 +1,39 @@ #!/usr/bin/env python -import json -import os import shutil import subprocess +import tomllib +import shlex from termcolor import cprint, colored from pathlib import Path -CONDITIONAL_MANIFEST = "conditional_files.json" -REPLACE_MANIFEST = "replaceable_files.json" +CONDITIONAL_MANIFEST = Path("conditional_files.toml") +REPLACE_MANIFEST = Path("replaceable_files.toml") -def delete_resource(resource): - if os.path.isfile(resource): - os.remove(resource) - elif os.path.isdir(resource): +def delete_resource(resource: Path): + if resource.is_file(): + resource.unlink() + elif resource.is_dir(): shutil.rmtree(resource) def delete_resources_for_disabled_features(): - with open(CONDITIONAL_MANIFEST) as manifest_file: - manifest = json.load(manifest_file) - for feature_name, feature in manifest.items(): - if feature["enabled"].lower() != "true": + with CONDITIONAL_MANIFEST.open("rb") as manifest_file: + manifest = tomllib.load(manifest_file) + + for feature in manifest["features"]: 
+        enabled = feature["enabled"].lower() != "true" +        name = feature["name"] +        resources = feature["resources"] +        if enabled: text = "{} resources for disabled feature {}...".format( colored("Removing", color="red"), -                colored(feature_name, color="magenta", attrs=["underline"]), +                colored(name, color="magenta", attrs=["underline"]), ) print(text) -            for resource in feature["resources"]: -                delete_resource(resource) +            for resource in resources: +                delete_resource(Path(resource)) delete_resource(CONDITIONAL_MANIFEST) cprint("cleanup complete!", color="green") @@ -40,14 +44,15 @@ def replace_resources(): colored("resources", color="green"), colored("new project", color="blue") ) ) -    with open(REPLACE_MANIFEST) as replace_manifest: -        manifest = json.load(replace_manifest) -    for target, replaces in manifest.items(): -        target_path = Path(target) -        delete_resource(target_path) -        for src_file in map(Path, replaces): +    with REPLACE_MANIFEST.open("rb") as replace_manifest: +        manifest = tomllib.load(replace_manifest) +    for substitution in manifest["sub"]: +        target = Path(substitution["target"]) +        replaces = [Path(path) for path in substitution["replaces"]] +        delete_resource(target) +        for src_file in replaces: if src_file.exists(): -            shutil.move(src_file, target_path) +            shutil.move(src_file, target) delete_resource(REPLACE_MANIFEST) print( "Resources are happy to be where {}.".format( ) ) @@ -56,17 +61,41 @@ +def run_cmd(cmd: str, ignore_error: bool = False): +    out = subprocess.run( +        shlex.split(cmd), +        stdout=subprocess.PIPE, +        stderr=subprocess.PIPE, +    ) +    if out.returncode != 0 and not ignore_error: +        cprint(" WARNING ".center(50, "=")) +        cprint( +            f"[WARN] Command `{cmd}` was not successful. Check output below.", +            "yellow", +        ) +        cprint( +            "However, the project was generated. 
So it could be a false-positive.", +            "yellow", +        ) +        cprint(out.stdout.decode(), "red") +        cprint(out.stderr.decode(), "red") +        exit(1) + + def init_repo(): -    subprocess.run(["git", "init"], stdout=subprocess.PIPE) -    cprint("Git repository initialized.", "green") -    subprocess.run(["git", "add", "."], stdout=subprocess.PIPE) -    cprint("Added files to index.", "green") -    subprocess.run(["uv", "sync"]) -    subprocess.run(["uv", "run", "pre-commit", "install"]) -    cprint("pre-commit installed.", "green") -    subprocess.run(["uv", "run", "pre-commit", "run", "-a"]) -    subprocess.run(["git", "add", "."], stdout=subprocess.PIPE) -    subprocess.run(["git", "commit", "-m", "Initial commit"], stdout=subprocess.PIPE) +    run_cmd("git init") +    cprint("ξΆ§ Git repository initialized", "green") +    run_cmd("git add .") +    cprint("🐍 Installing Python dependencies with uv", "green") +    run_cmd("uv sync") +    run_cmd("uv run pre-commit install") +    cprint("πŸ“šπŸ–ŒοΈπŸ“„πŸ“ Tidying up the project", "green") +    for _ in range(2): +        run_cmd("uv run pre-commit run -a", ignore_error=True) +    run_cmd("git add .") +    cprint("πŸš€ Creating your first commit", "green") +    run_cmd("git commit -m 'Initial commit'") + if __name__ == "__main__": delete_resources_for_disabled_features() diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/.env b/fastapi_template/template/{{cookiecutter.project_name}}/.env index 7ade1f1d..df555456 100644 --- a/fastapi_template/template/{{cookiecutter.project_name}}/.env +++ b/fastapi_template/template/{{cookiecutter.project_name}}/.env @@ -6,4 +6,7 @@ {%- endif %} {%- if cookiecutter.add_users == "True" %} USERS_SECRET="" -{%- endif %} \ No newline at end of file +{%- endif %} +{%- if cookiecutter.enable_kafka == "True" %} +{{cookiecutter.project_name | upper}}_KAFKA_BOOTSTRAP_SERVERS='["localhost:9094"]' +{%- endif %} diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/README.md b/fastapi_template/template/{{cookiecutter.project_name}}/README.md index 
205276c3..27c72bd0 100644 --- a/fastapi_template/template/{{cookiecutter.project_name}}/README.md +++ b/fastapi_template/template/{{cookiecutter.project_name}}/README.md @@ -25,14 +25,7 @@ You can read more about uv here: https://docs.astral.sh/ruff/ You can start the project with docker using this command: ```bash -docker-compose up --build -``` - -If you want to develop in docker with autoreload and exposed ports add `-f deploy/docker-compose.dev.yml` to your docker command. -Like this: - -```bash -docker-compose -f docker-compose.yml -f deploy/docker-compose.dev.yml --project-directory . up --build +docker compose up --build ``` This command exposes the web application on port 8000, mounts current directory and enables autoreload. @@ -40,7 +33,7 @@ This command exposes the web application on port 8000, mounts current directory But you have to rebuild image every time you modify `uv.lock` or `pyproject.toml` with this command: ```bash -docker-compose build +docker compose build ``` ## Project structure @@ -98,12 +91,11 @@ you can add `-f ./deploy/docker-compose.otlp.yml` to your docker command. Like this: ```bash -docker-compose -f docker-compose.yml -f deploy/docker-compose.otlp.yml --project-directory . up +docker compose -f docker-compose.yml -f deploy/docker-compose.otlp.yml --project-directory . up ``` -This command will start OpenTelemetry collector and jaeger. -After sending a requests you can see traces in jaeger's UI -at http://localhost:16686/. +This command will start Grafana with the full OpenTelemetry stack at http://localhost:3000/. +After sending requests you can see traces in the Explore tab in Drilldown. This docker configuration is not supposed to be used in production. It's only for demo purpose. @@ -189,30 +181,35 @@ aerich migrate If you want to run it in docker, simply run: ```bash -docker-compose run --build --rm api pytest -vv . 
+docker compose down ``` For running tests on your local machine. -{%- if cookiecutter.db_info.name != "none" %} -{%- if cookiecutter.db_info.name != "sqlite" %} -1. you need to start a database. +{%- if ((cookiecutter.db_info.name != "none" and cookiecutter.db_info.name != "sqlite") or + (cookiecutter.enable_redis == "True") or + (cookiecutter.enable_rmq == "True") or + (cookiecutter.enable_kafka == "True") or + (cookiecutter.enable_nats == "True") +) %} +1. you need to start all aux services. -I prefer doing it with docker: -``` -{%- if cookiecutter.db_info.name == "postgresql" %} -docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "POSTGRES_PASSWORD={{cookiecutter.project_name}}" -e "POSTGRES_USER={{cookiecutter.project_name}}" -e "POSTGRES_DB={{cookiecutter.project_name}}" {{cookiecutter.db_info.image}} -{%- endif %} -{%- if cookiecutter.db_info.name == "mysql" %} -docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "MYSQL_PASSWORD={{cookiecutter.project_name}}" -e "MYSQL_USER={{cookiecutter.project_name}}" -e "MYSQL_DATABASE={{cookiecutter.project_name}}" -e ALLOW_EMPTY_PASSWORD=yes {{cookiecutter.db_info.image}} -{%- endif %} +We can do so by using our `docker-compose.yaml` configuration. It already has everything we need. + +```bash +docker compose up -d --wait{%- if cookiecutter.db_info.name != 'none' %} db{%- endif %}{%- if cookiecutter.enable_redis == "True" %} redis{%- endif %}{%- if cookiecutter.enable_rmq == "True" %} rmq{%- endif %}{%- if cookiecutter.enable_kafka == "True" %} kafka{%- endif %}{%- if cookiecutter.enable_nats == "True" %} nats{%- endif %} ``` -{%- endif %} -{%- endif %} +2. Run tests. +```bash +pytest -vv . +``` +{%- else %} +Simply run -2. Run the pytest. ```bash pytest -vv . 
``` +{%- endif %} + diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.json b/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.json deleted file mode 100644 index 7804c227..00000000 --- a/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.json +++ /dev/null @@ -1,240 +0,0 @@ -{ - "GraphQL API": { - "enabled": "{{cookiecutter.api_type == 'graphql'}}", - "resources": [ - "{{cookiecutter.project_name}}/web/gql" - ] - }, - "REST API": { - "enabled": "{{cookiecutter.api_type == 'rest'}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/rabbit", - "{{cookiecutter.project_name}}/web/api/dummy", - "{{cookiecutter.project_name}}/web/api/echo", - "{{cookiecutter.project_name}}/web/api/redis", - "{{cookiecutter.project_name}}/web/api/kafka", - "{{cookiecutter.project_name}}/web/api/nats" - ] - }, - "Redis": { - "enabled": "{{cookiecutter.enable_redis}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/redis", - "{{cookiecutter.project_name}}/web/gql/redis", - "{{cookiecutter.project_name}}/services/redis", - "tests/test_redis.py" - ] - }, - "RabbitMQ support": { - "enabled": "{{cookiecutter.enable_rmq}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/rabbit", - "{{cookiecutter.project_name}}/web/gql/rabbit", - "{{cookiecutter.project_name}}/services/rabbit", - "tests/test_rabbit.py" - ] - }, - "Kafka support": { - "enabled": "{{cookiecutter.enable_kafka}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/kafka", - "{{cookiecutter.project_name}}/web/gql/kafka", - "{{cookiecutter.project_name}}/services/kafka", - "tests/test_kafka.py" - ] - }, - "Nats support": { - "enabled": "{{cookiecutter.enable_nats}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/nats", - "{{cookiecutter.project_name}}/web/gql/nats", - "{{cookiecutter.project_name}}/services/nats", - "tests/test_nats.py" - ] - }, - "Database support": { - "enabled": 
"{{cookiecutter.db_info.name != 'none'}}", - "resources": [ - "alembic.ini", - "{{cookiecutter.project_name}}/web/api/dummy", - "{{cookiecutter.project_name}}/web/gql/dummy", - "tests/test_dummy.py" - ] - }, - "Beanie support": { - "enabled": "{{cookiecutter.db_info.name == 'mongodb'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_beanie" - ] - }, - "Migrations": { - "enabled": "{{cookiecutter.enable_migrations}}", - "resources": [ - "alembic.ini", - "{{cookiecutter.project_name}}/db_sa/migrations", - "{{cookiecutter.project_name}}/db_ormar/migrations", - "{{cookiecutter.project_name}}/db_tortoise/migrations", - "{{cookiecutter.project_name}}/db_piccolo/migrations", - "{{cookiecutter.project_name}}/db_beanie/migrations" - ] - }, - "Alembic migrations": { - "enabled": "{{cookiecutter.orm in ['ormar', 'sqlalchemy']}}", - "resources": [ - "alembic.ini" - ] - }, - "Gitlab CI": { - "enabled": "{{cookiecutter.ci_type == 'gitlab_ci'}}", - "resources": [ - ".gitlab-ci.yml" - ] - }, - "Github CI": { - "enabled": "{{cookiecutter.ci_type == 'github'}}", - "resources": [ - ".github" - ] - }, - "Loguru": { - "enabled": "{{cookiecutter.enable_loguru}}", - "resources": [ - "{{cookiecutter.project_name}}/logging.py" - ] - }, - "Routers": { - "enabled": "{{cookiecutter.enable_routers}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/echo", - "{{cookiecutter.project_name}}/web/gql/echo", - "{{cookiecutter.project_name}}/web/api/dummy", - "{{cookiecutter.project_name}}/web/gql/dummy", - "{{cookiecutter.project_name}}/web/api/redis", - "{{cookiecutter.project_name}}/web/gql/redis", - "{{cookiecutter.project_name}}/web/api/kafka", - "{{cookiecutter.project_name}}/web/gql/kafka", - "{{cookiecutter.project_name}}/web/api/rabbit", - "{{cookiecutter.project_name}}/web/gql/rabbit", - "tests/test_echo.py", - "tests/test_dummy.py", - "tests/test_redis.py", - "tests/test_rabbit.py", - "tests/test_kafka.py" - ] - }, - "Users model": { - "enabled": 
"{{cookiecutter.add_users}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/users", - "{{cookiecutter.project_name}}/db_sa/models/users.py" - ] - }, - "Dummy model": { - "enabled": "{{cookiecutter.add_dummy}}", - "resources": [ - "{{cookiecutter.project_name}}/web/api/dummy", - "{{cookiecutter.project_name}}/web/gql/dummy", - "{{cookiecutter.project_name}}/db_sa/dao", - "{{cookiecutter.project_name}}/db_sa/models/dummy_model.py", - "{{cookiecutter.project_name}}/db_ormar/dao", - "{{cookiecutter.project_name}}/db_ormar/models/dummy_model.py", - "{{cookiecutter.project_name}}/db_tortoise/dao", - "{{cookiecutter.project_name}}/db_tortoise/models/dummy_model.py", - "{{cookiecutter.project_name}}/db_psycopg/dao", - "{{cookiecutter.project_name}}/db_psycopg/models/dummy_model.py", - "tests/test_dummy.py", - "{{cookiecutter.project_name}}/db_piccolo/dao", - "{{cookiecutter.project_name}}/db_piccolo/models/dummy_model.py", - "{{cookiecutter.project_name}}/db_beanie/models/dummy_model.py", - "{{cookiecutter.project_name}}/db_sa/migrations/versions/2021-08-16-16-55_2b7380507a71.py", - "{{cookiecutter.project_name}}/db_ormar/migrations/versions/2021-08-16-16-55_2b7380507a71.py", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_pg.sql", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_mysql.sql", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_sqlite.sql", - "{{cookiecutter.project_name}}/db_piccolo/migrations/2022-04-16T17-38-51-672827.py" - ] - }, - "Self-hosted swagger": { - "enabled": "{{cookiecutter.self_hosted_swagger}}", - "resources": [ - "{{cookiecutter.project_name}}/static/docs", - "{{cookiecutter.project_name}}/web/api/docs" - ] - }, - "SQLAlchemy ORM": { - "enabled": "{{cookiecutter.orm == 'sqlalchemy'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_sa" - ] - }, - "Tortoise ORM": { - "enabled": "{{cookiecutter.orm == 
'tortoise'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_tortoise" - ] - }, - "Ormar ORM": { - "enabled": "{{cookiecutter.orm == 'ormar'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_ormar" - ] - }, - "PsycoPG": { - "enabled": "{{cookiecutter.orm == 'psycopg'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_psycopg" - ] - }, - "Piccolo": { - "enabled": "{{cookiecutter.orm == 'piccolo'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_piccolo", - "{{cookiecutter.project_name}}/piccolo_conf.py" - ] - }, - "Beanie": { - "enabled": "{{cookiecutter.orm == 'beanie'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_beanie" - ] - }, - "Postgresql DB": { - "enabled": "{{cookiecutter.db_info.name == 'postgresql'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_pg.sql", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_pg.sql" - ] - }, - "MySQL DB": { - "enabled": "{{cookiecutter.db_info.name == 'mysql'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_mysql.sql", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_mysql.sql" - ] - }, - "Opentelemetry support": { - "enabled": "{{cookiecutter.otlp_enabled}}", - "resources": [ - "deploy/docker-compose.otlp.yml" - ] - }, - "SQLite DB": { - "enabled": "{{cookiecutter.db_info.name == 'sqlite'}}", - "resources": [ - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_sqlite.sql", - "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_sqlite.sql" - ] - }, - "Taskiq support":{ - "enabled": "{{cookiecutter.enable_taskiq}}", - "resources": [ - "{{cookiecutter.project_name}}/tkq.py" - ] - }, - "Gunicorn support":{ - "enabled": "{{cookiecutter.gunicorn}}", - "resources": [ - "{{cookiecutter.project_name}}/gunicorn_runner.py" - ] - } -} 
diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.toml b/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.toml new file mode 100644 index 00000000..b02e0ae8 --- /dev/null +++ b/fastapi_template/template/{{cookiecutter.project_name}}/conditional_files.toml @@ -0,0 +1,233 @@ +[[features]] +name = "GraphQL API" +enabled = "{{cookiecutter.api_type == 'graphql'}}" +resources = ["{{cookiecutter.project_name}}/web/gql"] + +[[features]] +name = "REST API" +enabled = "{{cookiecutter.api_type == 'rest'}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/rabbit", + "{{cookiecutter.project_name}}/web/api/dummy", + "{{cookiecutter.project_name}}/web/api/echo", + "{{cookiecutter.project_name}}/web/api/redis", + "{{cookiecutter.project_name}}/web/api/kafka", + "{{cookiecutter.project_name}}/web/api/nats" +] + +[[features]] +name = "Redis support" +enabled = "{{cookiecutter.enable_redis}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/redis", + "{{cookiecutter.project_name}}/web/gql/redis", + "{{cookiecutter.project_name}}/services/redis", + "tests/test_redis.py" +] + +[[features]] +name = "RabbitMQ support" +enabled = "{{cookiecutter.enable_rmq}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/rabbit", + "{{cookiecutter.project_name}}/web/gql/rabbit", + "{{cookiecutter.project_name}}/services/rabbit", + "tests/test_rabbit.py" +] + +[[features]] +name = "Kafka support" +enabled = "{{cookiecutter.enable_kafka}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/kafka", + "{{cookiecutter.project_name}}/web/gql/kafka", + "{{cookiecutter.project_name}}/services/kafka", + "tests/test_kafka.py" +] + +[[features]] +name = "NATS support" +enabled = "{{cookiecutter.enable_nats}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/nats", + "{{cookiecutter.project_name}}/web/gql/nats", + "{{cookiecutter.project_name}}/services/nats", + "tests/test_nats.py" +] + +[[features]] +name = "Database 
support" +enabled = "{{cookiecutter.db_info.name != 'none'}}" +resources = [ + "alembic.ini", + "{{cookiecutter.project_name}}/web/api/dummy", + "{{cookiecutter.project_name}}/web/gql/dummy", + "tests/test_dummy.py" +] + +[[features]] +name = "Migrations support" +enabled = "{{cookiecutter.enable_migrations}}" +resources = [ + "alembic.ini", + "{{cookiecutter.project_name}}/db_sa/migrations", + "{{cookiecutter.project_name}}/db_ormar/migrations", + "{{cookiecutter.project_name}}/db_tortoise/migrations", + "{{cookiecutter.project_name}}/db_piccolo/migrations", + "{{cookiecutter.project_name}}/db_beanie/migrations" +] + +[[features]] +name = "Alembic migrations" +enabled = "{{cookiecutter.orm in ['ormar', 'sqlalchemy']}}" +resources = ["alembic.ini"] + +[[features]] +name = "Gitlab CI" +enabled = "{{cookiecutter.ci_type == 'gitlab_ci'}}" +resources = [".gitlab-ci.yml"] + +[[features]] +name = "GitHub actions" +enabled = "{{cookiecutter.ci_type == 'github'}}" +resources = [".github"] + +[[features]] +name = "Loguru support" +enabled = "{{cookiecutter.enable_loguru}}" +resources = ["{{cookiecutter.project_name}}/logging.py"] + +[[features]] +name = "Example routes" +enabled = "{{cookiecutter.enable_routers}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/echo", + "{{cookiecutter.project_name}}/web/gql/echo", + "{{cookiecutter.project_name}}/web/api/dummy", + "{{cookiecutter.project_name}}/web/gql/dummy", + "{{cookiecutter.project_name}}/web/api/redis", + "{{cookiecutter.project_name}}/web/gql/redis", + "{{cookiecutter.project_name}}/web/api/kafka", + "{{cookiecutter.project_name}}/web/gql/kafka", + "{{cookiecutter.project_name}}/web/api/rabbit", + "{{cookiecutter.project_name}}/web/gql/rabbit", + "tests/test_echo.py", + "tests/test_dummy.py", + "tests/test_redis.py", + "tests/test_rabbit.py", + "tests/test_kafka.py" +] + +[[features]] +name = "User models" +enabled = "{{cookiecutter.add_users}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/users", 
+ "{{cookiecutter.project_name}}/db_sa/models/users.py" +] + +[[features]] +name = "Dummy models" +enabled = "{{cookiecutter.add_dummy}}" +resources = [ + "{{cookiecutter.project_name}}/web/api/dummy", + "{{cookiecutter.project_name}}/web/gql/dummy", + "{{cookiecutter.project_name}}/db_sa/dao", + "{{cookiecutter.project_name}}/db_sa/models/dummy_model.py", + "{{cookiecutter.project_name}}/db_ormar/dao", + "{{cookiecutter.project_name}}/db_ormar/models/dummy_model.py", + "{{cookiecutter.project_name}}/db_tortoise/dao", + "{{cookiecutter.project_name}}/db_tortoise/models/dummy_model.py", + "{{cookiecutter.project_name}}/db_psycopg/dao", + "{{cookiecutter.project_name}}/db_psycopg/models/dummy_model.py", + "tests/test_dummy.py", + "{{cookiecutter.project_name}}/db_piccolo/dao", + "{{cookiecutter.project_name}}/db_piccolo/models/dummy_model.py", + "{{cookiecutter.project_name}}/db_beanie/models/dummy_model.py", + "{{cookiecutter.project_name}}/db_sa/migrations/versions/2021-08-16-16-55_2b7380507a71.py", + "{{cookiecutter.project_name}}/db_ormar/migrations/versions/2021-08-16-16-55_2b7380507a71.py", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_pg.sql", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_mysql.sql", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_sqlite.sql", + "{{cookiecutter.project_name}}/db_piccolo/migrations/2022-04-16T17-38-51-672827.py" +] + +[[features]] +name = "Self-Hosted Swagger UI" +enabled = "{{cookiecutter.self_hosted_swagger}}" +resources = [ + "{{cookiecutter.project_name}}/static/docs", + "{{cookiecutter.project_name}}/web/api/docs" +] + +[[features]] +name = "SQLAlchemy ORM" +enabled = "{{cookiecutter.orm == 'sqlalchemy'}}" +resources = ["{{cookiecutter.project_name}}/db_sa"] + +[[features]] +name = "Tortoise ORM" +enabled = "{{cookiecutter.orm == 'tortoise'}}" +resources = 
["{{cookiecutter.project_name}}/db_tortoise"] + +[[features]] +name = "Ormar ORM" +enabled = "{{cookiecutter.orm == 'ormar'}}" +resources = ["{{cookiecutter.project_name}}/db_ormar"] + +[[features]] +name = "PsycoPG" +enabled = "{{cookiecutter.orm == 'psycopg'}}" +resources = ["{{cookiecutter.project_name}}/db_psycopg"] + +[[features]] +name = "Piccolo" +enabled = "{{cookiecutter.orm == 'piccolo'}}" +resources = [ + "{{cookiecutter.project_name}}/db_piccolo", + "{{cookiecutter.project_name}}/piccolo_conf.py" +] + +[[features]] +name = "Beanie" +enabled = "{{cookiecutter.orm == 'beanie'}}" +resources = ["{{cookiecutter.project_name}}/db_beanie"] + +[[features]] +name = "PostgreSQL support" +enabled = "{{cookiecutter.db_info.name == 'postgresql'}}" +resources = [ + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_pg.sql", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_pg.sql" +] + +[[features]] +name = "MySQL DB" +enabled = "{{cookiecutter.db_info.name == 'mysql'}}" +resources = [ + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_mysql.sql", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_mysql.sql" +] + +[[features]] +name = "SQLite DB" +enabled = "{{cookiecutter.db_info.name == 'sqlite'}}" +resources = [ + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_sqlite.sql", + "{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_sqlite.sql" +] + +[[features]] +name = "Opentelemetry support" +enabled = "{{cookiecutter.otlp_enabled}}" +resources = ["deploy/docker-compose.otlp.yml"] + +[[features]] +name = "Taskiq support" +enabled = "{{cookiecutter.enable_taskiq}}" +resources = ["{{cookiecutter.project_name}}/tkq.py"] + +[[features]] +name = "Gunicorn support" +enabled = "{{cookiecutter.gunicorn}}" +resources = 
["{{cookiecutter.project_name}}/gunicorn_runner.py"] diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/docker-compose.yml b/fastapi_template/template/{{cookiecutter.project_name}}/docker-compose.yml index c915de23..5bccd953 100644 --- a/fastapi_template/template/{{cookiecutter.project_name}}/docker-compose.yml +++ b/fastapi_template/template/{{cookiecutter.project_name}}/docker-compose.yml @@ -111,6 +111,8 @@ services: db: image: {{cookiecutter.db_info.image}} hostname: {{cookiecutter.project_name}}-db + ports: + - "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" environment: POSTGRES_PASSWORD: "{{cookiecutter.project_name}}" POSTGRES_USER: "{{cookiecutter.project_name}}" @@ -130,6 +132,8 @@ services: image: {{cookiecutter.db_info.image}} hostname: {{cookiecutter.project_name}}-db restart: always + ports: + - "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" environment: MONGO_INITDB_ROOT_USERNAME: "{{cookiecutter.project_name}}" MONGO_INITDB_ROOT_PASSWORD: "{{cookiecutter.project_name}}" @@ -149,6 +153,8 @@ services: image: {{cookiecutter.db_info.image}} hostname: {{cookiecutter.project_name}}-db restart: always + ports: + - "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" environment: MYSQL_USER: "{{cookiecutter.project_name}}" MYSQL_PASSWORD: "{{cookiecutter.project_name}}" @@ -198,6 +204,8 @@ services: image: redis:8.4.0 hostname: "{{cookiecutter.project_name}}-redis" restart: always + ports: + - 6379:6379 environment: ALLOW_EMPTY_PASSWORD: "yes" healthcheck: @@ -213,6 +221,9 @@ services: image: rabbitmq:4.2.1-management-alpine hostname: "{{cookiecutter.project_name}}-rmq" restart: always + ports: + - 5672:5672 + - 15672:15672 environment: RABBITMQ_DEFAULT_USER: "guest" RABBITMQ_DEFAULT_PASS: "guest" @@ -229,19 +240,18 @@ services: kafka: image: apache/kafka:4.1.1 hostname: "{{cookiecutter.project_name}}-kafka" + ports: + - 9094:9094 environment: KAFKA_NODE_ID: "0" KAFKA_PROCESS_ROLES: "controller,broker" 
KAFKA_LISTENERS: "PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094" - KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094" + KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://{{cookiecutter.project_name}}-kafka:9092,EXTERNAL://localhost:9094" KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT" KAFKA_CONTROLLER_QUORUM_VOTERS: "0@{{cookiecutter.project_name}}-kafka:9093" KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER" KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1" - # Uncomment it to connect from localhost. - # ports: - # - 9094:9094 healthcheck: test: /opt/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092 interval: 1s @@ -255,6 +265,8 @@ services: image: nats:2.12-alpine hostname: "{{cookiecutter.project_name}}-nats" command: -m 8222 -js + ports: + - 4222:4222 healthcheck: test: - CMD @@ -265,8 +277,6 @@ services: timeout: 3s retries: 20 start_period: 3s - ports: - - 4222:4222 {%- endif %} {% if cookiecutter.db_info.name != 'none' %} diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.json b/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.json deleted file mode 100644 index 29d2c07c..00000000 --- a/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "{{cookiecutter.project_name}}/db": [ - "{{cookiecutter.project_name}}/db_sa", - "{{cookiecutter.project_name}}/db_ormar", - "{{cookiecutter.project_name}}/db_tortoise", - "{{cookiecutter.project_name}}/db_psycopg", - "{{cookiecutter.project_name}}/db_piccolo", - "{{cookiecutter.project_name}}/db_beanie" - ] -} diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.toml b/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.toml new file mode 100644 index 00000000..2ee549b4 --- /dev/null +++ 
b/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.toml @@ -0,0 +1,10 @@ +[[sub]] +target = "{{cookiecutter.project_name}}/db" +replaces = [ + "{{cookiecutter.project_name}}/db_sa", + "{{cookiecutter.project_name}}/db_ormar", + "{{cookiecutter.project_name}}/db_tortoise", + "{{cookiecutter.project_name}}/db_psycopg", + "{{cookiecutter.project_name}}/db_piccolo", + "{{cookiecutter.project_name}}/db_beanie" +] diff --git a/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings.py b/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings.py index 401e5530..1f65d901 100644 --- a/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings.py +++ b/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings.py @@ -56,12 +56,8 @@ class Settings(BaseSettings): db_port: int = {{cookiecutter.db_info.port}} db_user: str = "{{cookiecutter.project_name}}" db_pass: str = "{{cookiecutter.project_name}}" # noqa: S105 - {%- if cookiecutter.db_info.name != "sqlite" %} - db_base: str = "admin" - {%- else %} db_base: str = "{{cookiecutter.project_name}}" {%- endif %} - {%- endif %} db_echo: bool = False {%- endif %} @@ -70,7 +66,7 @@ class Settings(BaseSettings): {%- if cookiecutter.enable_redis == "True" %} # Variables for Redis - redis_host: str = "{{cookiecutter.project_name}}-redis" + redis_host: str = "localhost" redis_port: int = 6379 redis_user: Optional[str] = None redis_pass: Optional[str] = None @@ -82,7 +78,7 @@ class Settings(BaseSettings): {%- if cookiecutter.enable_rmq == "True" %} # Variables for RabbitMQ - rabbit_host: str = "{{cookiecutter.project_name}}-rmq" + rabbit_host: str = "localhost" rabbit_port: int = 5672 rabbit_user: str = "guest" rabbit_pass: str = "guest" # noqa: S105 @@ -122,35 +118,36 @@ class Settings(BaseSettings): {%- if cookiecutter.enable_kafka == "True" %} - 
kafka_bootstrap_servers: List[str] = ["{{cookiecutter.project_name}}-kafka:9092"] + kafka_bootstrap_servers: List[str] = ["localhost:9092"] {%- endif %} {%- if cookiecutter.enable_nats == "True" %} - nats_hosts: list[str] = ["nats://{{cookiecutter.project_name}}-nats:4222"] + nats_hosts: list[str] = ["nats://localhost:4222"] {%- endif %} {%- if cookiecutter.db_info.name != "none" %} @property - def db_url(self) -> URL: + def db_url(self) -> {%- if cookiecutter.db_info.name == "sqlite" + %}str{%- else %}URL{%- endif %}: """ Assemble database URL from settings. :return: database URL. """ {%- if cookiecutter.db_info.name == "sqlite" %} - return URL.build( + return ( {%- if cookiecutter.orm == "sqlalchemy" %} - scheme="{{cookiecutter.db_info.async_driver}}", + "{{cookiecutter.db_info.async_driver}}:" {%- elif cookiecutter.orm == "tortoise" %} - scheme="{{cookiecutter.db_info.driver_short}}", + "{{cookiecutter.db_info.driver_short}}:" {%- else %} - scheme="{{cookiecutter.db_info.driver}}", + "{{cookiecutter.db_info.driver}}:" {%- endif %} - path=f"///{self.db_file}" + f"///{self.db_file}" ) {%- else %} return URL.build( diff --git a/fastapi_template/tests/utils.py b/fastapi_template/tests/utils.py index 2f1060ed..c5db72e0 100644 --- a/fastapi_template/tests/utils.py +++ b/fastapi_template/tests/utils.py @@ -32,6 +32,8 @@ def run_default_check(context: BuilderContext, worker_id: str, without_pytest=Fa with compose.open("r") as compose_file: data = yaml.safe_load(compose_file) data["services"]["api"]["image"] = f"test_image:v{worker_id}" + for service in data["services"].values(): + del service["ports"] with compose.open("w") as compose_file: yaml.safe_dump(data, compose_file)