Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions fastapi_template/__main__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from pathlib import Path

from cookiecutter.exceptions import (FailedHookException,
OutputDirExistsException)
from cookiecutter.exceptions import FailedHookException, OutputDirExistsException
from cookiecutter.main import cookiecutter
from termcolor import cprint

Expand All @@ -21,7 +20,7 @@ def generate_project(context: BuilderContext) -> None:
cookiecutter(
template=f"{script_dir}/template",
extra_context=context.dict(),
default_config=BuilderContext().dict(),
default_config=True,
no_input=True,
overwrite_if_exists=context.force,
)
Expand Down
93 changes: 61 additions & 32 deletions fastapi_template/template/hooks/post_gen_project.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,39 @@
#!/usr/bin/env python
import json
import os
import shutil
import subprocess
import tomllib
import shlex

from termcolor import cprint, colored
from pathlib import Path

CONDITIONAL_MANIFEST = "conditional_files.json"
REPLACE_MANIFEST = "replaceable_files.json"
CONDITIONAL_MANIFEST = Path("conditional_files.toml")
REPLACE_MANIFEST = Path("replaceable_files.toml")


def delete_resource(resource: Path):
    """Delete *resource* from disk, whatever it is.

    Files and symlinks (including broken ones) are unlinked; directories
    are removed recursively.  A path that does not exist is a no-op.
    """
    # is_file() follows symlinks, so test is_symlink() as well: a broken
    # symlink is neither a file nor a directory but still must be removed.
    if resource.is_symlink() or resource.is_file():
        resource.unlink()
    elif resource.is_dir():
        shutil.rmtree(resource)


def delete_resources_for_disabled_features():
    """Remove files belonging to features the user did not enable.

    Reads the TOML manifest, deletes every resource of each disabled
    feature, and finally deletes the manifest itself (it is only needed
    at generation time).
    """
    with CONDITIONAL_MANIFEST.open("rb") as manifest_file:
        manifest = tomllib.load(manifest_file)

    for feature in manifest["features"]:
        # "enabled" is rendered by cookiecutter as the strings "True"/"False";
        # anything other than "true" (case-insensitive) means disabled.
        disabled = feature["enabled"].lower() != "true"
        if disabled:
            text = "{} resources for disabled feature {}...".format(
                colored("Removing", color="red"),
                colored(feature["name"], color="magenta", attrs=["underline"]),
            )
            print(text)
            for resource in feature["resources"]:
                delete_resource(Path(resource))
    delete_resource(CONDITIONAL_MANIFEST)
    cprint("cleanup complete!", color="green")

Expand All @@ -40,14 +44,15 @@ def replace_resources():
colored("resources", color="green"), colored("new project", color="blue")
)
)
with open(REPLACE_MANIFEST) as replace_manifest:
manifest = json.load(replace_manifest)
for target, replaces in manifest.items():
target_path = Path(target)
delete_resource(target_path)
for src_file in map(Path, replaces):
with REPLACE_MANIFEST.open("rb") as replace_manifest:
manifest = tomllib.load(replace_manifest)
for substitution in manifest["sub"]:
target = Path(substitution["target"])
replaces = [Path(path) for path in substitution["replaces"]]
delete_resource(target)
for src_file in replaces:
if src_file.exists():
shutil.move(src_file, target_path)
shutil.move(src_file, target)
delete_resource(REPLACE_MANIFEST)
print(
"Resources are happy to be where {}.".format(
Expand All @@ -56,17 +61,41 @@ def replace_resources():
)


def run_cmd(cmd: str, ignore_error: bool = False):
    """Run *cmd* (a shell-style command string) with captured output.

    On a non-zero exit status, prints a warning together with the
    captured stdout/stderr and terminates the generator with status 1,
    unless ``ignore_error`` is true.
    """
    # shlex.split keeps shell=False (no injection) while still accepting
    # a human-readable command string.
    out = subprocess.run(
        shlex.split(cmd),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if out.returncode != 0 and not ignore_error:
        cprint(" WARNING ".center(50, "="))
        cprint(
            f"[WARN] Command `{cmd}` was not successful. Check output below.",
            "yellow",
        )
        cprint(
            "However, the project was generated. So it could be a false-positive.",
            "yellow",
        )
        cprint(out.stdout.decode(), "red")
        cprint(out.stderr.decode(), "red")
        exit(1)


def init_repo():
    """Initialize git, install dependencies, and create the first commit."""
    run_cmd("git init")
    cprint(" Git repository initialized", "green")
    run_cmd("git add .")
    cprint("🐍 Installing python dependencies with UV", "green")
    run_cmd("uv sync")
    run_cmd("uv run pre-commit install")
    cprint("📚🖌️📄📏 Tidying up the project", "green")
    # Run pre-commit twice: the first pass may rewrite files (formatters),
    # the second verifies the tree is stable. Failures are tolerated here.
    for _ in range(2):
        run_cmd("uv run pre-commit run -a", ignore_error=True)
    run_cmd("git add .")
    cprint("🚀Creating your first commit", "green")
    run_cmd("git commit -m 'Initial commit'")


if __name__ == "__main__":
delete_resources_for_disabled_features()
Expand Down
5 changes: 4 additions & 1 deletion fastapi_template/template/{{cookiecutter.project_name}}/.env
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,7 @@
{%- endif %}
{%- if cookiecutter.add_users == "True" %}
USERS_SECRET=""
{%- endif %}
{%- endif %}
{%- if cookiecutter.enable_kafka == "True" %}
{{cookiecutter.project_name | upper}}_KAFKA_BOOTSTRAP_SERVERS='["localhost:9094"]'
{%- endif %}
55 changes: 26 additions & 29 deletions fastapi_template/template/{{cookiecutter.project_name}}/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,22 +25,15 @@ You can read more about uv here: https://docs.astral.sh/ruff/
You can start the project with docker using this command:

```bash
docker-compose up --build
```

If you want to develop in docker with autoreload and exposed ports add `-f deploy/docker-compose.dev.yml` to your docker command.
Like this:

```bash
docker-compose -f docker-compose.yml -f deploy/docker-compose.dev.yml --project-directory . up --build
docker compose up --build
```

This command exposes the web application on port 8000, mounts current directory and enables autoreload.

But you have to rebuild image every time you modify `uv.lock` or `pyproject.toml` with this command:

```bash
docker-compose build
docker compose build
```

## Project structure
Expand Down Expand Up @@ -98,12 +91,11 @@ you can add `-f ./deploy/docker-compose.otlp.yml` to your docker command.
Like this:

```bash
docker-compose -f docker-compose.yml -f deploy/docker-compose.otlp.yml --project-directory . up
docker compose -f docker-compose.yml -f deploy/docker-compose.otlp.yml --project-directory . up
```

This command will start OpenTelemetry collector and jaeger.
After sending a requests you can see traces in jaeger's UI
at http://localhost:16686/.
This command will start Grafana with the full OpenTelemetry stack at http://localhost:3000/.
After sending requests, you can see traces in the Explore tab under Drilldown.

This docker configuration is not supposed to be used in production.
It's only for demo purpose.
Expand Down Expand Up @@ -189,30 +181,35 @@ aerich migrate
If you want to run it in docker, simply run:

```bash
docker-compose run --build --rm api pytest -vv .
docker-compose down
docker compose run --build --rm api pytest -vv .
docker compose down
```

For running tests on your local machine.

{%- if cookiecutter.db_info.name != "none" %}
{%- if cookiecutter.db_info.name != "sqlite" %}
1. you need to start a database.
{%- if ((cookiecutter.db_info.name != "none" and cookiecutter.db_info.name != "sqlite") or
(cookiecutter.enable_redis == "True") or
(cookiecutter.enable_rmq == "True") or
(cookiecutter.enable_kafka == "True") or
(cookiecutter.enable_nats == "True")
) %}
1. You need to start all auxiliary services.

I prefer doing it with docker:
```
{%- if cookiecutter.db_info.name == "postgresql" %}
docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "POSTGRES_PASSWORD={{cookiecutter.project_name}}" -e "POSTGRES_USER={{cookiecutter.project_name}}" -e "POSTGRES_DB={{cookiecutter.project_name}}" {{cookiecutter.db_info.image}}
{%- endif %}
{%- if cookiecutter.db_info.name == "mysql" %}
docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "MYSQL_PASSWORD={{cookiecutter.project_name}}" -e "MYSQL_USER={{cookiecutter.project_name}}" -e "MYSQL_DATABASE={{cookiecutter.project_name}}" -e ALLOW_EMPTY_PASSWORD=yes {{cookiecutter.db_info.image}}
{%- endif %}
We can do so by using our `docker-compose.yaml` configuration. It already has everything we need.

```bash
docker compose up -d --wait{%- if cookiecutter.db_info.name != 'none' %} db{%- endif %}{%- if cookiecutter.enable_redis == "True" %} redis{%- endif %}{%- if cookiecutter.enable_rmq == "True" %} rmq{%- endif %}{%- if cookiecutter.enable_kafka == "True" %} kafka{%- endif %}{%- if cookiecutter.enable_nats == "True" %} nats{%- endif %}
```
{%- endif %}
{%- endif %}

2. Run tests.
```bash
pytest -vv .
```
{%- else %}
Simply run

2. Run the pytest.
```bash
pytest -vv .
```
{%- endif %}

Loading
Loading