diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..840aa28 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,142 @@ +# VSCode +.vscode/ + +# Everything below this point is a copy of .gitignore. + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# macOS +.DS_Store diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..dfb90b7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "docker" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..6446681 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,79 @@ +name: CI + +"on": + merge_group: {} + pull_request: {} + push: + branches-ignore: + # These should always correspond to pull requests, so ignore them for + # the push trigger and let them be triggered by the pull_request + # trigger, avoiding running the workflow twice. This is a minor + # optimization so there's no need to ensure this is comprehensive. 
+ - "dependabot/**" + - "gh-readonly-queue/**" + - "renovate/**" + - "tickets/**" + - "u/**" + tags: + - "*" + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Run pre-commit + uses: pre-commit/action@v3.0.1 + + test: + runs-on: ubuntu-latest + timeout-minutes: 10 + + strategy: + matrix: + python: + - "3.12" + + steps: + - uses: actions/checkout@v4 + + - name: Run tox + uses: lsst-sqre/run-tox@v1 + with: + python-version: ${{ matrix.python }} + tox-envs: "py,coverage-report,typing" + tox-requirements: requirements/tox.txt + + build: + runs-on: ubuntu-latest + needs: [lint, test] + timeout-minutes: 10 + + # Only do Docker builds of tagged releases and pull requests from ticket + # branches. This will still trigger on pull requests from untrusted + # repositories whose branch names match our tickets/* branch convention, + # but in this case the build will fail with an error since the secret + # won't be set. + if: > + github.event_name != 'merge_group' + && (startsWith(github.ref, 'refs/tags/') + || startsWith(github.head_ref, 'tickets/')) + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: lsst-sqre/build-and-push-to-ghcr@v1 + id: build + with: + image: ${{ github.repository }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/periodic-ci.yaml b/.github/workflows/periodic-ci.yaml new file mode 100644 index 0000000..5389676 --- /dev/null +++ b/.github/workflows/periodic-ci.yaml @@ -0,0 +1,42 @@ +# This is a separate run of the Python test suite that runs from a schedule, +# doesn't cache the tox environment, and updates pinned dependencies first. +# The purpose is to test compatibility with the latest versions of +# dependencies. 
+ +name: Periodic CI + +"on": + schedule: + - cron: "0 12 * * 1" + workflow_dispatch: {} + +jobs: + test: + runs-on: ubuntu-latest + timeout-minutes: 10 + + strategy: + matrix: + python: + - "3.12" + + steps: + - uses: actions/checkout@v4 + + # Use the oldest supported version of Python to update dependencies, + # not the matrixed Python version, since this accurately reflects + # how dependencies should later be updated. + - name: Run neophile + uses: lsst-sqre/run-neophile@v1 + with: + python-version: "3.12" + mode: update + + - name: Run tests in tox + uses: lsst-sqre/run-tox@v1 + with: + python-version: ${{ matrix.python }} + tox-envs: "lint,typing,py" + tox-requirements: requirements/tox.txt + use-cache: false + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7af665b --- /dev/null +++ b/.gitignore @@ -0,0 +1,137 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# macOS +.DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..c1384b7 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,14 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-toml + - id: check-yaml + - id: trailing-whitespace + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.3 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..9aafe5f --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,8 @@ +# Change log + +jeremym-fastapi-example is versioned with [semver](https://semver.org/). +Dependencies are updated to the latest available version during each release, and aren't noted here. + +Find changes for the upcoming release in the project's [changelog.d directory](https://github.com/lsst-dm/jeremym-fastapi-example/tree/main/changelog.d/). + + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f54aada --- /dev/null +++ b/Dockerfile @@ -0,0 +1,68 @@ +# This Dockerfile has four stages: +# +# base-image +# Updates the base Python image with security patches and common system +# packages. This image becomes the base of all other images. +# dependencies-image +# Installs third-party dependencies (requirements/main.txt) into a virtual +# environment. This virtual environment is ideal for copying across build +# stages. 
+# install-image +# Installs the app into the virtual environment. +# runtime-image +# - Copies the virtual environment into place. +# - Runs a non-root user. +# - Sets up the entrypoint and port. + +FROM python:3.12.2-slim-bookworm as base-image + +# Update system packages +COPY scripts/install-base-packages.sh . +RUN ./install-base-packages.sh && rm ./install-base-packages.sh + +FROM base-image AS dependencies-image + +# Install system packages only needed for building dependencies. +COPY scripts/install-dependency-packages.sh . +RUN ./install-dependency-packages.sh + +# Create a Python virtual environment +ENV VIRTUAL_ENV=/opt/venv +RUN python -m venv $VIRTUAL_ENV +# Make sure we use the virtualenv +ENV PATH="$VIRTUAL_ENV/bin:$PATH" +# Put the latest pip and setuptools in the virtualenv +RUN pip install --upgrade --no-cache-dir pip setuptools wheel + +# Install the app's Python runtime dependencies +COPY requirements/main.txt ./requirements.txt +RUN pip install --quiet --no-cache-dir -r requirements.txt + +FROM dependencies-image AS install-image + +# Use the virtualenv +ENV PATH="/opt/venv/bin:$PATH" + +COPY . /workdir +WORKDIR /workdir +RUN pip install --no-cache-dir . + +FROM base-image AS runtime-image + +# Create a non-root user +RUN useradd --create-home appuser + +# Copy the virtualenv +COPY --from=install-image /opt/venv /opt/venv + +# Make sure we use the virtualenv +ENV PATH="/opt/venv/bin:$PATH" + +# Switch to the non-root user. +USER appuser + +# Expose the port. +EXPOSE 8080 + +# Run the application. 
+CMD ["uvicorn", "jeremymfastapiexample.main:app", "--host", "0.0.0.0", "--port", "8080"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..dba7a2c --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2024 The Board of Trustees of the Leland Stanford Junior University, through SLAC National Accelerator Laboratory + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..e69de29 diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..8654d83 --- /dev/null +++ b/Makefile @@ -0,0 +1,48 @@ +.PHONY: help +help: + @echo "Make targets for jeremym-fastapi-example" + @echo "make init - Set up dev environment" + @echo "make run - Start a local development instance" + @echo "make update - Update pinned dependencies and run make init" + @echo "make update-deps - Update pinned dependencies" + @echo "make update-deps-no-hashes - Pin dependencies without hashes" + +.PHONY: init +init: + pip install --upgrade uv + uv pip install -r requirements/main.txt -r requirements/dev.txt \ + -r requirements/tox.txt + uv pip install --editable . + rm -rf .tox + uv pip install --upgrade pre-commit + pre-commit install + +.PHONY: run +run: + tox run -e run + +.PHONY: update +update: update-deps init + +.PHONY: update-deps +update-deps: + pip install --upgrade uv + uv pip install --upgrade pre-commit + pre-commit autoupdate + uv pip compile --upgrade --generate-hashes \ + --output-file requirements/main.txt requirements/main.in + uv pip compile --upgrade --generate-hashes \ + --output-file requirements/dev.txt requirements/dev.in + uv pip compile --upgrade --generate-hashes \ + --output-file requirements/tox.txt requirements/tox.in + +# Useful for testing against a Git version of Safir. 
+.PHONY: update-deps-no-hashes +update-deps-no-hashes: + pip install --upgrade uv + uv pip compile --upgrade \ + --output-file requirements/main.txt requirements/main.in + uv pip compile --upgrade \ + --output-file requirements/dev.txt requirements/dev.in + uv pip compile --upgrade \ + --output-file requirements/tox.txt requirements/tox.in diff --git a/README.md b/README.md new file mode 100644 index 0000000..3f52489 --- /dev/null +++ b/README.md @@ -0,0 +1,6 @@ +# jeremym-fastapi-example + +fastapi example +Learn more at https://jeremym-fastapi-example.lsst.io + +jeremym-fastapi-example is developed with [FastAPI](https://fastapi.tiangolo.com) and [Safir](https://safir.lsst.io). diff --git a/changelog.d/_template.md.jinja b/changelog.d/_template.md.jinja new file mode 100644 index 0000000..6e644b8 --- /dev/null +++ b/changelog.d/_template.md.jinja @@ -0,0 +1,7 @@ + +{%- for cat in config.categories %} + +### {{ cat }} + +- +{%- endfor %} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0f40cd5 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,212 @@ +[project] +# https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ +name = "jeremym-fastapi-example" +description = "fastapi example" +license = { file = "LICENSE" } +readme = "README.md" +keywords = ["rubin", "lsst"] +# https://pypi.org/classifiers/ +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Intended Audience :: Developers", + "Natural Language :: English", + "Operating System :: POSIX", + "Typing :: Typed", +] +requires-python = ">=3.12" +# Use requirements/main.in for runtime dependencies instead. 
+dependencies = [] +dynamic = ["version"] + +[project.scripts] +jeremym-fastapi-example = "jeremymfastapiexample.cli:main" + +[project.urls] +Homepage = "https://jeremym-fastapi-example.lsst.io" +Source = "https://github.com/lsst-dm/jeremym-fastapi-example" + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm[toml]>=6.2"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] + +[tool.coverage.run] +parallel = true +branch = true +source = ["jeremymfastapiexample"] + +[tool.coverage.paths] +source = ["src", ".tox/*/site-packages"] + +[tool.coverage.report] +show_missing = true +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", +] + +[tool.mypy] +disallow_untyped_defs = true +disallow_incomplete_defs = true +ignore_missing_imports = true +local_partial_types = true +plugins = ["pydantic.mypy"] +no_implicit_reexport = true +show_error_codes = true +strict_equality = true +warn_redundant_casts = true +warn_unreachable = true +warn_unused_ignores = true + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true +warn_untyped_fields = true + +[tool.pytest.ini_options] +asyncio_mode = "strict" +# The python_files setting is not for test detection (pytest will pick up any +# test files named *_test.py without this setting) but to enable special +# assert processing in any non-test supporting files under tests. We +# conventionally put test support functions under tests.support and may +# sometimes use assert in test fixtures in conftest.py, and pytest only +# enables magical assert processing (showing a full diff on assert failures +# with complex data structures rather than only the assert message) in files +# listed in python_files. 
+python_files = ["tests/*.py", "tests/*/*.py"] + +# The rule used with Ruff configuration is to disable every lint that has +# legitimate exceptions that are not dodgy code, rather than cluttering code +# with noqa markers. This is therefore a relatively relaxed configuration that +# errs on the side of disabling legitimate lints. +# +# Reference for settings: https://beta.ruff.rs/docs/settings/ +# Reference for rules: https://beta.ruff.rs/docs/rules/ +[tool.ruff] +exclude = [ + "docs/**", +] +line-length = 79 +target-version = "py312" + +[tool.ruff.lint] +ignore = [ + "ANN101", # self should not have a type annotation + "ANN102", # cls should not have a type annotation + "ANN401", # sometimes Any is the right type + "ARG001", # unused function arguments are often legitimate + "ARG002", # unused method arguments are often legitimate + "ARG005", # unused lambda arguments are often legitimate + "BLE001", # we want to catch and report Exception in background tasks + "C414", # nested sorted is how you sort by multiple keys with reverse + "D102", # sometimes we use docstring inheritance + "D104", # don't see the point of documenting every package + "D105", # our style doesn't require docstrings for magic methods + "D106", # Pydantic uses a nested Config class that doesn't warrant docs + "D205", # our documentation style allows a folded first line + "EM101", # justification (duplicate string in traceback) is silly + "EM102", # justification (duplicate string in traceback) is silly + "FBT003", # positional booleans are normal for Pydantic field defaults + "FIX002", # point of a TODO comment is that we're not ready to fix it + "G004", # forbidding logging f-strings is appealing, but not our style + "RET505", # disagree that omitting else always makes code more readable + "PLR0911", # often many returns is clearer and simpler style + "PLR0913", # factory pattern uses constructors with many arguments + "PLR2004", # too aggressive about magic values + "PLW0603", # yes global 
is discouraged but if needed, it's needed + "S105", # good idea but too many false positives on non-passwords + "S106", # good idea but too many false positives on non-passwords + "S107", # good idea but too many false positives on non-passwords + "S603", # not going to manually mark every subprocess call as reviewed + "S607", # using PATH is not a security vulnerability + "SIM102", # sometimes the formatting of nested if statements is clearer + "SIM117", # sometimes nested with contexts are clearer + "TCH001", # we decided to not maintain separate TYPE_CHECKING blocks + "TCH002", # we decided to not maintain separate TYPE_CHECKING blocks + "TCH003", # we decided to not maintain separate TYPE_CHECKING blocks + "TID252", # if we're going to use relative imports, use them always + "TRY003", # good general advice but lint is way too aggressive + "TRY301", # sometimes raising exceptions inside try is the best flow + + # The following settings should be disabled when using ruff format + # per https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q000", + "Q001", + "Q002", + "Q003", + "COM812", + "COM819", + "ISC001", + "ISC002", +] +select = ["ALL"] + +[tool.ruff.lint.per-file-ignores] +"src/jeremymfastapiexample/handlers/**" = [ + "D103", # FastAPI handlers should not have docstrings +] +"tests/**" = [ + "C901", # tests are allowed to be complex, sometimes that's convenient + "D101", # tests don't need docstrings + "D103", # tests don't need docstrings + "PLR0915", # tests are allowed to be long, sometimes that's convenient + "PT012", # way too aggressive about limiting pytest.raises blocks + "S101", # tests should use assert + "S106", # tests are allowed to hard-code dummy passwords + "SLF001", # tests are allowed to access private members +] + +[tool.ruff.lint.isort] +known-first-party = ["jeremymfastapiexample", "tests"] +split-on-trailing-comma = false + +# These are too useful as attributes or 
methods to allow the conflict with the +# built-in to rule out their use. +[tool.ruff.lint.flake8-builtins] +builtins-ignorelist = [ + "all", + "any", + "help", + "id", + "list", + "type", +] + +[tool.ruff.lint.flake8-pytest-style] +fixture-parentheses = false +mark-parentheses = false + +[tool.ruff.lint.pydocstyle] +convention = "numpy" + +[tool.scriv] +categories = [ + "Backwards-incompatible changes", + "New features", + "Bug fixes", + "Other changes", +] +entry_title_template = "{{ version }} ({{ date.strftime('%Y-%m-%d') }})" +format = "md" +md_header_level = "2" +new_fragment_template = "file:changelog.d/_template.md.jinja" +skip_fragments = "_template.md.jinja" diff --git a/requirements/dev.in b/requirements/dev.in new file mode 100644 index 0000000..5d8619b --- /dev/null +++ b/requirements/dev.in @@ -0,0 +1,23 @@ +# -*- conf -*- +# +# Editable development dependencies +# Add direct development, test, and documentation dependencies here, as well +# as implicit dev dependencies with constrained versions. +# +# After editing, update requirements/dev.txt by running: +# make update-deps + +-c main.txt + +# Testing +asgi-lifespan +coverage[toml] +httpx +mypy +pydantic +pytest +pytest-asyncio +pytest-cov + +# Documentation +scriv diff --git a/requirements/main.in b/requirements/main.in new file mode 100644 index 0000000..985d2fe --- /dev/null +++ b/requirements/main.in @@ -0,0 +1,18 @@ +# -*- conf -*- +# +# Editable runtime dependencies (equivalent to install_requires) +# Add direct runtime dependencies here, as well as implicit dependencies +# with constrained versions. +# +# After editing, update requirements/main.txt by running: +# make update-deps + +# These dependencies are for fastapi including some optional features. +fastapi +starlette +uvicorn[standard] + +# Other dependencies. 
+pydantic +pydantic-settings +safir>=5 diff --git a/requirements/tox.in b/requirements/tox.in new file mode 100644 index 0000000..fde253f --- /dev/null +++ b/requirements/tox.in @@ -0,0 +1,14 @@ +# -*- conf -*- +# +# Editable tox dependencies +# Add tox and its plugins here. These will be installed in the user's venv for +# local development and by CI when running tox actions. +# +# After editing, update requirements/tox.txt by running: +# make update-deps + +-c main.txt +-c dev.txt + +tox +tox-uv diff --git a/scripts/install-base-packages.sh b/scripts/install-base-packages.sh new file mode 100755 index 0000000..620781c --- /dev/null +++ b/scripts/install-base-packages.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# This script updates packages in the base Docker image that's used by both the +# build and runtime images, and gives us a place to install additional +# system-level packages with apt-get. +# +# Based on the blog post: +# https://pythonspeed.com/articles/system-packages-docker/ + +# Bash "strict mode", to help catch problems and bugs in the shell +# script. Every bash script you write should include this. See +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ for +# details. +set -euo pipefail + +# Display each command as it's run. 
+set -x + +# Tell apt-get we're never going to be able to give manual +# feedback: +export DEBIAN_FRONTEND=noninteractive + +# Update the package listing, so we know what packages exist: +apt-get update + +# Install security updates: +apt-get -y upgrade + +# Example of installing a new package, without unnecessary packages: +apt-get -y install --no-install-recommends git + +# Delete cached files we don't need anymore: +apt-get clean +rm -rf /var/lib/apt/lists/* diff --git a/scripts/install-dependency-packages.sh b/scripts/install-dependency-packages.sh new file mode 100755 index 0000000..f63ef75 --- /dev/null +++ b/scripts/install-dependency-packages.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# This script installs additional packages used by the dependency image but +# not needed by the runtime image, such as additional packages required to +# build Python dependencies. +# +# Since the base image wipes all the apt caches to clean up the image that +# will be reused by the runtime image, we unfortunately have to do another +# apt-get update here, which wastes some time and network. + +# Bash "strict mode", to help catch problems and bugs in the shell +# script. Every bash script you write should include this. See +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ for +# details. +set -euo pipefail + +# Display each command as it's run. +set -x + +# Tell apt-get we're never going to be able to give manual +# feedback: +export DEBIAN_FRONTEND=noninteractive + +# Update the package listing, so we know what packages exist: +apt-get update + +# Install build-essential because sometimes Python dependencies need to build +# C modules, particularly when upgrading to newer Python versions. libffi-dev +# is sometimes needed to build cffi (a cryptography dependency). 
+apt-get -y install --no-install-recommends build-essential libffi-dev + +# Delete cached files we don't need anymore: +apt-get clean +rm -rf /var/lib/apt/lists/* diff --git a/src/jeremymfastapiexample/__init__.py b/src/jeremymfastapiexample/__init__.py new file mode 100644 index 0000000..aefcffd --- /dev/null +++ b/src/jeremymfastapiexample/__init__.py @@ -0,0 +1,14 @@ +"""The jeremym-fastapi-example service.""" + +__all__ = ["__version__"] + +from importlib.metadata import PackageNotFoundError, version + +__version__: str +"""The application version string (PEP 440 / SemVer compatible).""" + +try: + __version__ = version("jeremym-fastapi-example") +except PackageNotFoundError: + # package is not installed + __version__ = "0.0.0" diff --git a/src/jeremymfastapiexample/config.py b/src/jeremymfastapiexample/config.py new file mode 100644 index 0000000..e225170 --- /dev/null +++ b/src/jeremymfastapiexample/config.py @@ -0,0 +1,35 @@ +"""Configuration definition.""" + +from __future__ import annotations + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict +from safir.logging import LogLevel, Profile + +__all__ = ["Config", "config"] + + +class Config(BaseSettings): + """Configuration for jeremym-fastapi-example.""" + + name: str = Field("jeremym-fastapi-example", title="Name of application") + + path_prefix: str = Field( + "/jeremym-fastapi-example", title="URL prefix for application" + ) + + profile: Profile = Field( + Profile.development, title="Application logging profile" + ) + + log_level: LogLevel = Field( + LogLevel.INFO, title="Log level of the application's logger" + ) + + model_config = SettingsConfigDict( + env_prefix="JEREMYM_FASTAPI_EXAMPLE_", case_sensitive=False + ) + + +config = Config() +"""Configuration for jeremym-fastapi-example.""" diff --git a/src/jeremymfastapiexample/handlers/__init__.py b/src/jeremymfastapiexample/handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/jeremymfastapiexample/handlers/external.py b/src/jeremymfastapiexample/handlers/external.py new file mode 100644 index 0000000..23fc779 --- /dev/null +++ b/src/jeremymfastapiexample/handlers/external.py @@ -0,0 +1,52 @@ +"""Handlers for the app's external root, ``/jeremym-fastapi-example/``.""" + +from typing import Annotated + +from fastapi import APIRouter, Depends +from safir.dependencies.logger import logger_dependency +from safir.metadata import get_metadata +from structlog.stdlib import BoundLogger + +from ..config import config +from ..models import Index + +__all__ = ["get_index", "external_router"] + +external_router = APIRouter() +"""FastAPI router for all external handlers.""" + + +@external_router.get( + "/", + description=( + "Document the top-level API here. By default it only returns metadata" + " about the application." + ), + response_model=Index, + response_model_exclude_none=True, + summary="Application metadata", +) +async def get_index( + logger: Annotated[BoundLogger, Depends(logger_dependency)], +) -> Index: + """GET ``/jeremym-fastapi-example/`` (the app's external root). + + Customize this handler to return whatever the top-level resource of your + application should return. For example, consider listing key API URLs. + When doing so, also change or customize the response model in + `jeremymfastapiexample.models.Index`. + + By convention, the root of the external API includes a field called + ``metadata`` that provides the same Safir-generated metadata as the + internal root endpoint. + """ + # There is no need to log simple requests since uvicorn will do this + # automatically, but this is included as an example of how to use the + # logger for more complex logging. 
+ logger.info("Request for application metadata") + + metadata = get_metadata( + package_name="jeremym-fastapi-example", + application_name=config.name, + ) + return Index(metadata=metadata) diff --git a/src/jeremymfastapiexample/handlers/internal.py b/src/jeremymfastapiexample/handlers/internal.py new file mode 100644 index 0000000..b600285 --- /dev/null +++ b/src/jeremymfastapiexample/handlers/internal.py @@ -0,0 +1,42 @@ +"""Internal HTTP handlers that serve relative to the root path, ``/``. + +These handlers aren't externally visible since the app is available at a path, +``/jeremym-fastapi-example``. See `jeremymfastapiexample.handlers.external` for +the external endpoint handlers. + +These handlers should be used for monitoring, health checks, internal status, +or other information that should not be visible outside the Kubernetes cluster. +""" + +from fastapi import APIRouter +from safir.metadata import Metadata, get_metadata + +from ..config import config + +__all__ = ["get_index", "internal_router"] + +internal_router = APIRouter() +"""FastAPI router for all internal handlers.""" + + +@internal_router.get( + "/", + description=( + "Return metadata about the running application. Can also be used as" + " a health check. This route is not exposed outside the cluster and" + " therefore cannot be used by external clients." + ), + include_in_schema=False, + response_model=Metadata, + response_model_exclude_none=True, + summary="Application metadata", +) +async def get_index() -> Metadata: + """GET ``/`` (the app's internal root). + + By convention, this endpoint returns only the application's metadata. 
+ """ + return get_metadata( + package_name="jeremym-fastapi-example", + application_name=config.name, + ) diff --git a/src/jeremymfastapiexample/main.py b/src/jeremymfastapiexample/main.py new file mode 100644 index 0000000..7ccb984 --- /dev/null +++ b/src/jeremymfastapiexample/main.py @@ -0,0 +1,60 @@ +"""The main application factory for the jeremym-fastapi-example service. + +Notes +----- +Be aware that, following the normal pattern for FastAPI services, the app is +constructed when this module is loaded and is not deferred until a function is +called. +""" + +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from importlib.metadata import metadata, version + +from fastapi import FastAPI +from safir.dependencies.http_client import http_client_dependency +from safir.logging import configure_logging, configure_uvicorn_logging +from safir.middleware.x_forwarded import XForwardedMiddleware + +from .config import config +from .handlers.external import external_router +from .handlers.internal import internal_router + +__all__ = ["app", "config"] + + +@asynccontextmanager +async def lifespan(app: FastAPI) -> AsyncIterator[None]: + """Set up and tear down the application.""" + # Any code here will be run when the application starts up. + + yield + + # Any code here will be run when the application shuts down. + await http_client_dependency.aclose() + + +configure_logging( + profile=config.profile, + log_level=config.log_level, + name="jeremymfastapiexample", +) +configure_uvicorn_logging(config.log_level) + +app = FastAPI( + title="jeremym-fastapi-example", + description=metadata("jeremym-fastapi-example")["Summary"], + version=version("jeremym-fastapi-example"), + openapi_url=f"{config.path_prefix}/openapi.json", + docs_url=f"{config.path_prefix}/docs", + redoc_url=f"{config.path_prefix}/redoc", + lifespan=lifespan, +) +"""The main FastAPI application for jeremym-fastapi-example.""" + +# Attach the routers. 
+app.include_router(internal_router) +app.include_router(external_router, prefix=f"{config.path_prefix}") + +# Add middleware. +app.add_middleware(XForwardedMiddleware) diff --git a/src/jeremymfastapiexample/models.py b/src/jeremymfastapiexample/models.py new file mode 100644 index 0000000..f5afb76 --- /dev/null +++ b/src/jeremymfastapiexample/models.py @@ -0,0 +1,20 @@ +"""Models for jeremym-fastapi-example.""" + +from pydantic import BaseModel, Field +from safir.metadata import Metadata as SafirMetadata + +__all__ = ["Index"] + + +class Index(BaseModel): + """Metadata returned by the external root URL of the application. + + Notes + ----- + As written, this is not very useful. Add additional metadata that will be + helpful for a user exploring the application, or replace this model with + some other model that makes more sense to return from the application API + root. + """ + + metadata: SafirMetadata = Field(..., title="Package metadata") diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..f58761f --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,30 @@ +"""Test fixtures for jeremym-fastapi-example tests.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator + +import pytest_asyncio +from asgi_lifespan import LifespanManager +from fastapi import FastAPI +from httpx import AsyncClient + +from jeremymfastapiexample import main + + +@pytest_asyncio.fixture +async def app() -> AsyncIterator[FastAPI]: + """Return a configured test application. + + Wraps the application in a lifespan manager so that startup and shutdown + events are sent during test execution. 
+ """ + async with LifespanManager(main.app): + yield main.app + + +@pytest_asyncio.fixture +async def client(app: FastAPI) -> AsyncIterator[AsyncClient]: + """Return an ``httpx.AsyncClient`` configured to talk to the test app.""" + async with AsyncClient(app=app, base_url="https://example.com/") as client: + yield client diff --git a/tests/handlers/__init__.py b/tests/handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/handlers/external_test.py b/tests/handlers/external_test.py new file mode 100644 index 0000000..b452933 --- /dev/null +++ b/tests/handlers/external_test.py @@ -0,0 +1,22 @@ +"""Tests for the jeremymfastapiexample.handlers.external module and routes.""" + +from __future__ import annotations + +import pytest +from httpx import AsyncClient + +from jeremymfastapiexample.config import config + + +@pytest.mark.asyncio +async def test_get_index(client: AsyncClient) -> None: + """Test ``GET /jeremym-fastapi-example/``.""" + response = await client.get("/jeremym-fastapi-example/") + assert response.status_code == 200 + data = response.json() + metadata = data["metadata"] + assert metadata["name"] == config.name + assert isinstance(metadata["version"], str) + assert isinstance(metadata["description"], str) + assert isinstance(metadata["repository_url"], str) + assert isinstance(metadata["documentation_url"], str) diff --git a/tests/handlers/internal_test.py b/tests/handlers/internal_test.py new file mode 100644 index 0000000..0be0347 --- /dev/null +++ b/tests/handlers/internal_test.py @@ -0,0 +1,21 @@ +"""Tests for the jeremymfastapiexample.handlers.internal module and routes.""" + +from __future__ import annotations + +import pytest +from httpx import AsyncClient + +from jeremymfastapiexample.config import config + + +@pytest.mark.asyncio +async def test_get_index(client: AsyncClient) -> None: + """Test ``GET /``.""" + response = await client.get("/") + assert response.status_code == 200 + data = response.json() + assert 
data["name"] == config.name + assert isinstance(data["version"], str) + assert isinstance(data["description"], str) + assert isinstance(data["repository_url"], str) + assert isinstance(data["documentation_url"], str) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..9b1af76 --- /dev/null +++ b/tox.ini @@ -0,0 +1,36 @@ +[tox] +envlist = py,coverage-report,typing,lint +isolated_build = True + +[testenv] +description = Run pytest against {envname}. +deps = + -r{toxinidir}/requirements/main.txt + -r{toxinidir}/requirements/dev.txt +commands = + pytest --cov=jeremymfastapiexample --cov-branch --cov-report= {posargs} + +[testenv:coverage-report] +description = Compile coverage from each test run. +skip_install = true +deps = coverage[toml]>=5.0.2 +depends = + py +commands = coverage report + +[testenv:typing] +description = Run mypy. +commands = + mypy src/jeremymfastapiexample tests + +[testenv:lint] +description = Lint codebase by running pre-commit +skip_install = true +deps = + pre-commit +commands = pre-commit run --all-files + +[testenv:run] +description = Run the development server with auto-reload for code changes. +usedevelop = true +commands = uvicorn jeremymfastapiexample.main:app --reload