From 55d1569d9de3760d65eb9bd81e8c48310e5195b8 Mon Sep 17 00:00:00 2001 From: Young Joon Lee Date: Thu, 29 Feb 2024 18:06:50 +0900 Subject: [PATCH] feat(.copier-config.yaml): create new template config file --- .copier-config.yaml | 34 + .copier-docker-config.yaml | 56 + .copierignore | 6 + .docker/.docker-scripts/docker-compose.sh | 182 +++ .docker/.dockerignore | 11 + .docker/.ids/admin.env | 11 + .docker/.ids/app.env | 10 + .docker/.ids/default.env | 10 + .docker/.ids/id.env | 11 + .docker/Dockerfile.app | 100 ++ .docker/Dockerfile.base | 105 ++ .docker/docker-compose.app.yaml | 84 ++ .docker/docker-compose.base.yaml | 36 + .docker/docker.app.env | 33 + .docker/docker.base.env | 9 + .docker/docker.common.env | 52 + .docker/docker.version | 1 + .docker/scripts/init-dotfiles.sh | 3 + .docker/scripts/launch.sh | 31 + .docker/scripts/requirements-base.txt | 1 + .docker/scripts/sshd_config | 121 ++ .editorconfig | 24 + .gitattributes | 1 + .github/dependabot.yaml | 25 + .github/labels.yaml | 66 + .github/release.yaml | 20 + .github/scripts/autotoc.py | 91 ++ .github/scripts/free-disk-space.sh | 49 + .github/workflows/deploy-app-image.yaml | 88 ++ .github/workflows/deploy-base-image.yaml | 86 ++ .github/workflows/deploy-docs.yaml | 43 + .github/workflows/lint_and_test.yaml | 78 ++ .../workflows/prerelease-to-test-pypi.yaml | 46 + .github/workflows/prerelease.yaml | 53 + .github/workflows/release-patch.yaml | 51 + .github/workflows/release-test.yaml | 44 + .github/workflows/release.yaml | 55 + .gitignore | 40 + .pre-commit-config.yaml | 55 + .tasks-extra.toml | 7 + .tasks.toml | 116 ++ CHANGELOG.md | 1 + CODE_OF_CONDUCT.md | 132 ++ CONTRIBUTING.md | 55 + LICENSE | 395 ++++++ Makefile | 148 ++ README.md | 47 +- book/_config.yml | 103 ++ book/_toc.yml | 9 + book/api.rst | 16 + book/assets/assets/extra/.gitkeep | 0 book/assets/extra/.gitkeep | 0 book/examples.md | 34 + book/index.md | 53 + book/references.bib | 3 + book/requirements.txt | 12 + book/syllabus/index.md | 1 + book/usage.md | 1 + codecov.yml | 12 + poetry.lock | 1233 +++++++++++++++++ pyproject.toml | 109 ++ 61 files changed, 4308 insertions(+), 1 deletion(-) create mode 100644 .copier-config.yaml create mode 100644 .copier-docker-config.yaml create mode 100644 .copierignore create mode 100644 .docker/.docker-scripts/docker-compose.sh create mode 100644 .docker/.dockerignore create mode 100644 .docker/.ids/admin.env create mode 100644 .docker/.ids/app.env create mode 100644 .docker/.ids/default.env create mode 100644 .docker/.ids/id.env create mode 100644 .docker/Dockerfile.app create mode 100644 .docker/Dockerfile.base create mode 100644 .docker/docker-compose.app.yaml create mode 100644 .docker/docker-compose.base.yaml create mode 100644 .docker/docker.app.env create mode 100644 .docker/docker.base.env create mode 100644 .docker/docker.common.env create mode 100644 .docker/docker.version create mode 100644 .docker/scripts/init-dotfiles.sh create mode 100644 .docker/scripts/launch.sh create mode 100644 .docker/scripts/requirements-base.txt create mode 100644 .docker/scripts/sshd_config create mode 100644 .editorconfig create mode 100644 .gitattributes create mode 100644 .github/dependabot.yaml create mode 100644 .github/labels.yaml create mode 100644 .github/release.yaml create mode 100644 .github/scripts/autotoc.py create mode 100644 .github/scripts/free-disk-space.sh create mode 100644 .github/workflows/deploy-app-image.yaml create mode 100644 .github/workflows/deploy-base-image.yaml create mode 100644 
.github/workflows/deploy-docs.yaml create mode 100644 .github/workflows/lint_and_test.yaml create mode 100644 .github/workflows/prerelease-to-test-pypi.yaml create mode 100644 .github/workflows/prerelease.yaml create mode 100644 .github/workflows/release-patch.yaml create mode 100644 .github/workflows/release-test.yaml create mode 100644 .github/workflows/release.yaml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .tasks-extra.toml create mode 100644 .tasks.toml create mode 100644 CHANGELOG.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 book/_config.yml create mode 100644 book/_toc.yml create mode 100644 book/api.rst create mode 100644 book/assets/assets/extra/.gitkeep create mode 100644 book/assets/extra/.gitkeep create mode 100644 book/examples.md create mode 100644 book/index.md create mode 100644 book/references.bib create mode 100644 book/requirements.txt create mode 100644 book/syllabus/index.md create mode 100644 book/usage.md create mode 100644 codecov.yml create mode 100644 poetry.lock create mode 100644 pyproject.toml diff --git a/.copier-config.yaml b/.copier-config.yaml new file mode 100644 index 0000000..df355c8 --- /dev/null +++ b/.copier-config.yaml @@ -0,0 +1,34 @@ +# Changes here will be overwritten by Copier; do NOT edit manually +_commit: v0.12.1-1-ged875bd +_src_path: gh:entelecheia/hyperfast-python-template +author: Young Joon Lee +build_and_release: false +code_template_answers_file: .copier-hyfi-config.yaml +code_template_source: '' +codecov_graph_token: '[REPLACE_ME]' +copyright_year: 2024 +documentaion_tool: jupyter-book +documentation_url: https://entelecheia.github.io/course-template +email: entelecheia@hotmail.com +favicon_path: https://assets.entelecheia.ai/favicon.png +friendly_name: Course Template +github_repo_name: course-template +github_username: entelecheia +google_analytics_id: '' +logo_path: '' +main_branch: main +package_name: coursetemp +package_scripts: +- coursetemp = 'coursetemp.__cli__:main' +poe_task_files: +- .tasks.toml +- .tasks-extra.toml +project_description: A template for a course +project_license: CC-BY-4.0 +project_name: course-template +project_short_description: A template for a course +upload_to_release: false +upload_to_repository: false +use_launch_buttons: true +use_source_code_skeleton: false + diff --git a/.copier-docker-config.yaml b/.copier-docker-config.yaml new file mode 100644 index 0000000..4e6b1e9 --- /dev/null +++ b/.copier-docker-config.yaml @@ -0,0 +1,56 @@ +# Changes here will be overwritten by Copier; do NOT edit manually +_commit: v0.28.4 +_src_path: gh:entelecheia/hyperfast-docker-template +app_dirname: course_temp +app_install_root: /workspace/projects +app_service_name: app +app_source_branch: main +app_source_repo: entelecheia/course-template +author: Young Joon Lee +build_images_from_dockerfile: true +clone_source_code: true +container_workspace_root: /workspace +copy_scripts_dir: true +cuda_device_id: '6' +docker_app_image_variant_name: app +docker_apt_packages: fontconfig fonts-nanum +docker_base_build_from: nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04 +docker_base_image_variant_name: base +docker_container_uid: 9001 +docker_container_username: dev +docker_image_version_variable_name: APP_VERSION +docker_name_prefix: COURSE_TEMP +docker_project_name: course-template +docker_registry: ghcr.io +docker_run_command: zsh +docker_service_name: workspace 
+docker_timezone: Asia/Seoul +docker_username: entelecheia +email: entelecheia@hotmail.com +enable_nvidia_gpu: true +friendly_name: Course Template +github_repo_name: course-template +github_username: entelecheia +install_dotfiles: true +install_pip_requirements: false +jupyter_host_port: 19871 +jupyter_port: 8585 +jupyter_token: '' +launch_scripts: launch.sh +main_branch: main +pip_command: pip3 +project_description: A template for a course +project_license: MIT +project_short_description: A template for a course +python_command: python3.10 +ssh_host_port: 2341 +ssh_port: 22 +use_builder_image_and_copy_venv: false +use_deploy_workflows: true +use_jupyter: true +use_semantic_versioning_for_image: true +use_ssh_service: true +use_web_service: true +web_service_host_port: 18761 +web_service_port: 8080 + diff --git a/.copierignore b/.copierignore new file mode 100644 index 0000000..78c2ee3 --- /dev/null +++ b/.copierignore @@ -0,0 +1,6 @@ +.copierignore +.tasks-extra.toml +CHANGELOG.md +pyproject.toml +Makefile +.gitignore diff --git a/.docker/.docker-scripts/docker-compose.sh b/.docker/.docker-scripts/docker-compose.sh new file mode 100644 index 0000000..61a8972 --- /dev/null +++ b/.docker/.docker-scripts/docker-compose.sh @@ -0,0 +1,182 @@ +#!/bin/bash +# add your custom commands here that should be executed when building the docker image +# arguments usage +USAGE=" +$0 COMMAND [OPTIONS] + +Arguments: +COMMAND The operation to be performed. Must be one of: [build|config|push|login|up|down|run] + +Options: +-v, --variant IMAGE_VARIANT Specify a variant for the Docker image. +-p, --pid PROJECT_ID Specify a project ID for the container instance. +-r, --run RUN_COMMAND Specify a command to run when using the 'run' command. Default: bash +-h, --help Display this help message. + +Additional arguments can be provided after the Docker name, and they will be passed directly to the Docker Compose command. 
+ +Example: +$0 build -v base +" + +# declare arguments +PROJECT_ID="default" +COMMAND="build" +VARIANT="base" +RUN_COMMAND="bash" +ADDITIONAL_ARGS="" + +set +u +# read arguments +# first argument is the command +COMMAND="$1" +shift + +# parse options +while [[ $# -gt 0 ]]; do + case "$1" in + -v | --variant) + VARIANT="$2" + shift + ;; + --variant=*) + VARIANT="${1#*=}" + ;; + -p | --pid) + PROJECT_ID="$2" + shift + ;; + --pid=*) + PROJECT_ID="${1#*=}" + ;; + -r | --run) + RUN_COMMAND="$2" + shift + ;; + --run=*) + RUN_COMMAND="${1#*=}" + ;; + -h | --help) + echo "Usage: $0 $USAGE" >&2 + exit 0 + ;; + -h*) + echo "Usage: $0 $USAGE" >&2 + exit 0 + ;; + *) + ADDITIONAL_ARGS="$ADDITIONAL_ARGS $1" + ;; + esac + shift +done +# check if remaining arguments exist +if [[ -n "$ADDITIONAL_ARGS" ]]; then + echo "Additional arguments: $ADDITIONAL_ARGS" >&2 +fi +set -u + +if [ "${COMMAND}" == "build" ]; then + echo "Building docker image for variant: ${VARIANT}" +elif [ "${COMMAND}" == "config" ]; then + echo "Printing docker config for variant: ${VARIANT}" +elif [ "${COMMAND}" == "push" ]; then + echo "Pushing docker image for variant: ${VARIANT}" +elif [ "${COMMAND}" == "up" ]; then + echo "Starting docker container for variant: ${VARIANT}" +elif [ "${COMMAND}" == "down" ]; then + echo "Stopping docker container for variant: ${VARIANT}" +elif [ "${COMMAND}" == "run" ]; then + echo "Running docker container for variant: ${VARIANT}" +elif [ "${COMMAND}" == "login" ]; then + echo "Logging into docker registry for variant: ${VARIANT}" +else + echo "Invalid command: $COMMAND" >&2 + echo "Usage: $0 $USAGE" >&2 + exit 1 +fi +echo "---" + +# load environment variables and print them +set -a +# load secret environment variables from .env.secret +DOCKER_SECRET_ENV_FILENAME=${DOCKER_SECRET_ENV_FILENAME:-".env.secret"} +if [ -e "${DOCKER_SECRET_ENV_FILENAME}" ]; then + echo "Loading secret environment variables from ${DOCKER_SECRET_ENV_FILENAME}" + set -x # print commands and their arguments + # shellcheck disable=SC1091,SC1090 + source "${DOCKER_SECRET_ENV_FILENAME}" + set +x # disable printing of environment variables +fi +# load global environment variables from .env.docker +DOCKERFILES_SHARE_DIR=${DOCKERFILES_SHARE_DIR:-"$HOME/.local/share/dockerfiles"} +DOCKER_GLOBAL_ENV_FILENAME=${DOCKER_GLOBAL_ENV_FILENAME:-".env.docker"} +DOCKER_GLOBAL_ENV_FILE=${DOCKER_GLOBAL_ENV_FILE:-"${DOCKERFILES_SHARE_DIR}/${DOCKER_GLOBAL_ENV_FILENAME}"} +if [ !
-e "${DOCKER_GLOBAL_ENV_FILENAME}" ] && [ -e "${DOCKER_GLOBAL_ENV_FILE}" ]; then + echo "Symlinking ${DOCKER_GLOBAL_ENV_FILE} to ${DOCKER_GLOBAL_ENV_FILENAME}" + ln -s "${DOCKER_GLOBAL_ENV_FILE}" "${DOCKER_GLOBAL_ENV_FILENAME}" +fi +if [ -e "${DOCKER_GLOBAL_ENV_FILENAME}" ]; then + echo "Loading global environment variables from ${DOCKER_GLOBAL_ENV_FILENAME}" + set -x # print commands and their arguments + # shellcheck disable=SC1091,SC1090 + source "${DOCKER_GLOBAL_ENV_FILENAME}" + set +x # disable printing of environment variables +fi +# shellcheck disable=SC1091 +source .docker/docker.version +PROJECT_ID_ENV_FILE=".docker/.ids/${PROJECT_ID}.env" +if [ -e "${PROJECT_ID_ENV_FILE}" ]; then + echo "Loading project ID specific environment variables from ${PROJECT_ID_ENV_FILE}" + set -x # print commands and their arguments + # shellcheck disable=SC1091,SC1090 + source "${PROJECT_ID_ENV_FILE}" + set +x # disable printing of environment variables +fi +if [ -e .docker/docker.common.env ]; then + echo "Loading common environment variables from .docker/docker.common.env" + set -x # print commands and their arguments + # shellcheck disable=SC1091 + source .docker/docker.common.env + set +x # disable printing of environment variables +fi +if [ -e ".docker/docker.${VARIANT}.env" ]; then + echo "Loading environment variables from .docker/docker.${VARIANT}.env" + set -x # print commands and their arguments + # shellcheck disable=SC1091,SC1090 + source ".docker/docker.${VARIANT}.env" + set +x # disable printing of environment variables +fi +set +a + +# prepare docker network +if [[ -n "${CONTAINER_NETWORK_NAME}" ]] && ! docker network ls | grep -q "${CONTAINER_NETWORK_NAME}"; then + echo "Creating network ${CONTAINER_NETWORK_NAME}" + docker network create "${CONTAINER_NETWORK_NAME}" +else + echo "Network ${CONTAINER_NETWORK_NAME} already exists." +fi + +# prepare local workspace to be mounted +echo "Preparing local workspace directories" +[ ! -d "${HOST_WORKSPACE_ROOT}" ] && mkdir -p "${HOST_WORKSPACE_ROOT}" +[ ! -d "${HOST_SCRIPTS_DIR}" ] && cp -r "$PWD/.docker/scripts" "${HOST_SCRIPTS_DIR}" +[ ! -d "${HOST_SSH_DIR}" ] && mkdir -p "${HOST_SSH_DIR}" +[ ! -d "${HOST_CACHE_DIR}" ] && mkdir -p "${HOST_CACHE_DIR}" +[ ! -d "${HOST_HF_HOME}" ] && mkdir -p "${HOST_HF_HOME}" +[ ! -d "${HOST_GH_CONFIG_DIR}" ] && mkdir -p "${HOST_GH_CONFIG_DIR}" +[ ! -d "${HOST_PASSAGE_DIR}" ] && mkdir -p "${HOST_PASSAGE_DIR}" + +# run docker-compose +if [ "${COMMAND}" == "push" ]; then + CMD="docker push ${IMAGE_NAME}:${IMAGE_TAG}" +elif [ "${COMMAND}" == "login" ]; then + echo "GITHUB_CR_PAT: $GITHUB_CR_PAT" + CMD="docker login ghcr.io -u $GITHUB_USERNAME" +elif [ "${COMMAND}" == "run" ]; then + CMD="docker compose --project-directory . -f .docker/docker-compose.${VARIANT}.yaml run workspace ${RUN_COMMAND} ${ADDITIONAL_ARGS}" +else + CMD="docker-compose --project-directory . 
-f .docker/docker-compose.${VARIANT}.yaml -p ${CONTAINER_HOSTNAME} ${COMMAND} ${ADDITIONAL_ARGS}" +fi +echo "Running command: ${CMD}" +eval "${CMD}" diff --git a/.docker/.dockerignore b/.docker/.dockerignore new file mode 100644 index 0000000..baeea21 --- /dev/null +++ b/.docker/.dockerignore @@ -0,0 +1,11 @@ +.virtual_documents +jupyterhub* +.vscode +.history +workspace +data +refs +tmp +logs +outputs +.env* diff --git a/.docker/.ids/admin.env b/.docker/.ids/admin.env new file mode 100644 index 0000000..cc2be9d --- /dev/null +++ b/.docker/.ids/admin.env @@ -0,0 +1,11 @@ +PROJECT_ID="admin" +CONTAINER_CUDA_DEVICE_ID=0 +HOST_WORKSPACE_LOCATION="$PWD/workspace/$PROJECT_ID" +HOST_WORKSPACE_ROOT="$HOME/workspace" +HOST_SCRIPTS_DIR="$PWD/.docker/scripts" +HOST_SSH_DIR="$HOME/.ssh" +HOST_CACHE_DIR="$HOST_WORKSPACE_LOCATION/.cache" +HOST_HF_HOME="$HOST_CACHE_DIR/huggingface" +HOST_SSH_PORT=12201 +HOST_JUPYTER_PORT=18801 +HOST_WEB_SVC_PORT=18081 diff --git a/.docker/.ids/app.env b/.docker/.ids/app.env new file mode 100644 index 0000000..5f6cf0e --- /dev/null +++ b/.docker/.ids/app.env @@ -0,0 +1,10 @@ +DOCKER_PROJECT_ID="app" + +HOST_WORKSPACE_LOCATION=${WORKSPACE_LOCATION:-"$PWD/workspace/$PROJECT_ID"} +HOST_WORKSPACE_ROOT=${WORKSPACE_ROOT:-"$HOST_WORKSPACE_LOCATION/workspace"} +HOST_SCRIPTS_DIR="$PWD/.docker/scripts" +HOST_SSH_DIR="$HOST_WORKSPACE_LOCATION/.ssh" +HOST_CACHE_DIR="$HOST_WORKSPACE_LOCATION/.cache" +HOST_HF_HOME=${HF_HOME:-"${HOST_WORKSPACE_LOCATION}/.cache/huggingface"} +HOST_GH_CONFIG_DIR="$HOST_WORKSPACE_LOCATION/.config/gh" +HOST_PASSAGE_DIR="$HOST_WORKSPACE_LOCATION/.passage" diff --git a/.docker/.ids/default.env b/.docker/.ids/default.env new file mode 100644 index 0000000..e8c884a --- /dev/null +++ b/.docker/.ids/default.env @@ -0,0 +1,10 @@ +DOCKER_PROJECT_ID="default" + +HOST_WORKSPACE_LOCATION=${WORKSPACE_LOCATION:-"$PWD/workspace/$PROJECT_ID"} +HOST_WORKSPACE_ROOT=${WORKSPACE_ROOT:-"$HOST_WORKSPACE_LOCATION/workspace"} +HOST_SCRIPTS_DIR="$PWD/.docker/scripts" +HOST_SSH_DIR="$HOME/.ssh" +HOST_CACHE_DIR="$HOME/.cache" +HOST_HF_HOME=${HF_HOME:-"${HOME}/.cache/huggingface"} +HOST_GH_CONFIG_DIR="$HOME/.config/gh" +HOST_PASSAGE_DIR="$HOME/.passage" diff --git a/.docker/.ids/id.env b/.docker/.ids/id.env new file mode 100644 index 0000000..478ca2b --- /dev/null +++ b/.docker/.ids/id.env @@ -0,0 +1,11 @@ +PROJECT_ID="id" +CONTAINER_CUDA_DEVICE_ID=0 +HOST_WORKSPACE_LOCATION="$PWD/workspace/$PROJECT_ID" +HOST_WORKSPACE_ROOT="$HOST_WORKSPACE_LOCATION/workspace" +HOST_SCRIPTS_DIR="$HOST_WORKSPACE_ROOT/scripts" +HOST_SSH_DIR="$HOST_WORKSPACE_LOCATION/.ssh" +HOST_CACHE_DIR="$HOST_WORKSPACE_LOCATION/.cache" +HOST_HF_HOME="$HOST_CACHE_DIR/huggingface" +HOST_SSH_PORT=12212 +HOST_JUPYTER_PORT=18812 +HOST_WEB_SVC_PORT=18012 diff --git a/.docker/Dockerfile.app b/.docker/Dockerfile.app new file mode 100644 index 0000000..50a533a --- /dev/null +++ b/.docker/Dockerfile.app @@ -0,0 +1,100 @@ +# Sets the base image for subsequent instructions +ARG ARG_BUILD_FROM="ghcr.io/entelecheia/course-template:latest-base" +FROM $ARG_BUILD_FROM + +# Setting ARGs and ENVs for user creation and workspace setup +ARG ARG_USERNAME="dev" +ARG ARG_USER_UID=9001 +ARG ARG_USER_GID=$ARG_USER_UID +ARG ARG_WORKSPACE_ROOT="/workspace" +ENV USERNAME $ARG_USERNAME +ENV USER_UID $ARG_USER_UID +ENV USER_GID $ARG_USER_GID +ENV WORKSPACE_ROOT $ARG_WORKSPACE_ROOT + +# Sets up the workspace for the user +RUN if [ ! 
-d $WORKSPACE_ROOT/projects ]; then mkdir -p $WORKSPACE_ROOT/projects; fi + +# Setting ARGs and ENVs for the app +ARG ARG_APP_SOURCE_REPO="entelecheia/course-template" +ARG ARG_APP_INSTALL_ROOT="/workspace/projects" +ARG ARG_APP_DIRNAME="course_temp" +ARG ARG_APP_SOURCE_BRANCH="main" +ARG ARG_APP_SERVICE_NAME="app" +ENV APP_SOURCE_REPO $ARG_APP_SOURCE_REPO +ENV APP_INSTALL_ROOT $ARG_APP_INSTALL_ROOT +ENV APP_DIRNAME $ARG_APP_DIRNAME +ENV APP_SOURCE_BRANCH $ARG_APP_SOURCE_BRANCH +ENV APP_SERVICE_NAME $ARG_APP_SERVICE_NAME +ENV APP_SRC_DIR=${APP_INSTALL_ROOT}/${APP_DIRNAME} +ENV APP_VIRTUAL_ENV=${APP_INSTALL_ROOT}/.venvs/${APP_DIRNAME} +ENV APP_WORKSPACE_ROOT=${APP_INSTALL_ROOT}/workspace + +# Clones the app repository from GitHub +RUN git clone --branch $APP_SOURCE_BRANCH https://github.com/${ARG_APP_SOURCE_REPO}.git ${APP_SRC_DIR} &&\ + cd ${APP_SRC_DIR} &&\ + git checkout $APP_SOURCE_BRANCH + +# Sets the working directory to workspace root +WORKDIR $WORKSPACE_ROOT +# Copies scripts from host into the image +COPY ./.docker/scripts/ ./scripts/ + + + +# Creates a non-root user with sudo privileges +# check if user exists and if not, create user +RUN if id -u $USERNAME >/dev/null 2>&1; then \ + echo "User exists"; \ + else \ + groupadd --gid $USER_GID $USERNAME && \ + adduser --uid $USER_UID --gid $USER_GID --force-badname --disabled-password --gecos "" $USERNAME && \ + echo "$USERNAME:$USERNAME" | chpasswd && \ + adduser $USERNAME sudo && \ + echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers && \ + echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/$USERNAME && \ + chmod 0440 /etc/sudoers.d/$USERNAME; \ + fi + +RUN chown -R $USERNAME:$USERNAME $WORKSPACE_ROOT +RUN chown -R $USERNAME:$USERNAME $APP_INSTALL_ROOT + +# Switches to the newly created user +USER $USERNAME + +# Install dotfiles +ARG ARG_USER_FULLNAME +ARG ARG_USER_EMAIL +ARG ARG_GITHUB_USERNAME +ARG ARG_SYSTEM_HOSTNAME +ARG ARG_WORKSPACE_LOCATION="/" +ARG ARG_DOTFILES_VERSION="0.1.0" +ENV USER_FULLNAME $ARG_USER_FULLNAME +ENV USER_EMAIL $ARG_USER_EMAIL +ENV GITHUB_USERNAME $ARG_GITHUB_USERNAME +ENV SYSTEM_HOSTNAME $ARG_SYSTEM_HOSTNAME +ENV WORKSPACE_LOCATION $ARG_WORKSPACE_LOCATION +ENV DOTFILES_VERSION $ARG_DOTFILES_VERSION +ENV DOTFILES_APPLY_ROOTMOI=0 +ENV DOTFILES_USE_CODE=1 +ENV DOTFILES_USE_PYTHON_TOOLS=1 +ENV DOTFILES_MINIMUM=1 +ENV REMOTE_CONTAINERS=1 + +RUN echo "Current user: $USERNAME" +RUN echo "Dotfiles version: $DOTFILES_VERSION" +RUN if [ -d "/home/$USERNAME/.dotfiles" ]; then \ + echo "Dotfiles already installed"; \ + else \ + sh -c "$(wget -qO- https://dotfiles.entelecheia.ai/install)"; \ + fi + +USER root +RUN usermod -u "${USER_UID}" "${USERNAME}" +RUN groupmod -g "${USER_GID}" "${USERNAME}" +RUN chown --recursive "${USER_UID}:${USER_GID}" "${WORKSPACE_ROOT}" +RUN chown --recursive "${USER_UID}:${USER_GID}" "${APP_INSTALL_ROOT}" +USER $USERNAME + +# Specifies the command that will be executed when the container is run +CMD ["bash"] diff --git a/.docker/Dockerfile.base b/.docker/Dockerfile.base new file mode 100644 index 0000000..7766700 --- /dev/null +++ b/.docker/Dockerfile.base @@ -0,0 +1,105 @@ +# Sets the base image for subsequent instructions +ARG ARG_BUILD_FROM="nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04" + + +FROM $ARG_BUILD_FROM + + + +# Sets labels for the image +LABEL org.opencontainers.image.source="https://github.com/entelecheia/course-template" +LABEL org.opencontainers.image.description="A template for a course" +LABEL org.opencontainers.image.licenses="MIT" + +# Setting this argument 
prevents interactive prompts during the build process +ARG DEBIAN_FRONTEND=noninteractive +# Updates the image and installs necessary packages +RUN apt-get update --fix-missing \ + && apt-get install -y curl wget jq sudo gosu git \ + python3.10-venv python3-pip build-essential \ + locales locales-all fontconfig fonts-nanum \ + tzdata openssh-server \ + # Cleans up unnecessary packages to reduce image size + && apt-get autoremove -y \ + && apt-get clean -y + +# Sets Python environment variables +ENV PIP_DEFAULT_TIMEOUT 100 +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 + +# Setting ARGs and ENVs for the app +ARG ARG_APP_INSTALL_ROOT="/workspace/projects" +ARG ARG_APP_DIRNAME="course_temp" +ENV APP_INSTALL_ROOT $ARG_APP_INSTALL_ROOT +ENV APP_DIRNAME $ARG_APP_DIRNAME +ENV APP_SRC_DIR=${APP_INSTALL_ROOT}/${APP_DIRNAME} +ENV APP_VIRTUAL_ENV=${APP_INSTALL_ROOT}/.venvs/${APP_DIRNAME} +ENV APP_WORKSPACE_ROOT=${APP_INSTALL_ROOT}/workspace +ARG ARG_WORKSPACE_ROOT="/workspace" +ENV WORKSPACE_ROOT $ARG_WORKSPACE_ROOT +# Sets up the workspace for the user +RUN mkdir -p $WORKSPACE_ROOT/projects + + +# Sets the working directory to workspace root +WORKDIR $WORKSPACE_ROOT +# Copies scripts from host into the image +COPY ./.docker/scripts/ ./scripts/ +RUN if [ -f ./scripts/requirements-base.txt ]; then pip3 install -r ./scripts/requirements-base.txt; fi + +# Sets the time zone within the container +ENV TZ="Asia/Seoul" +# Sets up the locale to en_US.UTF-8 +RUN localedef -v -c -i en_US -f UTF-8 en_US.UTF-8 || true + +# Setting ARGs and ENVs for user creation and workspace setup +ARG ARG_USERNAME="dev" +ARG ARG_USER_UID=9001 +ARG ARG_USER_GID=$ARG_USER_UID +ENV USERNAME $ARG_USERNAME +ENV USER_UID $ARG_USER_UID +ENV USER_GID $ARG_USER_GID + +# Creates a non-root user with sudo privileges +RUN groupadd --gid $USER_GID $USERNAME \ + && adduser --uid $USER_UID --gid $USER_GID --force-badname --disabled-password --gecos "" $USERNAME \ + && echo "$USERNAME:$USERNAME" | chpasswd \ + && adduser $USERNAME sudo \ + && echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers \ + && echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME + +# Fixes sudo error related to core dumps +RUN echo "Set disable_coredump false" > /etc/sudo.conf + +# Switches to the newly created user +USER $USERNAME + +# install dotfiles +ARG ARG_USER_FULLNAME="Dev User" +ARG ARG_USER_EMAIL="dev@domain.com" +ARG ARG_GITHUB_USERNAME="" +ARG ARG_SYSTEM_HOSTNAME="dev-container" +ARG ARG_WORKSPACE_LOCATION="/" +ENV USER_FULLNAME $ARG_USER_FULLNAME +ENV USER_EMAIL $ARG_USER_EMAIL +ENV GITHUB_USERNAME $ARG_GITHUB_USERNAME +ENV SYSTEM_HOSTNAME $ARG_SYSTEM_HOSTNAME +ENV WORKSPACE_LOCATION $ARG_WORKSPACE_LOCATION + +ENV DOTFILES_APPLY_ROOTMOI=0 +ENV DOTFILES_USE_CODE=1 +ENV DOTFILES_USE_PYTHON_TOOLS=1 +ENV REMOTE_CONTAINERS=1 +# ENV DOTFILES_DEBUG=1 + +USER $USERNAME +RUN sh -c "$(wget -qO- https://dotfiles.entelecheia.ai/install)" + +USER root + +RUN chown -R $USERNAME:$USERNAME $WORKSPACE_ROOT + +# Specifies the command that will be executed when the container is run +CMD ["bash"] diff --git a/.docker/docker-compose.app.yaml b/.docker/docker-compose.app.yaml new file mode 100644 index 0000000..28ef0df --- /dev/null +++ b/.docker/docker-compose.app.yaml @@ -0,0 +1,84 @@ +version: "3" + +services: + # Defines a service name + workspace: + build: + # Sets the build context to the current directory + context: . 
+ # Specifies the Dockerfile to use for the build + dockerfile: .docker/Dockerfile.app + # Specifies build-time variables (ARGs) + args: + ARG_BUILD_FROM: $BUILD_FROM + ARG_USERNAME: $CONTAINER_USERNAME + ARG_USER_UID: $CONTAINER_USER_UID + ARG_USER_GID: $CONTAINER_USER_GID + ARG_WORKSPACE_ROOT: $CONTAINER_WORKSPACE_ROOT + ARG_USER_FULLNAME: $APP_USER_FULLNAME + ARG_USER_EMAIL: $APP_USER_EMAIL + ARG_GITHUB_USERNAME: $GITHUB_USERNAME + ARG_SYSTEM_HOSTNAME: $CONTAINER_HOSTNAME + ARG_WORKSPACE_LOCATION: $CONTAINER_WORKSPACE_LOCATION + ARG_APP_GITHUB_USERNAME: $APP_GITHUB_USERNAME + ARG_APP_SOURCE_REPO: $APP_SOURCE_REPO + ARG_APP_INSTALL_ROOT: $APP_INSTALL_ROOT + ARG_APP_DIRNAME: $APP_DIRNAME + ARG_APP_SOURCE_BRANCH: $APP_SOURCE_BRANCH + ARG_APP_SERVICE_NAME: $APP_SERVICE_NAME + ARG_DOTFILES_VERSION: $DOTFILES_VERSION + # Sets the image name for the built image + image: $IMAGE_NAME:$IMAGE_TAG + # Sets the hostname of the container + hostname: $CONTAINER_HOSTNAME + command: + # Specifies the command to be executed when the container is run + - bash + - $CONTAINER_LAUNCH_SCRIPT + # set the environment variables + environment: + USER_UID: $CONTAINER_USER_UID + USER_GID: $CONTAINER_USER_GID + USER_FULLNAME: $APP_USER_FULLNAME + USER_EMAIL: $APP_USER_EMAIL + WORKSPACE_LOCATION: $CONTAINER_WORKSPACE_LOCATION + SYSTEM_HOSTNAME: $CONTAINER_HOSTNAME + GIT_COMMIT_GPGSIGN: $GIT_COMMIT_GPGSIGN + GITHUB_USERNAME: $APP_GITHUB_USERNAME + WORKSPACE_ROOT: $CONTAINER_WORKSPACE_ROOT + IMAGE_VARIANT: $IMAGE_VARIANT + JUPYTER_PORT: $CONTAINER_JUPYTER_PORT + JUPYTER_TOKEN: $CONTAINER_JUPYTER_TOKEN + ulimits: + # Sets the stack size and memory lock limits + stack: 67108864 + memlock: -1 + # Allows the container to use the host's IPC namespace + ipc: $CONTAINER_IPC + ports: + # Maps the container's SSH and Web service ports to the host's ports + - "$HOST_SSH_PORT:$CONTAINER_SSH_PORT" + - "$HOST_JUPYTER_PORT:$CONTAINER_JUPYTER_PORT" + - "$HOST_WEB_SVC_PORT:$CONTAINER_WEB_SVC_PORT" + volumes: + # Maps directories from the host to the container + - "$HOST_SCRIPTS_DIR:$CONTAINER_WORKSPACE_ROOT/scripts" + - "$HOST_HF_HOME:$CONTAINER_HF_HOME" + - "$HOST_WORKSPACE_ROOT:$CONTAINER_WORKSPACE_ROOT" + - "$HOST_CACHE_DIR:$CONTAINER_CACHE_DIR" + - "$HOST_SSH_DIR:$CONTAINER_SSH_DIR" + - "$HOST_GH_CONFIG_DIR:$CONTAINER_GH_CONFIG_DIR" + - "$HOST_PASSAGE_DIR:$CONTAINER_PASSAGE_DIR" + deploy: + resources: + reservations: + devices: + # Reserves the specified GPU for the container + - driver: nvidia + device_ids: ["${CONTAINER_CUDA_DEVICE_ID}"] + capabilities: [gpu] +networks: + default: + # Sets the name of the default network and makes it external + name: $CONTAINER_NETWORK_NAME + external: true diff --git a/.docker/docker-compose.base.yaml b/.docker/docker-compose.base.yaml new file mode 100644 index 0000000..e89ca14 --- /dev/null +++ b/.docker/docker-compose.base.yaml @@ -0,0 +1,36 @@ +version: "3" + +services: + # Defines a service name + workspace: + build: + # Sets the build context to the current directory + context: . 
+ # Specifies the Dockerfile to use for the build + dockerfile: .docker/Dockerfile.base + # Specifies build-time variables (ARGs) + args: + ARG_BUILD_FROM: $BUILD_FROM + ARG_USERNAME: $CONTAINER_USERNAME + ARG_USER_UID: $CONTAINER_USER_UID + ARG_USER_GID: $CONTAINER_USER_GID + ARG_WORKSPACE_ROOT: $CONTAINER_WORKSPACE_ROOT + ARG_WORKSPACE_LOCATION: $CONTAINER_WORKSPACE_LOCATION + ARG_SYSTEM_HOSTNAME: $CONTAINER_HOSTNAME + # Sets the image name for the built image + image: $IMAGE_NAME:$IMAGE_TAG + # Sets the hostname of the container + hostname: $CONTAINER_HOSTNAME + command: + # Specifies the command to be executed when the container is run + - $CONTAINER_RUN_COMMAND + ulimits: + # Sets the stack size and memory lock limits + stack: 67108864 + memlock: -1 + ipc: $CONTAINER_IPC +networks: + default: + # Sets the name of the default network and makes it external + name: $CONTAINER_NETWORK_NAME + external: true diff --git a/.docker/docker.app.env b/.docker/docker.app.env new file mode 100644 index 0000000..24596ff --- /dev/null +++ b/.docker/docker.app.env @@ -0,0 +1,33 @@ +######################################################### +# Configuration parameters for the docker project # +# Change the variables below to your need: # +######################################################### +APP_GITHUB_USERNAME=${GITHUB_USERNAME:-"entelecheia"} # The GitHub username of the project +APP_USER_FULLNAME=${USER_FULLNAME:-"Young Joon Lee"} # The full name of the user +APP_USER_EMAIL=${USER_EMAIL:-"entelecheia@hotmail.com"} # The email address of the user + +APP_SOURCE_REPO=${COURSE_TEMP_SOURCE_REPO:-"entelecheia/course-template"} # The GitHub repository name of the project +APP_SOURCE_BRANCH=${COURSE_TEMP_SOURCE_BRANCH:-"main"} # The branch of the project to clone + +####################################################################################### +# Please do not make any changes below this line if you don't know what you are doing # +# change the variables above to your need # +####################################################################################### +# docker build: Configuration parameters for building the Docker image +IMAGE_VARIANT=${APP_VARIANT:-"app"} # The variant of the Docker image. +IMAGE_TAG="${IMAGE_VERSION}-${IMAGE_VARIANT}" # The tag of the Docker image +IMAGE_NAME="${CONTAINER_REGISTRY}/${DOCKER_USERNAME}/${DOCKER_PROJECT_NAME}" # The full name of the Docker image +BASE_VARIANT=${BASE_VARIANT:-"base"} # The variant of the Docker image. +BUILD_FROM="${IMAGE_NAME}:latest-${BASE_VARIANT}" # The base image for the Docker build + +# docker run: Configuration parameters for running the Docker container +CONTAINER_LAUNCH_SCRIPT="${CONTAINER_WORKSPACE_ROOT}/scripts/launch.sh" # The name of the launch script +CONTAINER_CUDA_DEVICE_ID=${COURSE_TEMP_CUDA_DEVICE_ID:-"6"} # The ID of the CUDA device to use, e.g. all, 0, 1, 2, etc. 
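+# Illustrative example (a sketch, not part of the upstream template): the wrapper
+# script .docker/.docker-scripts/docker-compose.sh sources this file inside `set -a`,
+# so any COURSE_TEMP_* variable exported beforehand overrides the defaults above, e.g.
+#   COURSE_TEMP_CUDA_DEVICE_ID=0 COURSE_TEMP_HOST_SSH_PORT=2222 \
+#     bash .docker/.docker-scripts/docker-compose.sh up --variant app
+# (assumes it is run from the repository root, since the script resolves paths relative to it)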
+CONTAINER_SSH_PORT=${SSH_PORT:-"22"} # The SSH port in the Docker container +HOST_SSH_PORT=${COURSE_TEMP_HOST_SSH_PORT:-"2341"} # The SSH port on the host machine to be mapped to the container's SSH port +CONTAINER_JUPYTER_PORT=${JUPYTER_PORT:-"8585"} # The Jupyter port in the Docker container +HOST_JUPYTER_PORT=${COURSE_TEMP_HOST_JUPYTER_PORT:-"19871"} # The Jupyter port on the host machine to be mapped to the container's Jupyter port +CONTAINER_JUPYTER_TOKEN=${COURSE_TEMP_JUPYTER_TOKEN:-""} # The Jupyter token to use +APP_SERVICE_NAME=${COURSE_TEMP_SERVICE_NAME:-"app"} # The server name (optional, can be left empty) +CONTAINER_WEB_SVC_PORT=${WEB_SVC_PORT:-"8080"} # The Web service port in the Docker container +HOST_WEB_SVC_PORT=${COURSE_TEMP_HOST_WEB_SVC_PORT:-"18761"} # The Web service port on the host machine to be mapped to the container's Web service port diff --git a/.docker/docker.base.env b/.docker/docker.base.env new file mode 100644 index 0000000..c138314 --- /dev/null +++ b/.docker/docker.base.env @@ -0,0 +1,9 @@ +####################################################################################### +# Please do not make any changes below this line if you don't know what you are doing # +# change the variables above to your need # +####################################################################################### +# docker build: Configuration parameters for building the Docker image +BASE_VARIANT=${BASE_VARIANT:-"base"} # The variant of the Docker image. +IMAGE_TAG="${IMAGE_VERSION}-${BASE_VARIANT}" # The tag of the Docker image +IMAGE_NAME="${CONTAINER_REGISTRY}/${DOCKER_USERNAME}/${DOCKER_PROJECT_NAME}" # The full name of the Docker image +BUILD_FROM="nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04" # The base image for the Docker build diff --git a/.docker/docker.common.env b/.docker/docker.common.env new file mode 100644 index 0000000..d42f695 --- /dev/null +++ b/.docker/docker.common.env @@ -0,0 +1,52 @@ +######################################################### +# Configuration parameters for the docker project # +# Change the variables below to your need: # +######################################################### +APP_INSTALL_ROOT=${COURSE_TEMP_INSTALL_ROOT:-"/workspace/projects"} # The directory in the container where the project will be installed or cloned +APP_DIRNAME=${COURSE_TEMP_DIRNAME:-"course_temp"} # The directory name for the cloned project +APP_SRC_DIR=${APP_INSTALL_ROOT}/${APP_DIRNAME} +APP_VIRTUAL_ENV=${APP_INSTALL_ROOT}/.venvs/${APP_DIRNAME} +APP_WORKSPACE_ROOT=${APP_INSTALL_ROOT}/workspace +APP_HF_HOME="${APP_INSTALL_ROOT}/.cache/huggingface" # HuggingFace models cache directory + +####################################################################################### +# Host machine environment variables # +####################################################################################### +SYSTEM_HOSTNAME=${SYSTEM_HOSTNAME:-"$(hostname)"} # The hostname of the system. If not set, the system's hostname will be used. 
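+# Illustrative example (a sketch, not part of the upstream template): values that
+# should stay out of version control, such as the registry credentials used by the
+# wrapper script's `login` command or the SSH_PUB_KEY below, can be placed in an
+# untracked .env.secret file in the repository root, which docker-compose.sh sources
+# before these files, e.g.
+#   GITHUB_USERNAME=entelecheia
+#   GITHUB_CR_PAT=<a GitHub personal access token with package write scope>
+#   SSH_PUB_KEY="ssh-ed25519 AAAA... user@host"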
+GIT_COMMIT_GPGSIGN=${GIT_COMMIT_GPGSIGN:-"false"} # Whether to GPG-sign Git commits +SSH_PUB_KEY=${SSH_PUB_KEY:-""} + +HOST_WORKSPACE_LOCATION=${HOST_WORKSPACE_LOCATION:-"$PWD/workspace/$PROJECT_ID"} +HOST_WORKSPACE_ROOT=${HOST_WORKSPACE_ROOT:-"$HOST_WORKSPACE_LOCATION/workspace"} +HOST_SCRIPTS_DIR=${HOST_SCRIPTS_DIR:-"$HOST_WORKSPACE_ROOT/scripts"} +HOST_SSH_DIR=${HOST_SSH_DIR:-"$HOST_WORKSPACE_LOCATION/.ssh"} +HOST_CACHE_DIR=${HOST_CACHE_DIR:-"$HOST_WORKSPACE_LOCATION/.cache"} +HOST_HF_HOME=${HOST_HF_HOME:-"$HOST_CACHE_DIR/huggingface"} + +####################################################################################### +# Please do not make any changes below this line if you don't know what you are doing # +# change the variables above to your need # +####################################################################################### +# docker build: Configuration parameters for building the Docker image +DOCKER_PROJECT_NAME=${APP_NAME:-"course-template"} # The base name of the Docker image. +DOCKER_PROJECT_ID=${DOCKER_PROJECT_ID:-"default"} # The project ID associated with the Docker image to support multiple projects +DOCKER_USERNAME=${DOCKER_USERNAME:-"entelecheia"} # The username for Docker. +IMAGE_VERSION=${APP_VERSION:-"1.0.0"} # The version of the Docker image. If not set, "1.0.0" will be used. +CONTAINER_REGISTRY=${CONTAINER_REGISTRY:-"ghcr.io"} # The Docker registry to push the image to. +CONTAINER_USERNAME=${USERNAME:-"dev"} # The username of the user. If not set, the current user's username will be used. +CONTAINER_USER_UID=${USER_UID:-"9001"} # The user ID in the Docker container. +CONTAINER_USER_GID=${USER_GID:-"9001"} # The group ID in the Docker container. +CONTAINER_WORKSPACE_LOCATION=${CONTAINER_WORKSPACE_LOCATION:-"/"} # The location of the workspace directory in the Docker container +CONTAINER_WORKSPACE_ROOT=${CONTAINER_WORKSPACE_ROOT:-"/workspace"} # The workspace directory in the Docker container +CONTAINER_HOME="/home/${CONTAINER_USERNAME}" # The home directory in the Docker container +CONTAINER_CACHE_DIR="${CONTAINER_HOME}/.cache" # The cache directory in the Docker container +CONTAINER_HF_HOME="${CONTAINER_HOME}/.cache/huggingface" # HuggingFace models cache directory +CONTAINER_SSH_DIR="${CONTAINER_HOME}/.ssh" # The SSH directory in the Docker container +CONTAINER_GH_CONFIG_DIR="${CONTAINER_HOME}/.config/gh" # The GitHub CLI configuration directory in the Docker container +CONTAINER_PASSAGE_DIR="${CONTAINER_HOME}/.passage" # The Passage directory in the Docker container + +# docker run: Configuration parameters for running the Docker container +CONTAINER_HOSTNAME="${DOCKER_PROJECT_NAME}-${DOCKER_PROJECT_ID}" # The hostname of the Docker container +CONTAINER_NETWORK_NAME="${DOCKER_PROJECT_NAME}-network" # The name of the Docker network +CONTAINER_IPC=${CONTAINER_IPC:-"host"} # The IPC mode for the Docker container +CONTAINER_RUN_COMMAND="zsh" # The command to run in the Docker container diff --git a/.docker/docker.version b/.docker/docker.version new file mode 100644 index 0000000..2eb0515 --- /dev/null +++ b/.docker/docker.version @@ -0,0 +1 @@ +APP_VERSION="0.0.0" # The version of the project diff --git a/.docker/scripts/init-dotfiles.sh b/.docker/scripts/init-dotfiles.sh new file mode 100644 index 0000000..6327a98 --- /dev/null +++ b/.docker/scripts/init-dotfiles.sh @@ -0,0 +1,3 @@ +#!/bin/bash +# this script should be executed only once when the docker container is created +dotu init diff --git a/.docker/scripts/launch.sh 
b/.docker/scripts/launch.sh new file mode 100644 index 0000000..346cf43 --- /dev/null +++ b/.docker/scripts/launch.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# add your custom commands here that should be executed every time the docker container starts +echo "Starting docker container..." + + + +### Set the $PUBLIC_KEY env var to enable SSH access. +# It is useful to have the full SSH server e.g. on Runpod. +# (use SCP to copy files to/from the image, etc) +if [[ -n "$SSH_PUB_KEY" ]] && [[ ! -d "${HOME}/.ssh" ]]; then + mkdir -p "${HOME}/.ssh" + echo "${SSH_PUB_KEY}" > "${HOME}/.ssh/authorized_keys" + chmod -R 700 "${HOME}/.ssh" +fi +sudo service ssh start + +# start jupyter notebook in background and redirect output to logfile +# change working directory to workspace root +# set token to value of JUPYTER_TOKEN +# set port to value of JUPYTER_DOCKER_PORT +if [[ -z "$(command -v jupyter)" ]]; then + echo "Jupyter not installed. Exiting..." + exit 1 +fi +jupyter lab \ + --no-browser \ + --notebook-dir="$WORKSPACE_ROOT" \ + --ServerApp.token="$JUPYTER_TOKEN" \ + --port="$JUPYTER_PORT" \ + --ip=0.0.0.0 \ + --allow-root diff --git a/.docker/scripts/requirements-base.txt b/.docker/scripts/requirements-base.txt new file mode 100644 index 0000000..c9356a7 --- /dev/null +++ b/.docker/scripts/requirements-base.txt @@ -0,0 +1 @@ +jupyterlab diff --git a/.docker/scripts/sshd_config b/.docker/scripts/sshd_config new file mode 100644 index 0000000..896e18c --- /dev/null +++ b/.docker/scripts/sshd_config @@ -0,0 +1,121 @@ +# This is the sshd server system-wide configuration file. See +# sshd_config(5) for more information. + +# This sshd was compiled with PATH=/usr/bin:/bin:/usr/sbin:/sbin + +# The strategy used for options in the default sshd_config shipped with +# OpenSSH is to specify options with their default value where +# possible, but leave them commented. Uncommented options override the +# default value. + +Include /etc/ssh/sshd_config.d/*.conf + +#Port 22 +#AddressFamily any +#ListenAddress 0.0.0.0 +#ListenAddress :: + +#HostKey /etc/ssh/ssh_host_rsa_key +#HostKey /etc/ssh/ssh_host_ecdsa_key +#HostKey /etc/ssh/ssh_host_ed25519_key + +# Ciphers and keying +#RekeyLimit default none + +# Logging +#SyslogFacility AUTH +#LogLevel INFO + +# Authentication: + +#LoginGraceTime 2m +#PermitRootLogin prohibit-password +#StrictModes yes +#MaxAuthTries 6 +#MaxSessions 10 + +PubkeyAuthentication yes + +# Expect .ssh/authorized_keys2 to be disregarded by default in future. +#AuthorizedKeysFile .ssh/authorized_keys .ssh/authorized_keys2 + +#AuthorizedPrincipalsFile none + +#AuthorizedKeysCommand none +#AuthorizedKeysCommandUser nobody + +# For this to work you will also need host keys in /etc/ssh/ssh_known_hosts +#HostbasedAuthentication no +# Change to yes if you don't trust ~/.ssh/known_hosts for +# HostbasedAuthentication +#IgnoreUserKnownHosts no +# Don't read the user's ~/.rhosts and ~/.shosts files +#IgnoreRhosts yes + +# To disable tunneled clear text passwords, change to no here! 
+PasswordAuthentication no +#PermitEmptyPasswords no + +# Change to yes to enable challenge-response passwords (beware issues with +# some PAM modules and threads) +ChallengeResponseAuthentication no + +# Kerberos options +#KerberosAuthentication no +#KerberosOrLocalPasswd yes +#KerberosTicketCleanup yes +#KerberosGetAFSToken no + +# GSSAPI options +#GSSAPIAuthentication no +#GSSAPICleanupCredentials yes +#GSSAPIStrictAcceptorCheck yes +#GSSAPIKeyExchange no + +# Set this to 'yes' to enable PAM authentication, account processing, +# and session processing. If this is enabled, PAM authentication will +# be allowed through the ChallengeResponseAuthentication and +# PasswordAuthentication. Depending on your PAM configuration, +# PAM authentication via ChallengeResponseAuthentication may bypass +# the setting of "PermitRootLogin without-password". +# If you just want the PAM account and session checks to run without +# PAM authentication, then enable this but set PasswordAuthentication +# and ChallengeResponseAuthentication to 'no'. +UsePAM yes + +#AllowAgentForwarding yes +#AllowTcpForwarding yes +#GatewayPorts no +X11Forwarding yes +#X11DisplayOffset 10 +#X11UseLocalhost yes +#PermitTTY yes +PrintMotd no +#PrintLastLog yes +#TCPKeepAlive yes +#PermitUserEnvironment no +#Compression delayed +#ClientAliveInterval 0 +#ClientAliveCountMax 3 +#UseDNS no +#PidFile /var/run/sshd.pid +#MaxStartups 10:30:100 +#PermitTunnel no +#ChrootDirectory none +#VersionAddendum none + +# no default banner path +#Banner none + +# Allow client to pass locale environment variables +AcceptEnv LANG LC_* + +# override default of no subsystems +Subsystem sftp /usr/lib/openssh/sftp-server + +# Example of overriding settings on a per-user basis +#Match User anoncvs +# X11Forwarding no +# AllowTcpForwarding no +# PermitTTY no +# ForceCommand cvs server diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..04a3f23 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,24 @@ +# Check http://editorconfig.org for more information + +# top-most .editorconfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +charset = utf-8 +trim_trailing_whitespace = true +indent_style = space + +[*.py] +profile = black + +[Makefile] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +[*.{yaml,yml}] +indent_size = 2 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..6313b56 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text=auto eol=lf diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 0000000..f86e92b --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,25 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + time: "03:00" + timezone: "Asia/Seoul" + labels: + - ":game_die: dependencies" + - ":robot: bot" + - package-ecosystem: "docker" + directory: "/.docker" + schedule: + interval: "daily" + time: "03:00" + timezone: "Asia/Seoul" + labels: + - ":game_die: dependencies" + - ":robot: bot" diff --git a/.github/labels.yaml b/.github/labels.yaml new file mode 100644 index 0000000..f7f83aa --- /dev/null +++ b/.github/labels.yaml @@ -0,0 +1,66 @@ +--- +# Labels names are important as they are used by Release Drafter to decide +# regarding where to record them in changelog or if to skip them. +# +# The repository labels will be automatically configured using this file and +# the GitHub Action https://github.com/marketplace/actions/github-labeler. +- name: breaking + description: Breaking Changes + color: bfd4f2 +- name: bug + description: Something isn't working + color: d73a4a +- name: build + description: Build System and Dependencies + color: bfdadc +- name: ci + description: Continuous Integration + color: 4a97d6 +- name: dependencies + description: Pull requests that update a dependency file + color: 0366d6 +- name: documentation + description: Improvements or additions to documentation + color: 0075ca +- name: duplicate + description: This issue or pull request already exists + color: cfd3d7 +- name: enhancement + description: New feature or request + color: a2eeef +- name: github_actions + description: Pull requests that update Github_actions code + color: "000000" +- name: good first issue + description: Good for newcomers + color: 7057ff +- name: help wanted + description: Extra attention is needed + color: 008672 +- name: invalid + description: This doesn't seem right + color: e4e669 +- name: performance + description: Performance + color: "016175" +- name: python + description: Pull requests that update Python code + color: 2b67c6 +- name: question + description: Further information is requested + color: d876e3 +- name: refactoring + description: Refactoring + color: ef67c4 +- name: removal + description: Removals and Deprecations + color: 9ae7ea +- name: style + description: Style + color: c120e5 +- name: testing + description: Testing + color: b1fc6f +- name: wontfix + description: This will not be worked on + color: ffffff diff --git a/.github/release.yaml b/.github/release.yaml new file mode 100644 index 0000000..bdf1094 --- /dev/null +++ b/.github/release.yaml @@ -0,0 +1,20 @@ +--- +changelog: + exclude: + labels: + - ignore-for-release + authors: + - octocat + categories: + - title: Breaking Changes 🛠 + labels: + - breaking + - title: Exciting New Features 🎉 + labels: + - feature + - title: Fixes 🔧 + labels: + - fix + - title: Other Changes + labels: + - "*" diff --git a/.github/scripts/autotoc.py b/.github/scripts/autotoc.py new file mode 100644 index 0000000..6280100 --- /dev/null +++ b/.github/scripts/autotoc.py @@ -0,0 +1,91 @@ +from pathlib import Path +from typing import Optional + +BOOK_DIR = Path("book") +PROJECTS_DIR = BOOK_DIR / "projects" +toc_file = BOOK_DIR / "_toc.yml" + +toc_contents = """ +# Table of contents +# Learn more at https://jupyterbook.org/customize/toc.html + +format: jb-book +root: index +chapters: + - file: syllabus/index +""" + + +def generate_file_entry( + root: Path, + file: 
Path, + num_indent_space: int = 2, +): + """ + Generates a file entry for a given file. + + Args: + root (Path): The root directory of the directory structure. + file (Path): The file to generate the entry for. + num_indent_space (int, optional): The number of spaces to use for indentation. Defaults to 2. + + Returns: + str: The updated contents of the TOC. + """ + indent = " " * num_indent_space + rel_path = file.relative_to(root) + return f"{indent}- file: {rel_path.parent}/{rel_path.stem}\n" + + +def generate_toc( + toc_contents: str, + root: Path, + section_dir: Optional[Path] = None, + num_indent_space: int = 2, +): + """ + Generates a table of contents (TOC) for a given directory structure. + + Args: + toc_contents (str): The current contents of the TOC. + root (Path): The root directory of the directory structure. + section_dir (Optional[Path], optional): The directory to generate the TOC for. Defaults to None. + num_indent_space (int, optional): The number of spaces to use for indentation. Defaults to 2. + + Returns: + str: The updated contents of the TOC. + """ + section_dir = section_dir or root + _files, _dirs = [], [] + index_path = "" + for path_ in sorted(section_dir.iterdir()): + if path_.is_dir(): + _dirs.append(path_) + elif path_.name == "index.md": + index_path = path_ + elif path_.suffix in [".md", ".ipynb"]: + _files.append(path_) + if index_path: + toc_contents += generate_file_entry(root, index_path, num_indent_space) + + if len(_files + _dirs) == 0: + return toc_contents + + num_indent_space += 2 + indent = " " * num_indent_space + toc_contents += f"{indent}sections:\n" + for path_ in _files: + toc_contents += generate_file_entry(root, path_, num_indent_space + 2) + + for path_ in _dirs: + toc_contents = generate_toc(toc_contents, root, path_, num_indent_space + 2) + + return toc_contents + + +toc_contents = generate_toc(toc_contents, BOOK_DIR, PROJECTS_DIR) +print(toc_contents) + + +with open(toc_file, "w") as f: + f.write(toc_contents) diff --git a/.github/scripts/free-disk-space.sh b/.github/scripts/free-disk-space.sh new file mode 100644 index 0000000..85cbd99 --- /dev/null +++ b/.github/scripts/free-disk-space.sh @@ -0,0 +1,49 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# +# The Azure provided machines typically have the following disk allocation: +# Total space: 85GB +# Allocated: 67 GB +# Free: 17 GB +# This script frees up 28 GB of disk space by deleting unneeded packages and +# large directories. +# The Flink end to end tests download and generate more than 17 GB of files, +# causing unpredictable behavior and build failures. 
+# +echo "==============================================================================" +echo "Freeing up disk space on CI system" +echo "==============================================================================" + +echo "Listing 100 largest packages" +dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100 +df -h +echo "Removing large packages" +sudo apt-get remove -y '^ghc-8.*' +sudo apt-get remove -y '^dotnet-.*' +sudo apt-get remove -y '^llvm-.*' +sudo apt-get remove -y 'php.*' +sudo apt-get remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel +sudo apt-get autoremove -y +sudo apt-get clean +df -h +echo "Removing large directories" +# deleting 15GB +rm -rf /usr/share/dotnet/ +df -h +# delete the cache directory +rm -rf /opt/hostedtoolcache +df -h diff --git a/.github/workflows/deploy-app-image.yaml b/.github/workflows/deploy-app-image.yaml new file mode 100644 index 0000000..4283741 --- /dev/null +++ b/.github/workflows/deploy-app-image.yaml @@ -0,0 +1,88 @@ +# +name: deploy-app-image + +# Configures this workflow to run after the `deploy-base-image` workflow completes, or when it is called from another workflow or dispatched manually. +on: + workflow_call: + workflow_dispatch: + workflow_run: + workflows: ["deploy-base-image"] + types: + - completed + +# Defines a custom environment variable for the workflow, used for the Container registry domain; the image name comes from the dotenv files loaded below. +env: + REGISTRY: ghcr.io + +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. +jobs: + build-and-push-image: + # run if the triggering workflow succeeded or the run was dispatched manually + if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }} + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: write + # + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: docker + + - name: Secure disk space for the build + run: bash .github/scripts/free-disk-space.sh + + - name: Version from version file + uses: c-py/action-dotenv-to-setenv@v5 + with: + env-file: ./.docker/docker.version + + - name: Common environment Variables from Dotenv + uses: c-py/action-dotenv-to-setenv@v5 + with: + # use branch name as suffix for dotfile + env-file: ./.docker/docker.common.env + + - name: Environment Variables from Dotenv + uses: c-py/action-dotenv-to-setenv@v5 + with: + env-file: ./.docker/docker.app.env + + # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. + - name: Log in to the Container registry + uses: docker/login-action@v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. 
- name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5.5.1 + with: + images: ${{ env.IMAGE_NAME }} + tags: | + type=raw,value=${{ env.IMAGE_VERSION }}-${{ env.IMAGE_VARIANT }} + # set latest tag for docker branch + type=raw,value=latest + + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - name: Build and push Docker image + uses: docker/build-push-action@v5.1.0 + with: + context: . + file: ./.docker/Dockerfile.app + build-args: | + ARG_BUILD_FROM=${{ env.BUILD_FROM }} + ARG_USERNAME=${{ env.CONTAINER_USERNAME }} + ARG_USER_UID=${{ env.CONTAINER_USER_UID }} + ARG_USER_GID=${{ env.CONTAINER_USER_GID }} + ARG_WORKSPACE_ROOT=${{ env.CONTAINER_WORKSPACE_ROOT }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/deploy-base-image.yaml b/.github/workflows/deploy-base-image.yaml new file mode 100644 index 0000000..29507a9 --- /dev/null +++ b/.github/workflows/deploy-base-image.yaml @@ -0,0 +1,86 @@ +# +name: deploy-base-image + +# Configures this workflow to run on pushes to `docker*` branches that touch files under `.docker/`, or when it is called from another workflow or dispatched manually. +on: + workflow_call: + workflow_dispatch: + push: + branches: + - docker* + paths: + - ".docker/**" + +# Defines a custom environment variable for the workflow, used for the Container registry domain; the image name comes from the dotenv files loaded below. +env: + REGISTRY: ghcr.io + +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. +jobs: + build-and-push-image: + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: write + # + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Secure disk space for the build + run: bash .github/scripts/free-disk-space.sh + + - name: Version from version file + uses: c-py/action-dotenv-to-setenv@v5 + with: + env-file: ./.docker/docker.version + + - name: Common environment Variables from Dotenv + uses: c-py/action-dotenv-to-setenv@v5 + with: + # use branch name as suffix for dotfile + env-file: ./.docker/docker.common.env + + - name: Environment Variables from Dotenv + uses: c-py/action-dotenv-to-setenv@v5 + with: + # use branch name as suffix for dotfile + env-file: ./.docker/docker.base.env + + # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. + - name: Log in to the Container registry + uses: docker/login-action@v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image.
The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5.5.1 + with: + images: ${{ env.IMAGE_NAME }} + tags: | + type=raw,value=${{ env.IMAGE_VERSION }}-${{ env.BASE_VARIANT }} + # set latest tag for docker branch + type=raw,value=latest-${{ env.BASE_VARIANT }} + + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - name: Build and push Docker image + uses: docker/build-push-action@v5.0.0 + with: + context: . + file: ./.docker/Dockerfile.base + build-args: | + ARG_BUILD_FROM=${{ env.BUILD_FROM }} + ARG_USERNAME=${{ env.CONTAINER_USERNAME }} + ARG_USER_UID=${{ env.CONTAINER_USER_UID }} + ARG_USER_GID=${{ env.CONTAINER_USER_GID }} + ARG_WORKSPACE_ROOT=${{ env.CONTAINER_WORKSPACE_ROOT }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/deploy-docs.yaml b/.github/workflows/deploy-docs.yaml new file mode 100644 index 0000000..ec230bc --- /dev/null +++ b/.github/workflows/deploy-docs.yaml @@ -0,0 +1,43 @@ +--- +name: deploy-docs +on: + workflow_call: + workflow_dispatch: + push: + branches: + - main + # Paths can be used to only trigger actions when you have edited certain files, such as a file within the /docs directory + paths: + - "book/**" + - ".github/workflows/deploy-docs.yaml" + +permissions: + contents: write + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: 3.9 + + # install the source code + - name: Install the source code + run: pip install . 
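+      # The build step below mirrors what you would run locally to preview the book
+      # (a rough equivalent, assuming the same Python environment):
+      #   pip install -r book/requirements.txt
+      #   jupyter-book build book
+      # The built HTML ends up in book/_build/html, which the final step publishes.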
+ + # Build the book + - name: Build the book + run: | + pip install -r book/requirements.txt + jupyter-book build book + + # Deploy the book's HTML to gh-pages branch + - name: GitHub Pages action + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: book/_build/html + cname: diff --git a/.github/workflows/lint_and_test.yaml b/.github/workflows/lint_and_test.yaml new file mode 100644 index 0000000..3a40b90 --- /dev/null +++ b/.github/workflows/lint_and_test.yaml @@ -0,0 +1,78 @@ +--- +name: lint & test + +on: + workflow_call: + workflow_dispatch: + push: + branches: + - "*" + paths: + - "src/**" + - "tests/**" + - '.copier-template/**' + +jobs: + build: + # Name the Job + name: test-code-base + # Set the agent to run on + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + python-version: [3.8] + + steps: + # Checkout the code base # + - name: Checkout Code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # install poetry + - name: Install poetry + run: pipx install poetry>=1.3.2 + + # set up python + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "poetry" + + # initialize + - name: Initialize + run: make initialize + + # install dependencies + - name: Install dependencies for linters + run: poe install-dev + + # run linters + - name: Run linters + run: | + set -o pipefail + poe lint + + # run unit tests + - name: Run unit-tests + run: | + set -o pipefail + poe tests-cov-fail + + # add content to GitHub summary + - name: Pytest coverage GitHub summary + run: | + set -o pipefail + echo '# Coverage report' >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat tests/pytest-coverage.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + echo '\n\n\n' + + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v3 + with: + fail_ci_if_error: false # optional (default = false) + verbose: true # optional (default = false) diff --git a/.github/workflows/prerelease-to-test-pypi.yaml b/.github/workflows/prerelease-to-test-pypi.yaml new file mode 100644 index 0000000..0eb72c4 --- /dev/null +++ b/.github/workflows/prerelease-to-test-pypi.yaml @@ -0,0 +1,46 @@ +--- +name: semantic-prerelease-to-test-pypi + +on: + workflow_call: + workflow_dispatch: + +permissions: + contents: write + issues: write + pull-requests: write + +jobs: + build: + # Name the Job + name: prerelease + # Set the agent to run on + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + python-version: [3.8] + + # Load all steps # + steps: + # Checkout the code base # + - name: Checkout Code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # set env variable + - name: Set env variable + run: echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + + # semantic release + - name: Python Semantic Release to Test PyPI + uses: relekang/python-semantic-release@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + repository_username: __token__ + repository_password: ${{ secrets.TEST_PYPI_API_TOKEN }} + additional_options: --prerelease --verbosity=DEBUG --define=branch=${{ env.BRANCH }} + env: + REPOSITORY_URL: https://test.pypi.org/legacy/ + diff --git a/.github/workflows/prerelease.yaml b/.github/workflows/prerelease.yaml new file mode 100644 index 0000000..7fa4634 --- /dev/null +++ b/.github/workflows/prerelease.yaml @@ -0,0 +1,53 @@ +--- +name: semantic-prerelease + +on: + 
workflow_call:
+  workflow_dispatch:
+  push:
+    branches:
+      - pre*
+      - beta*
+      - alpha*
+      - rc*
+      - nightly*
+    paths:
+      - 'src/**'
+      - 'pyproject.toml'
+
+permissions:
+  contents: write
+  issues: write
+  pull-requests: write
+
+jobs:
+  build:
+    # Name the Job
+    name: prerelease
+    # Set the agent to run on
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: [3.8]
+
+    # Load all steps #
+    steps:
+      # Checkout the code base #
+      - name: Checkout Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      # set env variable
+      - name: Set env variable
+        run: echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+
+      # semantic release
+      - name: Python Semantic Release to PyPI
+        uses: relekang/python-semantic-release@master
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          repository_username: __token__
+          repository_password: ${{ secrets.PYPI_API_TOKEN }}
+          additional_options: --prerelease --verbosity=DEBUG --define=branch=${{ env.BRANCH }}
diff --git a/.github/workflows/release-patch.yaml b/.github/workflows/release-patch.yaml
new file mode 100644
index 0000000..bc8ee1d
--- /dev/null
+++ b/.github/workflows/release-patch.yaml
@@ -0,0 +1,51 @@
+---
+name: semantic-release-patch
+
+on:
+  workflow_call:
+  workflow_dispatch:
+  push:
+    branches:
+      - patch*
+      - hotfix*
+    paths:
+      - 'src/**'
+      - 'pyproject.toml'
+
+permissions:
+  contents: write
+  issues: write
+  pull-requests: write
+
+jobs:
+  build:
+    # Name the Job
+    name: semantic-release-patch
+    # Set the agent to run on
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: [3.8]
+
+    steps:
+      # Checkout the code base #
+      - name: Checkout Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      # set env variable
+      - name: Set env variable
+        run: echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+
+      # semantic release
+      - name: Python Semantic Release to PyPI
+        uses: relekang/python-semantic-release@master
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          repository_username: __token__
+          repository_password: ${{ secrets.PYPI_API_TOKEN }}
+          additional_options: --patch --verbosity=DEBUG --define=branch=${{ env.BRANCH }}
+        env:
+          REPOSITORY_URL: https://test.pypi.org/legacy/
diff --git a/.github/workflows/release-test.yaml b/.github/workflows/release-test.yaml
new file mode 100644
index 0000000..4af7e13
--- /dev/null
+++ b/.github/workflows/release-test.yaml
@@ -0,0 +1,44 @@
+---
+name: semantic-release-test
+
+on:
+  workflow_call:
+  workflow_dispatch:
+
+permissions:
+  contents: write
+  issues: write
+  pull-requests: write
+
+jobs:
+  build:
+    # Name the Job
+    name: semantic-release-test
+    # Set the agent to run on
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: [3.8]
+
+    steps:
+      # Checkout the code base #
+      - name: Checkout Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      # set env variable
+      - name: Set env variable
+        run: echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+
+      # semantic release
+      - name: Python Semantic Release to PyPI
+        uses: relekang/python-semantic-release@master
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          repository_username: __token__
+          repository_password: ${{ secrets.PYPI_API_TOKEN }}
+          additional_options: --noop --verbosity=DEBUG --define=branch=${{ env.BRANCH }}
+        env:
+          REPOSITORY_URL: https://test.pypi.org/legacy/
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 0000000..8b6e95e
--- /dev/null
+++
b/.github/workflows/release.yaml @@ -0,0 +1,55 @@ +--- +name: semantic-release + +on: + workflow_call: + workflow_dispatch: + push: + branches: + - release* + paths: + - ".docker/**" + +permissions: + contents: write + issues: write + pull-requests: write + +jobs: + build: + # Name the Job + name: semantic-release + # Set the agent to run on + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + python-version: [3.8] + + # Load all steps # + steps: + # Checkout the code base # + - name: Checkout Code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + # set env variable + - name: Set env variable + run: echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + + # install poetry + - name: Install poetry + run: pipx install poetry>=1.3.2 + + # install dependencies + - name: Install dependencies + run: poetry install --with dev + + - name: Python Semantic Release to PyPI + run: | + git config --global user.name "GitHub Actions" + git config --global user.email "actions@github.com" + poetry run semantic-release publish --verbosity=DEBUG --define=branch=${{ env.BRANCH }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2c4098a --- /dev/null +++ b/.gitignore @@ -0,0 +1,40 @@ +# environment files +.env.secret + +# project specific files +workspace/ + +# hypothesis: https://github.com/HypothesisWorks/hypothesis +.hypothesis/ +.vscode/ + +# Ignore node_modules directory +node_modules/ + +# Jupyter Notebook +_build/ +_autosummary/ + +# MkDocs +site/ + +# Python +*.py[cod] +__pycache__/ +dist/ +.tox/ +*.egg-info/ +*.egg +pytest_cache/ +pytest.xml +.mypy_cache/ + +# Coverage +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +htmlcov/ +pytest-coverage.* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..7ed3ccb --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,55 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +default_language_version: + python: python3 +default_install_hook_types: [commit-msg, pre-commit] +default_stages: [commit, manual] +fail_fast: true +minimum_pre_commit_version: "1.4.3" +repos: + - repo: meta + hooks: + - id: check-useless-excludes + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: python-check-blanket-noqa + - id: python-check-blanket-type-ignore + - id: python-check-mock-methods + - id: python-no-eval + - id: python-no-log-warn + - id: text-unicode-replacement-char + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + types: [python] + - id: end-of-file-fixer + types: [python] + - id: check-yaml + - id: check-json + - id: check-added-large-files + # Prevent committing inline conflict markers + - id: check-merge-conflict + args: [--assume-in-merge] + - repo: local + hooks: + - id: commitizen + name: commitizen check + entry: cz check + args: [--allow-abort, --commit-msg-file] + stages: [commit-msg] + language: system + require_serial: true + - id: black + name: black + entry: black + require_serial: true + language: system + types: [python] + # Prevent committing .rej files + - id: forbidden-files + name: forbidden files + entry: found Copier update rejection files; review them and remove them + language: fail + files: "\\.rej$" \ No newline at end of file diff --git a/.tasks-extra.toml b/.tasks-extra.toml new file mode 100644 index 0000000..acd957f --- /dev/null 
+++ b/.tasks-extra.toml
@@ -0,0 +1,7 @@
+[tool.poe.tasks.hello]
+cmd = "echo 'Hello World'"
+help = "Put your extra tasks here"
+
+[tool.poe.tasks.generate_toc]
+cmd = "python3 .github/scripts/autotoc.py"
+help = "Generate the table of contents"
diff --git a/.tasks.toml b/.tasks.toml
new file mode 100644
index 0000000..ddfe5a1
--- /dev/null
+++ b/.tasks.toml
@@ -0,0 +1,116 @@
+[tool.poe.tasks.format-black]
+cmd = "black ."
+help = "format code with black"
+
+[tool.poe.tasks.format-isort]
+cmd = "isort ."
+help = "sort imports with isort"
+
+[tool.poe.tasks.format]
+sequence = ["format-black", "format-isort"]
+help = "format code with black and isort"
+
+[tool.poe.tasks.lint-black]
+cmd = "black --check --diff ."
+help = "check code formatting with black"
+
+[tool.poe.tasks.lint-flake8]
+cmd = "flake8 ."
+help = "check code style with flake8"
+
+[tool.poe.tasks.lint-isort]
+cmd = "isort --check-only --diff ."
+help = "check import sorting with isort"
+
+[tool.poe.tasks.lint-mypy]
+cmd = "mypy --config-file pyproject.toml ."
+help = "check types with mypy"
+
+[tool.poe.tasks.lint-mypy-reports]
+cmd = "mypy --config-file pyproject.toml . --html-report ./tests/mypy-report"
+help = "generate an HTML report of the type (mypy) checker"
+
+[tool.poe.tasks.lint]
+sequence = ["lint-black", "lint-flake8", "lint-isort"]
+help = "check code style with flake8, black, and isort"
+
+[tool.poe.tasks.tests]
+cmd = "pytest --doctest-modules"
+help = "run tests with pytest"
+
+[tool.poe.tasks.tests-cov]
+cmd = "pytest --cov=src --cov-report=xml"
+help = "run tests with pytest and generate a coverage report"
+
+[tool.poe.tasks.tests-cov-fail]
+shell = "pytest --cov=src --cov-report=xml --cov-fail-under=50 --junitxml=tests/pytest.xml | tee tests/pytest-coverage.txt"
+help = "run tests with pytest and generate a coverage report, fail if coverage is below 50%"
+
+[tool.poe.tasks.clean-cov]
+cmd = "rm -rf .coverage* tests/htmlcov tests/pytest.xml tests/pytest-coverage.txt"
+help = "remove coverage reports"
+
+[tool.poe.tasks.clean-pycache]
+cmd = "find .
-type d -name __pycache__ -exec rm -rf {} +" +help = "remove __pycache__ directories" + +[tool.poe.tasks.clean-build] +cmd = "rm -rf build dist *.egg-info" +help = "remove build/python artifacts" + +[tool.poe.tasks.clean-docs] +cmd = "rm -rf book/_build docs/_build _site" +help = "remove documentation artifacts" + +[tool.poe.tasks.clean] +sequence = ["clean-cov", "clean-pycache", "clean-build", "clean-docs"] +help = "remove build artifacts and coverage reports" + +[tool.poe.tasks.install-ghp-import] +cmd = "pipx install ghp-import" +help = "install ghp-import" + +[tool.poe.tasks.install-jupyter-book-pipx] +shell = """ + pipx install jupyter-book + pipx inject jupyter-book $(awk '{if(!/^ *#/ && NF) print}' book/requirements.txt) +""" +help = "install jupyter-book with pipx" + +[tool.poe.tasks.install-jupyter-book] +shell = """ + pip install -r book/requirements.txt +""" +help = "install jupyter-book" + +[tool.poe.tasks.book-build] +cmd = "jupyter-book build book" +help = "build the book" + +[tool.poe.tasks.book-build-all] +cmd = "jupyter-book build book --all" +help = "build the book with all outputs" + +[tool.poe.tasks.book-publish] +cmd = "ghp-import -n -p -f book/_build/html" +help = "publish the book" + +[tool.poe.tasks.install] +cmd = "poetry install --without dev" +help = "install dependencies" + +[tool.poe.tasks.install-dev] +cmd = "poetry install --with dev" +help = "install dev dependencies" + +[tool.poe.tasks.update] +cmd = "poetry update" +help = "update dependencies" + +[tool.poe.tasks.lock] +cmd = "poetry lock" +help = "lock dependencies" + +[tool.poe.tasks.codecov-validate] +cmd = "curl -X POST --data-binary @codecov.yml https://codecov.io/validate" +help = "Validate codecov.yml" diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..61f3ce4 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1 @@ + diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..f7e1c06 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,132 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or advances of + any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, + without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[entelecheia@hotmail.com](mailto:entelecheia@hotmail.com). +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][mozilla coc]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][faq]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[mozilla coc]: https://github.com/mozilla/diversity +[faq]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..81d6105 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,55 @@ +# Contributing + +Contributions are welcome, and they are greatly appreciated! Every little bit +helps, and credit will always be given. You can contribute in the ways listed below. + +## Report Bugs + +Report bugs using GitHub issues. + +If you are reporting a bug, please include: + +* Your operating system name and version. +* Any details about your local setup that might be helpful in troubleshooting. +* Detailed steps to reproduce the bug. + +## Fix Bugs + +Look through the GitHub issues for bugs. Anything tagged with "bug" and "help +wanted" is open to whoever wants to implement it. + +## Implement Features + +Look through the GitHub issues for features. Anything tagged with "enhancement" +and "help wanted" is open to whoever wants to implement it. + +## Write Documentation + +Course Template could always use more documentation, whether as part of the +official Course Template docs, in docstrings, or even on the web in blog posts, +articles, and such. + +## Submit Feedback + +The best way to send feedback is to file an issue on GitHub. + +If you are proposing a feature: + +* Explain in detail how it would work. +* Keep the scope as narrow as possible, to make it easier to implement. +* Remember that this is a volunteer-driven project, and that contributions + are welcome :) + +## Get Started + +Ready to contribute? Here's how to set up `Course Template` for local development. + +1. Fork the repo on GitHub. +2. Clone your fork locally. +3. Create a branch for local development and make changes locally. +4. Commit your changes and push your branch to GitHub. +5. Submit a pull request through the GitHub website. + +## Code of Conduct + +Please note that the Course Template project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By contributing to this project you agree to abide by its terms. 
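+
+As a quick reference, the steps in the *Get Started* section above typically translate to something like the following (an illustrative sketch; the fork URL, branch name, and commit message are placeholders):
+
+```bash
+# 1–2. Fork the repo on GitHub, then clone your fork locally
+git clone https://github.com/<your-username>/course-template.git
+cd course-template
+
+# 3. Create a branch for local development and make your changes
+git checkout -b fix-some-issue
+
+# 4. Commit your changes and push the branch to your fork
+git add .
+git commit -m "fix: describe your change"
+git push origin fix-some-issue
+
+# 5. Open a pull request from your branch on the GitHub website
+```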
diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..4ea99c2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,395 @@ +Attribution 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution 4.0 International Public License ("Public License"). 
To the +extent this Public License may be interpreted as a contract, You are +granted the Licensed Rights in consideration of Your acceptance of +these terms and conditions, and the Licensor grants You such rights in +consideration of benefits the Licensor receives from making the +Licensed Material available under these terms and conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + j. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + k. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. 
Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. 
identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. 
WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. 
Except for the limited purpose of indicating that
+material is shared under a Creative Commons public license or as
+otherwise permitted by the Creative Commons policies published at
+creativecommons.org/policies, Creative Commons does not authorize the
+use of the trademark "Creative Commons" or any other trademark or logo
+of Creative Commons without its prior written consent including,
+without limitation, in connection with any unauthorized modifications
+to any of its public licenses or any other arrangements,
+understandings, or agreements concerning use of licensed material. For
+the avoidance of doubt, this paragraph does not form part of the
+public licenses.
+
+Creative Commons may be contacted at creativecommons.org.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..158ed3c
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,148 @@
+# To do stuff with make, you type `make` in a directory that has a file called
+# "Makefile". You can also type `make -f <filename>` to use a different filename.
+#
+# A Makefile is a collection of rules. Each rule is a recipe to do a specific
+# thing, sort of like a grunt task or an npm package.json script.
+#
+# A rule looks like this:
+#
+# <target>: <prerequisites...>
+# 	<commands>
+#
+# The "target" is required. The prerequisites are optional, and the commands
+# are also optional, but you have to have one or the other.
+#
+# Type `make` to show the available targets and a description of each.
+#
+.DEFAULT_GOAL := help
+.PHONY: help
+help: ## Display this help
+	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
+
+
+##@ Clean-up
+
+clean: ## run all clean commands
+	@poe clean
+
+##@ Utilities
+
+large-files: ## show the 20 largest files in the repo
+	@find . -printf '%s %p\n'| sort -nr | head -20
+
+disk-usage: ## show the disk usage of the repo
+	@du -h -d 2 .
+
+git-sizer: ## run git-sizer
+	@git-sizer --verbose
+
+gc-prune: ## garbage collect and prune
+	@git gc --prune=now
+
+##@ Setup
+
+install-node: ## install node
+	@export NVM_DIR="$${HOME}/.nvm"; \
+	[ -s "$${NVM_DIR}/nvm.sh" ] && . "$${NVM_DIR}/nvm.sh"; \
+	nvm install --lts
+
+nvm-ls: ## list node versions
+	@export NVM_DIR="$${HOME}/.nvm"; \
+	[ -s "$${NVM_DIR}/nvm.sh" ] && . "$${NVM_DIR}/nvm.sh"; \
+	nvm ls
+
+set-default-node: ## set default node
+	@export NVM_DIR="$${HOME}/.nvm"; \
+	[ -s "$${NVM_DIR}/nvm.sh" ] && .
"$${NVM_DIR}/nvm.sh"; \ + nvm alias default node + +install-pipx: ## install pipx (pre-requisite for external tools) + @command -v pipx &> /dev/null || pip install --user pipx || true + +install-copier: install-pipx ## install copier (pre-requisite for init-project) + @command -v copier &> /dev/null || pipx install copier || true + +install-poetry: install-pipx ## install poetry (pre-requisite for install) + @command -v poetry &> /dev/null || pipx install poetry || true + +install-poe: install-pipx ## install poetry (pre-requisite for install) + @command -v poe &> /dev/null || pipx install poethepoet || true + +install-commitzen: install-pipx ## install commitzen (pre-requisite for commit) + @command -v cz &> /dev/null || pipx install commitizen || true + +install-precommit: install-pipx ## install pre-commit + @command -v pre-commit &> /dev/null || pipx install pre-commit || true + +install-precommit-hooks: install-precommit ## install pre-commit hooks + @pre-commit install + +mkvirtualenv: ## create the project environment + @python3 -m venv "$$WORKON_HOME/deepnlp-2023" + @. "$$WORKON_HOME/deepnlp-2023/bin/activate" + @pip install --upgrade pip setuptools wheel + +mkvirtualenv-system: ## create the project environment with system site packages + @python3 -m venv "$$WORKON_HOME/deepnlp-2023" --system-site-packages + @. "$$WORKON_HOME/deepnlp-2023/bin/activate" + @pip install --upgrade pip setuptools wheel + +workon: ## activate the project environment + @. "$$WORKON_HOME/deepnlp-2023/bin/activate" + +initialize: install-pipx ## initialize the project environment + @pipx install copier + @pipx install poethepoet + @pipx install commitizen + @pipx install pre-commit + @pre-commit install + +init-project: initialize remove-template ## initialize the project (Warning: do this only once!) + @copier copy --trust --answers-file .copier-config.yaml gh:entelecheia/hyperfast-python-template . + +reinit-project: install-copier ## reinitialize the project (Warning: this may overwrite existing files!) + @bash -c 'args=(); while IFS= read -r file; do args+=("--skip" "$$file"); done < .copierignore; copier copy --trust "$${args[@]}" --answers-file .copier-config.yaml gh:entelecheia/hyperfast-python-template .' + +reinit-docker-project: install-copier ## reinitialize the project (Warning: this may overwrite existing files!) + @bash -c 'args=(); while IFS= read -r file; do args+=("--skip" "$$file"); done < .copierignore; copier copy "$${args[@]}" --answers-file .copier-docker-config.yaml --trust gh:entelecheia/hyperfast-docker-template .' 
+ +##@ Docker + +symlink-global-docker-env: ## symlink global docker env file for local development + @DOCKERFILES_SHARE_DIR="$HOME/.local/share/dockerfiles" \ + DOCKER_GLOBAL_ENV_FILENAME=".env.docker" \ + DOCKER_GLOBAL_ENV_FILE="$${DOCKERFILES_SHARE_DIR}/$${DOCKER_GLOBAL_ENV_FILENAME}" \ + [ -f "$${DOCKER_GLOBAL_ENV_FILE}" ] && ln -sf "$${DOCKER_GLOBAL_ENV_FILE}" .env.docker || echo "Global docker env file not found" + +docker-login: ## login to docker + @bash .docker/.docker-scripts/docker-compose.sh login + +docker-build-base: ## build the docker base image + @bash .docker/.docker-scripts/docker-compose.sh build --variant base + +docker-build-app: ## build the docker app image + @bash .docker/.docker-scripts/docker-compose.sh build --variant app + +docker-config-base: ## show the docker base config + @bash .docker/.docker-scripts/docker-compose.sh config --variant base + +docker-config-app: ## show the docker app config + @bash .docker/.docker-scripts/docker-compose.sh config --variant app + +docker-push-base: ## push the docker base image + @bash .docker/.docker-scripts/docker-compose.sh push --variant base + +docker-push-app: ## push the docker app image + @bash .docker/.docker-scripts/docker-compose.sh push --variant app + +docker-run-base: ## run the docker base image + @bash .docker/.docker-scripts/docker-compose.sh run --variant base + +docker-run-app: ## run the docker app image + @bash .docker/.docker-scripts/docker-compose.sh run --variant app + +docker-up-app: ## launch the docker app image + @bash .docker/.docker-scripts/docker-compose.sh up --variant app + +docker-up-app-detach: ## launch the docker app image in detached mode + @bash .docker/.docker-scripts/docker-compose.sh up --variant app --detach diff --git a/README.md b/README.md index c2420a1..76f3338 100644 --- a/README.md +++ b/README.md @@ -1 +1,46 @@ -# course-template \ No newline at end of file +# Course Template + + +[![version-image]][release-url] +[![release-date-image]][release-url] +[![license-image]][license-url] +[![codecov][codecov-image]][codecov-url] +[![jupyter-book-image]][docs-url] + + +[codecov-image]: https://codecov.io/gh/entelecheia/course-template/branch/main/graph/badge.svg?token=[REPLACE_ME] +[codecov-url]: https://codecov.io/gh/entelecheia/course-template +[pypi-image]: https://img.shields.io/pypi/v/course-template +[license-image]: https://img.shields.io/github/license/entelecheia/course-template +[license-url]: https://github.com/entelecheia/course-template/blob/main/LICENSE +[version-image]: https://img.shields.io/github/v/release/entelecheia/course-template?sort=semver +[release-date-image]: https://img.shields.io/github/release-date/entelecheia/course-template +[release-url]: https://github.com/entelecheia/course-template/releases +[jupyter-book-image]: https://jupyterbook.org/en/stable/_images/badge.svg + +[repo-url]: https://github.com/entelecheia/course-template +[pypi-url]: https://pypi.org/project/course-template +[docs-url]: https://entelecheia.github.io/course-template +[changelog]: https://github.com/entelecheia/course-template/blob/main/CHANGELOG.md +[contributing guidelines]: https://github.com/entelecheia/course-template/blob/main/CONTRIBUTING.md + + +A template for a course + +- Documentation: [https://entelecheia.github.io/course-template][docs-url] +- GitHub: [https://github.com/entelecheia/course-template][repo-url] + + +A template for a course + +## Changelog + +See the [CHANGELOG] for more information. + +## Contributing + +Contributions are welcome! 
Please see the [contributing guidelines] for more information. + +## License + +This project is released under the [CC-BY-4.0 License][license-url]. diff --git a/book/_config.yml b/book/_config.yml new file mode 100644 index 0000000..b2abe03 --- /dev/null +++ b/book/_config.yml @@ -0,0 +1,103 @@ +####################################################################################### +# A default configuration that will be loaded for all jupyter books +# See the documentation for help and more options: +# https://jupyterbook.org/customize/config.html + +####################################################################################### +# Book settings +title: "Course Template" # The title of the book. Will be placed in the left navbar. +author: "Young Joon Lee" # The author of the book +copyright: "2024" # Copyright year to be placed in the footer +# logo: Path to the book logo + +# Force re-execution of notebooks on each build. +# See https://jupyterbook.org/content/execute.html +execute: + execute_notebooks: cache + exclude_patterns: + - "*.ipynb" + allow_errors: true + +# Define the name of the latex output file for PDF builds +latex: + latex_documents: + targetname: book.tex + +# Add a bibtex file so that we can create citations +bibtex_bibfiles: + - references.bib + +# Information about where the book exists on the web +repository: + url: https://github.com/entelecheia/course-template # Online location of your book + path_to_book: book # Optional path to your book, relative to the repository root + branch: main # Which branch of the repository should be used when creating links (optional) + +# Add GitHub buttons to your book +# See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository +html: + use_issues_button: true + use_repository_button: true + use_edit_page_button: true + favicon: https://assets.entelecheia.ai/favicon.png # favicon: Path to the favicon image + # google_analytics_id: Google Analytics ID + +sphinx: + config: + html_extra_path: ["assets"] + bibtex_reference_style: author_year + mathjax_path: https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js + intersphinx_mapping: + ebp: + - "https://executablebooks.org/en/latest/" + - null + myst-parser: + - "https://myst-parser.readthedocs.io/en/latest/" + - null + myst-nb: + - "https://myst-nb.readthedocs.io/en/latest/" + - null + sphinx: + - "https://www.sphinx-doc.org/en/master" + - null + nbformat: + - "https://nbformat.readthedocs.io/en/latest" + - null + sd: + - "https://sphinx-design.readthedocs.io/en/latest" + - null + sphinxproof: + - "https://sphinx-proof.readthedocs.io/en/latest/" + - null + hoverxref_intersphinx: + - "sphinxproof" + mathjax3_config: + tex: + macros: + "N": "\\mathbb{N}" + "floor": ["\\lfloor#1\\rfloor", 1] + "bmat": ["\\left[\\begin{array}"] + "emat": ["\\end{array}\\right]"] + add_module_names: false + autosummary_generate: true + + extra_extensions: + - sphinx.ext.intersphinx + - sphinx.ext.autodoc + - sphinx.ext.autosummary + - sphinx.ext.napoleon + - sphinx.ext.viewcode + - sphinx_inline_tabs + - sphinx_proof + - sphinx_examples + - hoverxref.extension + - sphinxcontrib.youtube + - sphinxcontrib.video + - sphinxcontrib.mermaid + - sphinx_thebe + - sphinx_carousel.carousel + - sphinxcontrib.lastupdate + +launch_buttons: + colab_url: "https://colab.research.google.com" + thebe: true diff --git a/book/_toc.yml b/book/_toc.yml new file mode 100644 index 0000000..6f222bb --- /dev/null +++ b/book/_toc.yml @@ -0,0 +1,9 @@ +# Table of contents +# Learn more at 
https://jupyterbook.org/customize/toc.html + +format: jb-book +root: index +chapters: + - file: usage + - file: api + - file: examples diff --git a/book/api.rst b/book/api.rst new file mode 100644 index 0000000..4d340f6 --- /dev/null +++ b/book/api.rst @@ -0,0 +1,16 @@ +API Reference +============= + +.. autosummary:: + :toctree: _autosummary + :recursive: + + coursetemp + +main +---- +.. autofunction:: coursetemp.__cli__.main + +get_version +----------- +.. autofunction:: coursetemp.__init__.get_version diff --git a/book/assets/assets/extra/.gitkeep b/book/assets/assets/extra/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/book/assets/extra/.gitkeep b/book/assets/extra/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/book/examples.md b/book/examples.md new file mode 100644 index 0000000..bc0c5c2 --- /dev/null +++ b/book/examples.md @@ -0,0 +1,34 @@ +--- +jupytext: + cell_metadata_filter: -all + formats: md:myst + text_representation: + extension: .md + format_name: myst + format_version: 0.13 + jupytext_version: 1.11.5 +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Examples + +This page contains a few examples of how you can use Course Template package. + +## Installation + +You can install Course Template using `pip`: + +```{code-cell} +pip install course-template +``` + +## Usage + +```python +from coursetemp import get_version + +print(get_version()) +``` diff --git a/book/index.md b/book/index.md new file mode 100644 index 0000000..0f8c5ff --- /dev/null +++ b/book/index.md @@ -0,0 +1,53 @@ +# Course Template + +[![pypi-image]][pypi-url] +[![version-image]][release-url] +[![release-date-image]][release-url] +[![license-image]][license-url] +[![jupyter-book-image]][docs-url] +[![codecov][codecov-image]][codecov-url] + + +[hyperfast python template]: https://github.com/entelecheia/hyperfast-python-template + +[codecov-image]: https://codecov.io/gh/entelecheia/course-template/branch/main/graph/badge.svg?token=[REPLACE_ME] +[codecov-url]: https://codecov.io/gh/entelecheia/course-template +[pypi-image]: https://img.shields.io/pypi/v/course-template +[license-image]: https://img.shields.io/github/license/entelecheia/course-template +[license-url]: https://github.com/entelecheia/course-template/blob/main/LICENSE +[version-image]: https://img.shields.io/github/v/release/entelecheia/course-template?sort=semver +[release-date-image]: https://img.shields.io/github/release-date/entelecheia/course-template +[release-url]: https://github.com/entelecheia/course-template/releases +[jupyter-book-image]: https://jupyterbook.org/en/stable/_images/badge.svg + +[repo-url]: https://github.com/entelecheia/course-template +[pypi-url]: https://pypi.org/project/course-template +[docs-url]: https://entelecheia.github.io/course-template +[changelog]: https://github.com/entelecheia/course-template/blob/main/CHANGELOG.md +[contributing guidelines]: https://github.com/entelecheia/course-template/blob/main/CONTRIBUTING.md + + +A template for a course + +- Documentation: [https://entelecheia.github.io/course-template][docs-url] +- GitHub: [https://github.com/entelecheia/course-template][repo-url] +- PyPI: [https://pypi.org/project/course-template][pypi-url] + +A template for a course + +## Changelog + +See the [CHANGELOG] for more information. + +## Contributing + +Contributions are welcome! Please see the [contributing guidelines] for more information. + +## License + +This project is released under the [CC-BY-4.0 License][license-url]. 
+ +## Table of Contents + +```{tableofcontents} +``` diff --git a/book/references.bib b/book/references.bib new file mode 100644 index 0000000..ec380ec --- /dev/null +++ b/book/references.bib @@ -0,0 +1,3 @@ +--- +--- + diff --git a/book/requirements.txt b/book/requirements.txt new file mode 100644 index 0000000..923ce4b --- /dev/null +++ b/book/requirements.txt @@ -0,0 +1,12 @@ +jupyter-book +docutils +sphinx-inline-tabs +sphinx-examples +sphinx-proof +sphinx-hoverxref +sphinxcontrib-youtube +sphinxcontrib-video +sphinx-thebe +sphinxcontrib-mermaid +sphinx-carousel +sphinxcontrib-lastupdate diff --git a/book/syllabus/index.md b/book/syllabus/index.md new file mode 100644 index 0000000..17c0682 --- /dev/null +++ b/book/syllabus/index.md @@ -0,0 +1 @@ +# Course Title diff --git a/book/usage.md b/book/usage.md new file mode 100644 index 0000000..8f04b05 --- /dev/null +++ b/book/usage.md @@ -0,0 +1 @@ +# Usage diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..8ce5dbd --- /dev/null +++ b/codecov.yml @@ -0,0 +1,12 @@ +# When modifying this file, please validate using +# curl -X POST --data-binary @codecov.yml https://codecov.io/validate +coverage: + status: + project: + default: + target: 50% # the required coverage value + threshold: 10% # the leniency in hitting the target + informational: true + patch: + default: + informational: true diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..e1cfe99 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1233 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. + +[[package]] +name = "black" +version = "24.2.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file 
= "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-log" +version = "0.4.0" +description = "Logging integration for Click" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, + {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, +] + +[package.dependencies] +click = "*" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.3" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = 
"sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = 
"coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + +[[package]] +name = "dotty-dict" +version = "1.3.1" +description = "Dictionary wrapper for quick access to deeply nested keys." 
+category = "dev" +optional = false +python-versions = ">=3.5,<4.0" +files = [ + {file = "dotty_dict-1.3.1-py3-none-any.whl", hash = "sha256:5022d234d9922f13aa711b4950372a06a6d64cb6d6db9ba43d0ba133ebfce31f"}, + {file = "dotty_dict-1.3.1.tar.gz", hash = "sha256:4b016e03b8ae265539757a53eba24b9bfda506fb94fbce0bee843c6f05541a15"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +category = "dev" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" +TOMLi = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.42" +description = "GitPython is a Python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, + {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = 
"7.0.1" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.1.2" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.1.2-py3-none-any.whl", hash = "sha256:9a0a862501dc38b68adebc82970140c9e4209fc99601782925178f8386339938"}, + {file = "importlib_resources-6.1.2.tar.gz", hash = "sha256:308abf8474e2dba5f867d279237cd4076482c3de7104a40b41426370e891549b"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "invoke" +version = "2.2.0" +description = "Pythonic task execution" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820"}, + {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jaraco-classes" +version = "3.3.1" +description = "Utility functions for Python class constructs" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.3.1-py3-none-any.whl", hash = "sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206"}, + {file = "jaraco.classes-3.3.1.tar.gz", hash = "sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jeepney" +version = "0.8.0" +description = "Low-level, pure Python DBus protocol wrapper." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] + +[package.extras] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] + +[[package]] +name = "keyring" +version = "24.3.1" +description = "Store and access your passwords safely." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "keyring-24.3.1-py3-none-any.whl", hash = "sha256:df38a4d7419a6a60fea5cef1e45a948a3e8430dd12ad88b0f423c5c143906218"}, + {file = "keyring-24.3.1.tar.gz", hash = "sha256:c3327b6ffafc0e8befbdb597cacdb4928ffe5c1212f7645f186e6d9957a898db"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +"jaraco.classes" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +completion = ["shtab (>=1.1.0)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "more-itertools" +version = "10.2.0" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, + {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, +] + +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nh3" +version = "0.2.15" +description = "Python bindings to the ammonia HTML sanitization library." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0"}, + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5"}, + {file = "nh3-0.2.15-cp37-abi3-win32.whl", hash = "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601"}, + {file = "nh3-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e"}, + {file = "nh3-0.2.15.tar.gz", hash = "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pkginfo" +version = "1.9.6" +description = "Query metadata from sdists / bdists / installed packages." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, + {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, +] + +[package.extras] +testing = ["pytest", "pytest-cov"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-gitlab" +version = "3.15.0" +description = "Interact with GitLab API" +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "python-gitlab-3.15.0.tar.gz", hash = "sha256:c9e65eb7612a9fbb8abf0339972eca7fd7a73d4da66c9b446ffe528930aff534"}, + {file = "python_gitlab-3.15.0-py3-none-any.whl", hash = "sha256:8f8d1c0d387f642eb1ac7bf5e8e0cd8b3dd49c6f34170cee3c7deb7d384611f3"}, +] + +[package.dependencies] +requests = ">=2.25.0" +requests-toolbelt = ">=0.10.1" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +yaml = ["PyYaml (>=5.2)"] + +[[package]] +name = "python-semantic-release" +version = "7.34.6" +description = "Automatic Semantic Versioning for Python projects" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "python-semantic-release-7.34.6.tar.gz", hash = "sha256:e9b8fb788024ae9510a924136d573588415a16eeca31cc5240f2754a80a2e831"}, + {file = "python_semantic_release-7.34.6-py3-none-any.whl", hash = "sha256:7e3969ba4663d9b2087b02bf3ac140e202551377bf045c34e09bfe19753e19ab"}, +] + +[package.dependencies] +click = ">=7,<9" +click-log = ">=0.3,<1" +dotty-dict = ">=1.3.0,<2" +gitpython = ">=3.0.8,<4" +invoke = ">=1.4.1,<3" +packaging = "*" +python-gitlab = ">=2,<4" +requests = ">=2.25,<3" +semver = ">=2.10,<3" +tomlkit = ">=0.10,<1.0" +twine = ">=3,<4" +wheel = "*" + +[package.extras] +dev = ["black", "isort", "tox"] +docs = ["Jinja2 (==3.0.3)", "Sphinx (==1.8.6)"] +mypy = ["mypy", "types-requests"] +test = ["coverage (>=5,<6)", "mock (==1.3.0)", "pytest 
(>=7,<8)", "pytest-mock (>=2,<3)", "pytest-xdist (>=1,<2)", "responses (==0.13.3)"] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "readme-renderer" +version = "43.0" +description = "readme_renderer is a library for rendering readme descriptions for Warehouse" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "readme_renderer-43.0-py3-none-any.whl", hash = "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"}, + {file = "readme_renderer-43.0.tar.gz", hash = "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311"}, +] + +[package.dependencies] +docutils = ">=0.13.1" +nh3 = ">=0.2.14" +Pygments = ">=2.5.1" + +[package.extras] +md = ["cmarkgfm (>=0.8.0)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rfc3986" +version = "2.0.0" +description = "Validating URI References per RFC 3986" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, + {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, +] + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "semver" +version = "2.13.0" +description = "Python helper for Semantic Versioning (http://semver.org/)" +category = "dev" 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, + {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, +] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.4" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, + {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "twine" +version = "3.8.0" +description = "Collection of utilities for publishing packages on PyPI" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "twine-3.8.0-py3-none-any.whl", hash = "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8"}, + {file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"}, +] + +[package.dependencies] +colorama = ">=0.4.3" +importlib-metadata = ">=3.6" +keyring = ">=15.1" +pkginfo = ">=1.8.1" +readme-renderer = ">=21.0" +requests = ">=2.20" +requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" +rfc3986 = ">=1.4.0" +tqdm = ">=4.14" +urllib3 = ">=1.26.0" + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" 
+description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<3.13" +content-hash = "6789bf6ad9d60ea10c2677c4904672927e21afdadb794fb4ee442b0958a71226" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d89c86d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,109 @@ +[tool.poetry] +name = "course-template" +version = "0.0.0" +description = "A template for a course" +authors = ["Young Joon Lee "] +license = "CC-BY-4.0" +homepage = "https://entelecheia.github.io/course-template" +repository = "https://github.com/entelecheia/course-template" +readme = "README.md" + + +[tool.poetry.dependencies] +python = ">=3.8.1,<3.13" +click = "^8.1.3" + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +python-semantic-release = "^7.33.1" +isort = "^5.12.0" +black = ">=23.0.0" +flake8 = "^6.0.0" +mypy = "^1.0.0" +flake8-pyproject = "^1.2.2" +pytest = "^7.2.1" +pytest-cov = "^4.0.0" + +[tool.poe] +include = [".tasks.toml", ".tasks-extra.toml"] + +[tool.black] +# 'extend-exclude' excludes files or directories in addition to the defaults +extend-exclude = ''' +# A regex preceded with ^/ will apply only to files and directories +# in the root of the project. 
+(
+  _version.py
+  | .refs
+  | .copier-template
+  | tests
+  | docs
+  | book
+  | _build
+  | node_modules
+)
+'''
+
+[tool.isort]
+profile = "black"
+skip = ['_version.py', 'node_modules', '_build', 'docs', 'tests', 'venv', '.copier-template', '.refs']
+
+[tool.flake8]
+ignore = ['F401', 'E501', 'W503']
+exclude = ["node_modules", "_build", "docs", "tests", "venv", ".copier-template", ".refs"]
+per-file-ignores = ['__init__.py:F401', '_version.py:W292']
+
+[tool.mypy]
+namespace_packages = true
+exclude = ["node_modules", "build", "_build", "dist", "docs", "tests", "venv", ".copier-template", ".refs"]
+# 3rd party import
+ignore_missing_imports = true
+# dynamic typing
+disallow_any_unimported = true
+disallow_any_expr = false
+disallow_any_decorated = false
+disallow_any_explicit = true
+disallow_any_generics = false
+disallow_subclassing_any = true
+# platform
+python_version = "3.8"
+# untyped
+disallow_untyped_calls = true
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_untyped_decorators = true
+# None and Optional
+no_implicit_optional = true
+# Warnings
+warn_return_any = false
+warn_unreachable = true
+# Misc
+pretty = true
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+addopts = "-p no:cacheprovider" # deactivating pytest caching.
+
+[tool.coverage.report]
+exclude_lines = ['if __name__ == "__main__":']
+
+[tool.commitizen]
+name = "cz_conventional_commits"
+tag_format = "v$version"
+
+[tool.semantic_release]
+branch = "main"
+version_toml = "pyproject.toml:tool.poetry.version"
+version_source = "tag"
+commit_version_number = true # required for version_source = "tag"
+commit_subject = "chore(release): :rocket: {version} [skip ci]"
+prerelease_tag = "rc"
+major_on_zero = true
+tag_commit = true
+changelog_file = "CHANGELOG.md"
+upload_to_repository = false
+upload_to_release = false
+hvcs = "github" # hosting version control system, gitlab is also supported
+build_command = "mkdir -p dist && echo 'No build command configured' > dist/release.txt"